1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
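
/* Illustrative sketch, not part of GCC: one way a consumer (e.g. an
   unwinder) might apply the CFA rule described above once it knows the
   current CFA register and offset.  The names example_regs and
   example_compute_cfa are hypothetical.  */
#if 0
#include <stdint.h>

static uintptr_t
example_compute_cfa (const uintptr_t *example_regs, unsigned int cfa_regno,
                     intptr_t cfa_offset)
{
  /* CFA = contents of the rule's base register plus the rule's offset.  */
  return example_regs[cfa_regno] + (uintptr_t) cfa_offset;
}
#endif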
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types_list needs to be a
149 vec<tree, va_gc> *because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
157 define type declaration DIE's. */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
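
/* A minimal sketch (hypothetical helper, not part of GCC; it ignores
   endianness, bounds checking and the SGI/MIPS variant mentioned above)
   of how a consumer decodes the initial length field: a leading
   0xffffffff escape selects 64-bit DWARF, in which case the real length
   follows in the next 8 bytes.  */
#if 0
#include <stdint.h>
#include <string.h>

static uint64_t
example_read_initial_length (const unsigned char *p, int *offset_size)
{
  uint32_t first;
  memcpy (&first, p, 4);
  if (first == 0xffffffffUL)
    {
      uint64_t len;
      memcpy (&len, p + 4, 8);
      *offset_size = 8;		/* 64-bit DWARF: offsets are 8 bytes.  */
      return len;
    }
  *offset_size = 4;		/* 32-bit DWARF: offsets are 4 bytes.  */
  return first;
}
#endif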
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
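/* For example, DWARF_ROUND (6, 4) is 8 and DWARF_ROUND (8, 4) stays 8;
   the integer arithmetic works for any positive BOUNDARY, not just
   powers of two.  */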
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
254 Rather than forcing all the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_SECOND_SECT_LABEL
308 #define FUNC_SECOND_SECT_LABEL "LFSB"
309 #endif
310
311 #ifndef FUNC_END_LABEL
312 #define FUNC_END_LABEL "LFE"
313 #endif
314
315 #ifndef PROLOGUE_END_LABEL
316 #define PROLOGUE_END_LABEL "LPE"
317 #endif
318
319 #ifndef EPILOGUE_BEGIN_LABEL
320 #define EPILOGUE_BEGIN_LABEL "LEB"
321 #endif
322
323 #ifndef FRAME_BEGIN_LABEL
324 #define FRAME_BEGIN_LABEL "Lframe"
325 #endif
326 #define CIE_AFTER_SIZE_LABEL "LSCIE"
327 #define CIE_END_LABEL "LECIE"
328 #define FDE_LABEL "LSFDE"
329 #define FDE_AFTER_SIZE_LABEL "LASFDE"
330 #define FDE_END_LABEL "LEFDE"
331 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
332 #define LINE_NUMBER_END_LABEL "LELT"
333 #define LN_PROLOG_AS_LABEL "LASLTP"
334 #define LN_PROLOG_END_LABEL "LELTP"
335 #define DIE_LABEL_PREFIX "DW"
336
337 /* Match the base name of a file to the base name of a compilation unit. */
338
339 static int
340 matches_main_base (const char *path)
341 {
342 /* Cache the last query. */
343 static const char *last_path = NULL;
344 static int last_match = 0;
345 if (path != last_path)
346 {
347 const char *base;
348 int length = base_of_path (path, &base);
349 last_path = path;
350 last_match = (length == main_input_baselength
351 && memcmp (base, main_input_basename, length) == 0);
352 }
353 return last_match;
354 }
355
356 #ifdef DEBUG_DEBUG_STRUCT
357
358 static int
359 dump_struct_debug (tree type, enum debug_info_usage usage,
360 enum debug_struct_file criterion, int generic,
361 int matches, int result)
362 {
363 /* Find the type name. */
364 tree type_decl = TYPE_STUB_DECL (type);
365 tree t = type_decl;
366 const char *name = 0;
367 if (TREE_CODE (t) == TYPE_DECL)
368 t = DECL_NAME (t);
369 if (t)
370 name = IDENTIFIER_POINTER (t);
371
372 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
373 criterion,
374 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
375 matches ? "bas" : "hdr",
376 generic ? "gen" : "ord",
377 usage == DINFO_USAGE_DFN ? ";" :
378 usage == DINFO_USAGE_DIR_USE ? "." : "*",
379 result,
380 (void*) type_decl, name);
381 return result;
382 }
383 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
384 dump_struct_debug (type, usage, criterion, generic, matches, result)
385
386 #else
387
388 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
389 (result)
390
391 #endif
392
393 /* Get the number of HOST_WIDE_INTs needed to represent the precision
394 of the number. Some constants have a large uniform precision, so
395 we get the precision needed for the actual value of the number. */
396
397 static unsigned int
398 get_full_len (const wide_int &op)
399 {
400 int prec = wi::min_precision (op, UNSIGNED);
401 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
402 / HOST_BITS_PER_WIDE_INT);
403 }
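/* For example, with 64-bit HOST_WIDE_INTs a value needing 65 bits of
   precision yields (65 + 63) / 64 == 2 HOST_WIDE_INTs, while any value
   that fits in 64 bits yields 1.  */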
404
405 static bool
406 should_emit_struct_debug (tree type, enum debug_info_usage usage)
407 {
408 enum debug_struct_file criterion;
409 tree type_decl;
410 bool generic = lang_hooks.types.generic_p (type);
411
412 if (generic)
413 criterion = debug_struct_generic[usage];
414 else
415 criterion = debug_struct_ordinary[usage];
416
417 if (criterion == DINFO_STRUCT_FILE_NONE)
418 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
419 if (criterion == DINFO_STRUCT_FILE_ANY)
420 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
421
422 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
423
424 if (type_decl != NULL)
425 {
426 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
428
429 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
430 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
431 }
432
433 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
434 }
435
436 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
437 switch to the data section instead, and write out a synthetic start label
438 for collect2 the first time around. */
439
440 static void
441 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
442 {
443 if (eh_frame_section == 0)
444 {
445 int flags;
446
447 if (EH_TABLES_CAN_BE_READ_ONLY)
448 {
449 int fde_encoding;
450 int per_encoding;
451 int lsda_encoding;
452
453 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
454 /*global=*/0);
455 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
456 /*global=*/1);
457 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
458 /*global=*/0);
459 flags = ((! flag_pic
460 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
461 && (fde_encoding & 0x70) != DW_EH_PE_aligned
462 && (per_encoding & 0x70) != DW_EH_PE_absptr
463 && (per_encoding & 0x70) != DW_EH_PE_aligned
464 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
465 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
466 ? 0 : SECTION_WRITE);
467 }
468 else
469 flags = SECTION_WRITE;
470
471 #ifdef EH_FRAME_SECTION_NAME
472 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
473 #else
474 eh_frame_section = ((flags == SECTION_WRITE)
475 ? data_section : readonly_data_section);
476 #endif /* EH_FRAME_SECTION_NAME */
477 }
478
479 switch_to_section (eh_frame_section);
480
481 #ifdef EH_FRAME_THROUGH_COLLECT2
482 /* We have no special eh_frame section. Emit special labels to guide
483 collect2. */
484 if (!back)
485 {
486 tree label = get_file_function_name ("F");
487 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
488 targetm.asm_out.globalize_label (asm_out_file,
489 IDENTIFIER_POINTER (label));
490 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
491 }
492 #endif
493 }
494
495 /* Switch [BACK] to the eh or debug frame table section, depending on
496 FOR_EH. */
497
498 static void
499 switch_to_frame_table_section (int for_eh, bool back)
500 {
501 if (for_eh)
502 switch_to_eh_frame_section (back);
503 else
504 {
505 if (!debug_frame_section)
506 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
507 SECTION_DEBUG, NULL);
508 switch_to_section (debug_frame_section);
509 }
510 }
511
512 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
513
514 enum dw_cfi_oprnd_type
515 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
516 {
517 switch (cfi)
518 {
519 case DW_CFA_nop:
520 case DW_CFA_GNU_window_save:
521 case DW_CFA_remember_state:
522 case DW_CFA_restore_state:
523 return dw_cfi_oprnd_unused;
524
525 case DW_CFA_set_loc:
526 case DW_CFA_advance_loc1:
527 case DW_CFA_advance_loc2:
528 case DW_CFA_advance_loc4:
529 case DW_CFA_MIPS_advance_loc8:
530 return dw_cfi_oprnd_addr;
531
532 case DW_CFA_offset:
533 case DW_CFA_offset_extended:
534 case DW_CFA_def_cfa:
535 case DW_CFA_offset_extended_sf:
536 case DW_CFA_def_cfa_sf:
537 case DW_CFA_restore:
538 case DW_CFA_restore_extended:
539 case DW_CFA_undefined:
540 case DW_CFA_same_value:
541 case DW_CFA_def_cfa_register:
542 case DW_CFA_register:
543 case DW_CFA_expression:
544 case DW_CFA_val_expression:
545 return dw_cfi_oprnd_reg_num;
546
547 case DW_CFA_def_cfa_offset:
548 case DW_CFA_GNU_args_size:
549 case DW_CFA_def_cfa_offset_sf:
550 return dw_cfi_oprnd_offset;
551
552 case DW_CFA_def_cfa_expression:
553 return dw_cfi_oprnd_loc;
554
555 default:
556 gcc_unreachable ();
557 }
558 }
559
560 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
561
562 enum dw_cfi_oprnd_type
563 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
564 {
565 switch (cfi)
566 {
567 case DW_CFA_def_cfa:
568 case DW_CFA_def_cfa_sf:
569 case DW_CFA_offset:
570 case DW_CFA_offset_extended_sf:
571 case DW_CFA_offset_extended:
572 return dw_cfi_oprnd_offset;
573
574 case DW_CFA_register:
575 return dw_cfi_oprnd_reg_num;
576
577 case DW_CFA_expression:
578 case DW_CFA_val_expression:
579 return dw_cfi_oprnd_loc;
580
581 case DW_CFA_def_cfa_expression:
582 return dw_cfi_oprnd_cfa_loc;
583
584 default:
585 return dw_cfi_oprnd_unused;
586 }
587 }
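/* For example, DW_CFA_offset carries a register number in its first
   operand and an offset in its second, so dw_cfi_oprnd1_desc returns
   dw_cfi_oprnd_reg_num for it and dw_cfi_oprnd2_desc returns
   dw_cfi_oprnd_offset.  */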
588
589 /* Output one FDE. */
590
591 static void
592 output_fde (dw_fde_ref fde, bool for_eh, bool second,
593 char *section_start_label, int fde_encoding, char *augmentation,
594 bool any_lsda_needed, int lsda_encoding)
595 {
596 const char *begin, *end;
597 static unsigned int j;
598 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
599
600 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
601 /* empty */ 0);
602 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
603 for_eh + j);
604 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
605 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
606 if (!XCOFF_DEBUGGING_INFO || for_eh)
607 {
608 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
609 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
610 " indicating 64-bit DWARF extension");
611 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
612 "FDE Length");
613 }
614 ASM_OUTPUT_LABEL (asm_out_file, l1);
615
616 if (for_eh)
617 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
618 else
619 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
620 debug_frame_section, "FDE CIE offset");
621
622 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
623 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
624
625 if (for_eh)
626 {
627 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
628 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
629 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
630 "FDE initial location");
631 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
632 end, begin, "FDE address range");
633 }
634 else
635 {
636 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
637 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
638 }
639
640 if (augmentation[0])
641 {
642 if (any_lsda_needed)
643 {
644 int size = size_of_encoded_value (lsda_encoding);
645
646 if (lsda_encoding == DW_EH_PE_aligned)
647 {
648 int offset = ( 4 /* Length */
649 + 4 /* CIE offset */
650 + 2 * size_of_encoded_value (fde_encoding)
651 + 1 /* Augmentation size */ );
652 int pad = -offset & (PTR_SIZE - 1);
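/* The "-offset & (PTR_SIZE - 1)" form above is the usual power-of-two
   rounding idiom: e.g. with PTR_SIZE == 8 and offset == 13, pad == 3,
   so offset + pad == 16 is again 8-byte aligned.  */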
653
654 size += pad;
655 gcc_assert (size_of_uleb128 (size) == 1);
656 }
657
658 dw2_asm_output_data_uleb128 (size, "Augmentation size");
659
660 if (fde->uses_eh_lsda)
661 {
662 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
663 fde->funcdef_number);
664 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
665 gen_rtx_SYMBOL_REF (Pmode, l1),
666 false,
667 "Language Specific Data Area");
668 }
669 else
670 {
671 if (lsda_encoding == DW_EH_PE_aligned)
672 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
673 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
674 "Language Specific Data Area (none)");
675 }
676 }
677 else
678 dw2_asm_output_data_uleb128 (0, "Augmentation size");
679 }
680
681 /* Loop through the Call Frame Instructions associated with this FDE. */
682 fde->dw_fde_current_label = begin;
683 {
684 size_t from, until, i;
685
686 from = 0;
687 until = vec_safe_length (fde->dw_fde_cfi);
688
689 if (fde->dw_fde_second_begin == NULL)
690 ;
691 else if (!second)
692 until = fde->dw_fde_switch_cfi_index;
693 else
694 from = fde->dw_fde_switch_cfi_index;
695
696 for (i = from; i < until; i++)
697 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
698 }
699
700 /* If we are to emit a ref/link from function bodies to their frame tables,
701 do it now. This is typically performed to make sure that tables
702 associated with functions are dragged with them and not discarded by
703 link-time garbage collection. We need to do this on a per-function basis to
704 cope with -ffunction-sections. */
705
706 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
707 /* Switch to the function section, emit the ref to the tables, and
708 switch *back* into the table section. */
709 switch_to_section (function_section (fde->decl));
710 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
711 switch_to_frame_table_section (for_eh, true);
712 #endif
713
714 /* Pad the FDE out to an address sized boundary. */
715 ASM_OUTPUT_ALIGN (asm_out_file,
716 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
717 ASM_OUTPUT_LABEL (asm_out_file, l2);
718
719 j += 2;
720 }
721
722 /* Return true if frame description entry FDE is needed for EH. */
723
724 static bool
725 fde_needed_for_eh_p (dw_fde_ref fde)
726 {
727 if (flag_asynchronous_unwind_tables)
728 return true;
729
730 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
731 return true;
732
733 if (fde->uses_eh_lsda)
734 return true;
735
736 /* If exceptions are enabled, we have collected nothrow info. */
737 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
738 return false;
739
740 return true;
741 }
742
743 /* Output the call frame information used to record information
744 that relates to calculating the frame pointer, and records the
745 location of saved registers. */
746
747 static void
748 output_call_frame_info (int for_eh)
749 {
750 unsigned int i;
751 dw_fde_ref fde;
752 dw_cfi_ref cfi;
753 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
754 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
755 bool any_lsda_needed = false;
756 char augmentation[6];
757 int augmentation_size;
758 int fde_encoding = DW_EH_PE_absptr;
759 int per_encoding = DW_EH_PE_absptr;
760 int lsda_encoding = DW_EH_PE_absptr;
761 int return_reg;
762 rtx personality = NULL;
763 int dw_cie_version;
764
765 /* Don't emit a CIE if there won't be any FDEs. */
766 if (!fde_vec)
767 return;
768
769 /* Nothing to do if the assembler's doing it all. */
770 if (dwarf2out_do_cfi_asm ())
771 return;
772
773 /* If we don't have any functions we'll want to unwind out of, don't emit
774 any EH unwind information. If we make FDEs linkonce, we may have to
775 emit an empty label for an FDE that wouldn't otherwise be emitted. We
776 want to avoid having an FDE kept around when the function it refers to
777 is discarded. Example where this matters: a primary function template
778 in C++ requires EH information, an explicit specialization doesn't. */
779 if (for_eh)
780 {
781 bool any_eh_needed = false;
782
783 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
784 {
785 if (fde->uses_eh_lsda)
786 any_eh_needed = any_lsda_needed = true;
787 else if (fde_needed_for_eh_p (fde))
788 any_eh_needed = true;
789 else if (TARGET_USES_WEAK_UNWIND_INFO)
790 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
791 }
792
793 if (!any_eh_needed)
794 return;
795 }
796
797 /* We're going to be generating comments, so turn on app. */
798 if (flag_debug_asm)
799 app_enable ();
800
801 /* Switch to the proper frame section, first time. */
802 switch_to_frame_table_section (for_eh, false);
803
804 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
805 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
806
807 /* Output the CIE. */
808 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
809 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
810 if (!XCOFF_DEBUGGING_INFO || for_eh)
811 {
812 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
813 dw2_asm_output_data (4, 0xffffffff,
814 "Initial length escape value indicating 64-bit DWARF extension");
815 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
816 "Length of Common Information Entry");
817 }
818 ASM_OUTPUT_LABEL (asm_out_file, l1);
819
820 /* Now that the CIE pointer is PC-relative for EH,
821 use 0 to identify the CIE. */
822 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
823 (for_eh ? 0 : DWARF_CIE_ID),
824 "CIE Identifier Tag");
825
826 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
827 use CIE version 1, unless that would produce incorrect results
828 due to overflowing the return register column. */
829 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
830 dw_cie_version = 1;
831 if (return_reg >= 256 || dwarf_version > 2)
832 dw_cie_version = 3;
833 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
834
835 augmentation[0] = 0;
836 augmentation_size = 0;
837
838 personality = current_unit_personality;
839 if (for_eh)
840 {
841 char *p;
842
843 /* Augmentation:
844 z Indicates that a uleb128 is present to size the
845 augmentation section.
846 L Indicates the encoding (and thus presence) of
847 an LSDA pointer in the FDE augmentation.
848 R Indicates a non-default pointer encoding for
849 FDE code pointers.
850 P Indicates the presence of an encoding + language
851 personality routine in the CIE augmentation. */
852
853 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
854 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
855 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
856
857 p = augmentation + 1;
858 if (personality)
859 {
860 *p++ = 'P';
861 augmentation_size += 1 + size_of_encoded_value (per_encoding);
862 assemble_external_libcall (personality);
863 }
864 if (any_lsda_needed)
865 {
866 *p++ = 'L';
867 augmentation_size += 1;
868 }
869 if (fde_encoding != DW_EH_PE_absptr)
870 {
871 *p++ = 'R';
872 augmentation_size += 1;
873 }
874 if (p > augmentation + 1)
875 {
876 augmentation[0] = 'z';
877 *p = '\0';
878 }
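/* For a typical C++ unit with a personality routine, an LSDA and a
   non-absptr FDE encoding, the string built above is "zPLR"; if none
   of the optional entries apply it stays empty and no augmentation
   data is emitted.  */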
879
880 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
881 if (personality && per_encoding == DW_EH_PE_aligned)
882 {
883 int offset = ( 4 /* Length */
884 + 4 /* CIE Id */
885 + 1 /* CIE version */
886 + strlen (augmentation) + 1 /* Augmentation */
887 + size_of_uleb128 (1) /* Code alignment */
888 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
889 + 1 /* RA column */
890 + 1 /* Augmentation size */
891 + 1 /* Personality encoding */ );
892 int pad = -offset & (PTR_SIZE - 1);
893
894 augmentation_size += pad;
895
896 /* Augmentations should be small, so there's scarce need to
897 iterate for a solution. Die if we exceed one uleb128 byte. */
898 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
899 }
900 }
901
902 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
903 if (dw_cie_version >= 4)
904 {
905 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
906 dw2_asm_output_data (1, 0, "CIE Segment Size");
907 }
908 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
909 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
910 "CIE Data Alignment Factor");
911
912 if (dw_cie_version == 1)
913 dw2_asm_output_data (1, return_reg, "CIE RA Column");
914 else
915 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
916
917 if (augmentation[0])
918 {
919 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
920 if (personality)
921 {
922 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
923 eh_data_format_name (per_encoding));
924 dw2_asm_output_encoded_addr_rtx (per_encoding,
925 personality,
926 true, NULL);
927 }
928
929 if (any_lsda_needed)
930 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
931 eh_data_format_name (lsda_encoding));
932
933 if (fde_encoding != DW_EH_PE_absptr)
934 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
935 eh_data_format_name (fde_encoding));
936 }
937
938 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
939 output_cfi (cfi, NULL, for_eh);
940
941 /* Pad the CIE out to an address sized boundary. */
942 ASM_OUTPUT_ALIGN (asm_out_file,
943 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
944 ASM_OUTPUT_LABEL (asm_out_file, l2);
945
946 /* Loop through all of the FDE's. */
947 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
948 {
949 unsigned int k;
950
951 /* Don't emit EH unwind info for leaf functions that don't need it. */
952 if (for_eh && !fde_needed_for_eh_p (fde))
953 continue;
954
955 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
956 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
957 augmentation, any_lsda_needed, lsda_encoding);
958 }
959
960 if (for_eh && targetm.terminate_dw2_eh_frame_info)
961 dw2_asm_output_data (4, 0, "End of Table");
962
963 /* Turn off app to make assembly quicker. */
964 if (flag_debug_asm)
965 app_disable ();
966 }
967
968 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
969
970 static void
971 dwarf2out_do_cfi_startproc (bool second)
972 {
973 int enc;
974 rtx ref;
975
976 fprintf (asm_out_file, "\t.cfi_startproc\n");
977
978 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
979 eh unwinders. */
980 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
981 return;
982
983 rtx personality = get_personality_function (current_function_decl);
984
985 if (personality)
986 {
987 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
988 ref = personality;
989
990 /* ??? The GAS support isn't entirely consistent. We have to
991 handle indirect support ourselves, but PC-relative is done
992 in the assembler. Further, the assembler can't handle any
993 of the weirder relocation types. */
994 if (enc & DW_EH_PE_indirect)
995 ref = dw2_force_const_mem (ref, true);
996
997 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
998 output_addr_const (asm_out_file, ref);
999 fputc ('\n', asm_out_file);
1000 }
1001
1002 if (crtl->uses_eh_lsda)
1003 {
1004 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1005
1006 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1007 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1008 current_function_funcdef_no);
1009 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1010 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1011
1012 if (enc & DW_EH_PE_indirect)
1013 ref = dw2_force_const_mem (ref, true);
1014
1015 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1016 output_addr_const (asm_out_file, ref);
1017 fputc ('\n', asm_out_file);
1018 }
1019 }
1020
1021 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1022 this allocation may be done before pass_final. */
1023
1024 dw_fde_ref
1025 dwarf2out_alloc_current_fde (void)
1026 {
1027 dw_fde_ref fde;
1028
1029 fde = ggc_cleared_alloc<dw_fde_node> ();
1030 fde->decl = current_function_decl;
1031 fde->funcdef_number = current_function_funcdef_no;
1032 fde->fde_index = vec_safe_length (fde_vec);
1033 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1034 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1035 fde->nothrow = crtl->nothrow;
1036 fde->drap_reg = INVALID_REGNUM;
1037 fde->vdrap_reg = INVALID_REGNUM;
1038
1039 /* Record the FDE associated with this function. */
1040 cfun->fde = fde;
1041 vec_safe_push (fde_vec, fde);
1042
1043 return fde;
1044 }
1045
1046 /* Output a marker (i.e. a label) for the beginning of a function, before
1047 the prologue. */
1048
1049 void
1050 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1051 unsigned int column ATTRIBUTE_UNUSED,
1052 const char *file ATTRIBUTE_UNUSED)
1053 {
1054 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1055 char * dup_label;
1056 dw_fde_ref fde;
1057 section *fnsec;
1058 bool do_frame;
1059
1060 current_function_func_begin_label = NULL;
1061
1062 do_frame = dwarf2out_do_frame ();
1063
1064 /* ??? current_function_func_begin_label is also used by except.c for
1065 call-site information. We must emit this label if it might be used. */
1066 if (!do_frame
1067 && (!flag_exceptions
1068 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1069 return;
1070
1071 fnsec = function_section (current_function_decl);
1072 switch_to_section (fnsec);
1073 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1074 current_function_funcdef_no);
1075 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1076 current_function_funcdef_no);
1077 dup_label = xstrdup (label);
1078 current_function_func_begin_label = dup_label;
1079
1080 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1081 if (!do_frame)
1082 return;
1083
1084 /* Unlike the debug version, the EH version of frame unwind info is a per-
1085 function setting so we need to record whether we need it for the unit. */
1086 do_eh_frame |= dwarf2out_do_eh_frame ();
1087
1088 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1089 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1090 would include pass_dwarf2_frame. If we've not created the FDE yet,
1091 do so now. */
1092 fde = cfun->fde;
1093 if (fde == NULL)
1094 fde = dwarf2out_alloc_current_fde ();
1095
1096 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1097 fde->dw_fde_begin = dup_label;
1098 fde->dw_fde_current_label = dup_label;
1099 fde->in_std_section = (fnsec == text_section
1100 || (cold_text_section && fnsec == cold_text_section));
1101
1102 /* We only want to output line number information for the genuine dwarf2
1103 prologue case, not the eh frame case. */
1104 #ifdef DWARF2_DEBUGGING_INFO
1105 if (file)
1106 dwarf2out_source_line (line, column, file, 0, true);
1107 #endif
1108
1109 if (dwarf2out_do_cfi_asm ())
1110 dwarf2out_do_cfi_startproc (false);
1111 else
1112 {
1113 rtx personality = get_personality_function (current_function_decl);
1114 if (!current_unit_personality)
1115 current_unit_personality = personality;
1116
1117 /* We cannot keep a current personality per function as without CFI
1118 asm, at the point where we emit the CFI data, there is no current
1119 function anymore. */
1120 if (personality && current_unit_personality != personality)
1121 sorry ("multiple EH personalities are supported only with assemblers "
1122 "supporting .cfi_personality directive");
1123 }
1124 }
1125
1126 /* Output a marker (i.e. a label) for the end of the generated code
1127 for a function prologue. This gets called *after* the prologue code has
1128 been generated. */
1129
1130 void
1131 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1132 const char *file ATTRIBUTE_UNUSED)
1133 {
1134 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1135
1136 /* Output a label to mark the endpoint of the code generated for this
1137 function. */
1138 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1139 current_function_funcdef_no);
1140 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1141 current_function_funcdef_no);
1142 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1143 }
1144
1145 /* Output a marker (i.e. a label) for the beginning of the generated code
1146 for a function epilogue. This gets called *before* the prologue code has
1147 been generated. */
1148
1149 void
1150 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1151 const char *file ATTRIBUTE_UNUSED)
1152 {
1153 dw_fde_ref fde = cfun->fde;
1154 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1155
1156 if (fde->dw_fde_vms_begin_epilogue)
1157 return;
1158
1159 /* Output a label to mark the endpoint of the code generated for this
1160 function. */
1161 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1162 current_function_funcdef_no);
1163 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1164 current_function_funcdef_no);
1165 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1166 }
1167
1168 /* Output a marker (i.e. a label) for the absolute end of the generated code
1169 for a function definition. This gets called *after* the epilogue code has
1170 been generated. */
1171
1172 void
1173 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1174 const char *file ATTRIBUTE_UNUSED)
1175 {
1176 dw_fde_ref fde;
1177 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1178
1179 last_var_location_insn = NULL;
1180 cached_next_real_insn = NULL;
1181
1182 if (dwarf2out_do_cfi_asm ())
1183 fprintf (asm_out_file, "\t.cfi_endproc\n");
1184
1185 /* Output a label to mark the endpoint of the code generated for this
1186 function. */
1187 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1188 current_function_funcdef_no);
1189 ASM_OUTPUT_LABEL (asm_out_file, label);
1190 fde = cfun->fde;
1191 gcc_assert (fde != NULL);
1192 if (fde->dw_fde_second_begin == NULL)
1193 fde->dw_fde_end = xstrdup (label);
1194 }
1195
1196 void
1197 dwarf2out_frame_finish (void)
1198 {
1199 /* Output call frame information. */
1200 if (targetm.debug_unwind_info () == UI_DWARF2)
1201 output_call_frame_info (0);
1202
1203 /* Output another copy for the unwinder. */
1204 if (do_eh_frame)
1205 output_call_frame_info (1);
1206 }
1207
1208 /* Note that the current function section is being used for code. */
1209
1210 static void
1211 dwarf2out_note_section_used (void)
1212 {
1213 section *sec = current_function_section ();
1214 if (sec == text_section)
1215 text_section_used = true;
1216 else if (sec == cold_text_section)
1217 cold_text_section_used = true;
1218 }
1219
1220 static void var_location_switch_text_section (void);
1221 static void set_cur_line_info_table (section *);
1222
1223 void
1224 dwarf2out_switch_text_section (void)
1225 {
1226 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1227 section *sect;
1228 dw_fde_ref fde = cfun->fde;
1229
1230 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1231
1232 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1233 current_function_funcdef_no);
1234
1235 fde->dw_fde_second_begin = ggc_strdup (label);
1236 if (!in_cold_section_p)
1237 {
1238 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1239 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1240 }
1241 else
1242 {
1243 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1244 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1245 }
1246 have_multiple_function_sections = true;
1247
1248 /* There is no need to mark used sections when not debugging. */
1249 if (cold_text_section != NULL)
1250 dwarf2out_note_section_used ();
1251
1252 if (dwarf2out_do_cfi_asm ())
1253 fprintf (asm_out_file, "\t.cfi_endproc\n");
1254
1255 /* Now do the real section switch. */
1256 sect = current_function_section ();
1257 switch_to_section (sect);
1258
1259 fde->second_in_std_section
1260 = (sect == text_section
1261 || (cold_text_section && sect == cold_text_section));
1262
1263 if (dwarf2out_do_cfi_asm ())
1264 dwarf2out_do_cfi_startproc (true);
1265
1266 var_location_switch_text_section ();
1267
1268 if (cold_text_section != NULL)
1269 set_cur_line_info_table (sect);
1270 }
1271
1272 /* And now, the subset of the debugging information support code necessary
1273 for emitting location expressions. */
1274
1275 /* Data about a single source file. */
1276 struct GTY((for_user)) dwarf_file_data {
1277 const char * filename;
1278 int emitted_number;
1279 };
1280
1281 /* Describe an entry into the .debug_addr section. */
1282
1283 enum ate_kind {
1284 ate_kind_rtx,
1285 ate_kind_rtx_dtprel,
1286 ate_kind_label
1287 };
1288
1289 struct GTY((for_user)) addr_table_entry {
1290 enum ate_kind kind;
1291 unsigned int refcount;
1292 unsigned int index;
1293 union addr_table_entry_struct_union
1294 {
1295 rtx GTY ((tag ("0"))) rtl;
1296 char * GTY ((tag ("1"))) label;
1297 }
1298 GTY ((desc ("%1.kind"))) addr;
1299 };
1300
1301 typedef unsigned int var_loc_view;
1302
1303 /* Location lists are ranges + location descriptions for that range,
1304 so you can track variables that are in different places over
1305 their entire life. */
1306 typedef struct GTY(()) dw_loc_list_struct {
1307 dw_loc_list_ref dw_loc_next;
1308 const char *begin; /* Label and addr_entry for start of range */
1309 addr_table_entry *begin_entry;
1310 const char *end; /* Label for end of range */
1311 char *ll_symbol; /* Label for beginning of location list.
1312 Only on head of list. */
1313 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1314 const char *section; /* Section this loclist is relative to */
1315 dw_loc_descr_ref expr;
1316 var_loc_view vbegin, vend;
1317 hashval_t hash;
1318 /* True if all addresses in this and subsequent lists are known to be
1319 resolved. */
1320 bool resolved_addr;
1321 /* True if this list has been replaced by dw_loc_next. */
1322 bool replaced;
1323 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1324 section. */
1325 unsigned char emitted : 1;
1326 /* True if hash field is index rather than hash value. */
1327 unsigned char num_assigned : 1;
1328 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1329 unsigned char offset_emitted : 1;
1330 /* True if note_variable_value_in_expr has been called on it. */
1331 unsigned char noted_variable_value : 1;
1332 /* True if the range should be emitted even if begin and end
1333 are the same. */
1334 bool force;
1335 } dw_loc_list_node;
1336
1337 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1338 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1339
1340 /* Convert a DWARF stack opcode into its string name. */
1341
1342 static const char *
1343 dwarf_stack_op_name (unsigned int op)
1344 {
1345 const char *name = get_DW_OP_name (op);
1346
1347 if (name != NULL)
1348 return name;
1349
1350 return "OP_<unknown>";
1351 }
1352
1353 /* Return TRUE iff we're to output location view lists as a separate
1354 attribute next to the location lists, as an extension compatible
1355 with DWARF 2 and above. */
1356
1357 static inline bool
1358 dwarf2out_locviews_in_attribute ()
1359 {
1360 return debug_variable_location_views == 1;
1361 }
1362
1363 /* Return TRUE iff we're to output location view lists as part of the
1364 location lists, as proposed for standardization after DWARF 5. */
1365
1366 static inline bool
1367 dwarf2out_locviews_in_loclist ()
1368 {
1369 #ifndef DW_LLE_view_pair
1370 return false;
1371 #else
1372 return debug_variable_location_views == -1;
1373 #endif
1374 }
1375
1376 /* Return a pointer to a newly allocated location description. Location
1377 descriptions are simple expression terms that can be strung
1378 together to form more complicated location (address) descriptions. */
1379
1380 static inline dw_loc_descr_ref
1381 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1382 unsigned HOST_WIDE_INT oprnd2)
1383 {
1384 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1385
1386 descr->dw_loc_opc = op;
1387 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1388 descr->dw_loc_oprnd1.val_entry = NULL;
1389 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1390 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1391 descr->dw_loc_oprnd2.val_entry = NULL;
1392 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1393
1394 return descr;
1395 }
1396
1397 /* Add a location description term to a location description expression. */
1398
1399 static inline void
1400 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1401 {
1402 dw_loc_descr_ref *d;
1403
1404 /* Find the end of the chain. */
1405 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1406 ;
1407
1408 *d = descr;
1409 }
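
/* Illustrative use of the two helpers above (a hypothetical snippet,
   not called anywhere): build the two-operation expression
   "DW_OP_breg0 16; DW_OP_deref", i.e. load the value found at the
   address 16 bytes past the contents of register 0.  */
#if 0
static dw_loc_descr_ref
example_build_deref_of_reg0_plus_16 (void)
{
  dw_loc_descr_ref expr = new_loc_descr (DW_OP_breg0, 16, 0);
  add_loc_descr (&expr, new_loc_descr (DW_OP_deref, 0, 0));
  return expr;
}
#endif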
1410
1411 /* Compare two location operands for exact equality. */
1412
1413 static bool
1414 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1415 {
1416 if (a->val_class != b->val_class)
1417 return false;
1418 switch (a->val_class)
1419 {
1420 case dw_val_class_none:
1421 return true;
1422 case dw_val_class_addr:
1423 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1424
1425 case dw_val_class_offset:
1426 case dw_val_class_unsigned_const:
1427 case dw_val_class_const:
1428 case dw_val_class_unsigned_const_implicit:
1429 case dw_val_class_const_implicit:
1430 case dw_val_class_range_list:
1431 /* These are all HOST_WIDE_INT, signed or unsigned. */
1432 return a->v.val_unsigned == b->v.val_unsigned;
1433
1434 case dw_val_class_loc:
1435 return a->v.val_loc == b->v.val_loc;
1436 case dw_val_class_loc_list:
1437 return a->v.val_loc_list == b->v.val_loc_list;
1438 case dw_val_class_view_list:
1439 return a->v.val_view_list == b->v.val_view_list;
1440 case dw_val_class_die_ref:
1441 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1442 case dw_val_class_fde_ref:
1443 return a->v.val_fde_index == b->v.val_fde_index;
1444 case dw_val_class_symview:
1445 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1446 case dw_val_class_lbl_id:
1447 case dw_val_class_lineptr:
1448 case dw_val_class_macptr:
1449 case dw_val_class_loclistsptr:
1450 case dw_val_class_high_pc:
1451 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1452 case dw_val_class_str:
1453 return a->v.val_str == b->v.val_str;
1454 case dw_val_class_flag:
1455 return a->v.val_flag == b->v.val_flag;
1456 case dw_val_class_file:
1457 case dw_val_class_file_implicit:
1458 return a->v.val_file == b->v.val_file;
1459 case dw_val_class_decl_ref:
1460 return a->v.val_decl_ref == b->v.val_decl_ref;
1461
1462 case dw_val_class_const_double:
1463 return (a->v.val_double.high == b->v.val_double.high
1464 && a->v.val_double.low == b->v.val_double.low);
1465
1466 case dw_val_class_wide_int:
1467 return *a->v.val_wide == *b->v.val_wide;
1468
1469 case dw_val_class_vec:
1470 {
1471 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1472 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1473
1474 return (a_len == b_len
1475 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1476 }
1477
1478 case dw_val_class_data8:
1479 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1480
1481 case dw_val_class_vms_delta:
1482 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1483 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1484
1485 case dw_val_class_discr_value:
1486 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1487 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1488 case dw_val_class_discr_list:
1489 /* It makes no sense comparing two discriminant value lists. */
1490 return false;
1491 }
1492 gcc_unreachable ();
1493 }
1494
1495 /* Compare two location atoms for exact equality. */
1496
1497 static bool
1498 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1499 {
1500 if (a->dw_loc_opc != b->dw_loc_opc)
1501 return false;
1502
1503 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1504 address size, but since we always allocate cleared storage it
1505 should be zero for other types of locations. */
1506 if (a->dtprel != b->dtprel)
1507 return false;
1508
1509 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1510 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1511 }
1512
1513 /* Compare two complete location expressions for exact equality. */
1514
1515 bool
1516 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1517 {
1518 while (1)
1519 {
1520 if (a == b)
1521 return true;
1522 if (a == NULL || b == NULL)
1523 return false;
1524 if (!loc_descr_equal_p_1 (a, b))
1525 return false;
1526
1527 a = a->dw_loc_next;
1528 b = b->dw_loc_next;
1529 }
1530 }
1531
1532
1533 /* Add a constant POLY_OFFSET to a location expression. */
1534
1535 static void
1536 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1537 {
1538 dw_loc_descr_ref loc;
1539 HOST_WIDE_INT *p;
1540
1541 gcc_assert (*list_head != NULL);
1542
1543 if (known_eq (poly_offset, 0))
1544 return;
1545
1546 /* Find the end of the chain. */
1547 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1548 ;
1549
1550 HOST_WIDE_INT offset;
1551 if (!poly_offset.is_constant (&offset))
1552 {
1553 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1554 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1555 return;
1556 }
1557
1558 p = NULL;
1559 if (loc->dw_loc_opc == DW_OP_fbreg
1560 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1561 p = &loc->dw_loc_oprnd1.v.val_int;
1562 else if (loc->dw_loc_opc == DW_OP_bregx)
1563 p = &loc->dw_loc_oprnd2.v.val_int;
1564
1565 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1566 offset. Don't optimize if a signed integer overflow would happen. */
1567 if (p != NULL
1568 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1569 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1570 *p += offset;
1571
1572 else if (offset > 0)
1573 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1574
1575 else
1576 {
1577 loc->dw_loc_next
1578 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1579 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1580 }
1581 }
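/* For example, appending a constant 24 to an expression ending in
   "DW_OP_fbreg -16" simply rewrites that operation as "DW_OP_fbreg 8",
   whereas an expression ending in DW_OP_deref gains a trailing
   "DW_OP_plus_uconst 24".  */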
1582
1583 /* Return a pointer to a newly allocated location description for
1584 REG and OFFSET. */
1585
1586 static inline dw_loc_descr_ref
1587 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1588 {
1589 HOST_WIDE_INT const_offset;
1590 if (offset.is_constant (&const_offset))
1591 {
1592 if (reg <= 31)
1593 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1594 const_offset, 0);
1595 else
1596 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1597 }
1598 else
1599 {
1600 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1601 loc_descr_plus_const (&ret, offset);
1602 return ret;
1603 }
1604 }
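/* For example, new_reg_loc_descr (3, 8) yields the single operation
   "DW_OP_breg3 8", while a register number above 31 goes through
   DW_OP_bregx instead.  */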
1605
1606 /* Add a constant OFFSET to a location list. */
1607
1608 static void
1609 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1610 {
1611 dw_loc_list_ref d;
1612 for (d = list_head; d != NULL; d = d->dw_loc_next)
1613 loc_descr_plus_const (&d->expr, offset);
1614 }
1615
1616 #define DWARF_REF_SIZE \
1617 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1618
1619 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1620 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1621 DW_FORM_data16 with 128 bits. */
1622 #define DWARF_LARGEST_DATA_FORM_BITS \
1623 (dwarf_version >= 5 ? 128 : 64)
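
/* Many size computations below rely on size_of_uleb128 and
   size_of_sleb128, defined elsewhere in GCC.  A minimal sketch of the
   unsigned variant, for illustration only: ULEB128 stores 7 payload
   bits per byte, so the size is the number of 7-bit groups needed.  */
#if 0
static unsigned long
example_size_of_uleb128 (unsigned HOST_WIDE_INT value)
{
  unsigned long size = 0;
  do
    {
      value >>= 7;
      size += 1;
    }
  while (value != 0);
  return size;
}
#endif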
1624
1625 /* Utility inline function for construction of ops that were GNU extension
1626 before DWARF 5. */
1627 static inline enum dwarf_location_atom
1628 dwarf_OP (enum dwarf_location_atom op)
1629 {
1630 switch (op)
1631 {
1632 case DW_OP_implicit_pointer:
1633 if (dwarf_version < 5)
1634 return DW_OP_GNU_implicit_pointer;
1635 break;
1636
1637 case DW_OP_entry_value:
1638 if (dwarf_version < 5)
1639 return DW_OP_GNU_entry_value;
1640 break;
1641
1642 case DW_OP_const_type:
1643 if (dwarf_version < 5)
1644 return DW_OP_GNU_const_type;
1645 break;
1646
1647 case DW_OP_regval_type:
1648 if (dwarf_version < 5)
1649 return DW_OP_GNU_regval_type;
1650 break;
1651
1652 case DW_OP_deref_type:
1653 if (dwarf_version < 5)
1654 return DW_OP_GNU_deref_type;
1655 break;
1656
1657 case DW_OP_convert:
1658 if (dwarf_version < 5)
1659 return DW_OP_GNU_convert;
1660 break;
1661
1662 case DW_OP_reinterpret:
1663 if (dwarf_version < 5)
1664 return DW_OP_GNU_reinterpret;
1665 break;
1666
1667 default:
1668 break;
1669 }
1670 return op;
1671 }
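/* For example, dwarf_OP (DW_OP_entry_value) yields DW_OP_GNU_entry_value
   when emitting DWARF 2-4 and the standard DW_OP_entry_value for DWARF 5
   and later; opcodes with no GNU counterpart pass through unchanged.  */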
1672
1673 /* Similarly for attributes. */
1674 static inline enum dwarf_attribute
1675 dwarf_AT (enum dwarf_attribute at)
1676 {
1677 switch (at)
1678 {
1679 case DW_AT_call_return_pc:
1680 if (dwarf_version < 5)
1681 return DW_AT_low_pc;
1682 break;
1683
1684 case DW_AT_call_tail_call:
1685 if (dwarf_version < 5)
1686 return DW_AT_GNU_tail_call;
1687 break;
1688
1689 case DW_AT_call_origin:
1690 if (dwarf_version < 5)
1691 return DW_AT_abstract_origin;
1692 break;
1693
1694 case DW_AT_call_target:
1695 if (dwarf_version < 5)
1696 return DW_AT_GNU_call_site_target;
1697 break;
1698
1699 case DW_AT_call_target_clobbered:
1700 if (dwarf_version < 5)
1701 return DW_AT_GNU_call_site_target_clobbered;
1702 break;
1703
1704 case DW_AT_call_parameter:
1705 if (dwarf_version < 5)
1706 return DW_AT_abstract_origin;
1707 break;
1708
1709 case DW_AT_call_value:
1710 if (dwarf_version < 5)
1711 return DW_AT_GNU_call_site_value;
1712 break;
1713
1714 case DW_AT_call_data_value:
1715 if (dwarf_version < 5)
1716 return DW_AT_GNU_call_site_data_value;
1717 break;
1718
1719 case DW_AT_call_all_calls:
1720 if (dwarf_version < 5)
1721 return DW_AT_GNU_all_call_sites;
1722 break;
1723
1724 case DW_AT_call_all_tail_calls:
1725 if (dwarf_version < 5)
1726 return DW_AT_GNU_all_tail_call_sites;
1727 break;
1728
1729 case DW_AT_dwo_name:
1730 if (dwarf_version < 5)
1731 return DW_AT_GNU_dwo_name;
1732 break;
1733
1734 default:
1735 break;
1736 }
1737 return at;
1738 }
1739
1740 /* And similarly for tags. */
1741 static inline enum dwarf_tag
1742 dwarf_TAG (enum dwarf_tag tag)
1743 {
1744 switch (tag)
1745 {
1746 case DW_TAG_call_site:
1747 if (dwarf_version < 5)
1748 return DW_TAG_GNU_call_site;
1749 break;
1750
1751 case DW_TAG_call_site_parameter:
1752 if (dwarf_version < 5)
1753 return DW_TAG_GNU_call_site_parameter;
1754 break;
1755
1756 default:
1757 break;
1758 }
1759 return tag;
1760 }
1761
1762 static unsigned long int get_base_type_offset (dw_die_ref);
1763
1764 /* Return the size of a location descriptor. */
1765
1766 static unsigned long
1767 size_of_loc_descr (dw_loc_descr_ref loc)
1768 {
1769 unsigned long size = 1;
1770
1771 switch (loc->dw_loc_opc)
1772 {
1773 case DW_OP_addr:
1774 size += DWARF2_ADDR_SIZE;
1775 break;
1776 case DW_OP_GNU_addr_index:
1777 case DW_OP_GNU_const_index:
1778 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1779 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1780 break;
1781 case DW_OP_const1u:
1782 case DW_OP_const1s:
1783 size += 1;
1784 break;
1785 case DW_OP_const2u:
1786 case DW_OP_const2s:
1787 size += 2;
1788 break;
1789 case DW_OP_const4u:
1790 case DW_OP_const4s:
1791 size += 4;
1792 break;
1793 case DW_OP_const8u:
1794 case DW_OP_const8s:
1795 size += 8;
1796 break;
1797 case DW_OP_constu:
1798 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1799 break;
1800 case DW_OP_consts:
1801 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1802 break;
1803 case DW_OP_pick:
1804 size += 1;
1805 break;
1806 case DW_OP_plus_uconst:
1807 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1808 break;
1809 case DW_OP_skip:
1810 case DW_OP_bra:
1811 size += 2;
1812 break;
1813 case DW_OP_breg0:
1814 case DW_OP_breg1:
1815 case DW_OP_breg2:
1816 case DW_OP_breg3:
1817 case DW_OP_breg4:
1818 case DW_OP_breg5:
1819 case DW_OP_breg6:
1820 case DW_OP_breg7:
1821 case DW_OP_breg8:
1822 case DW_OP_breg9:
1823 case DW_OP_breg10:
1824 case DW_OP_breg11:
1825 case DW_OP_breg12:
1826 case DW_OP_breg13:
1827 case DW_OP_breg14:
1828 case DW_OP_breg15:
1829 case DW_OP_breg16:
1830 case DW_OP_breg17:
1831 case DW_OP_breg18:
1832 case DW_OP_breg19:
1833 case DW_OP_breg20:
1834 case DW_OP_breg21:
1835 case DW_OP_breg22:
1836 case DW_OP_breg23:
1837 case DW_OP_breg24:
1838 case DW_OP_breg25:
1839 case DW_OP_breg26:
1840 case DW_OP_breg27:
1841 case DW_OP_breg28:
1842 case DW_OP_breg29:
1843 case DW_OP_breg30:
1844 case DW_OP_breg31:
1845 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1846 break;
1847 case DW_OP_regx:
1848 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1849 break;
1850 case DW_OP_fbreg:
1851 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1852 break;
1853 case DW_OP_bregx:
1854 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1855 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1856 break;
1857 case DW_OP_piece:
1858 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1859 break;
1860 case DW_OP_bit_piece:
1861 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1862 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1863 break;
1864 case DW_OP_deref_size:
1865 case DW_OP_xderef_size:
1866 size += 1;
1867 break;
1868 case DW_OP_call2:
1869 size += 2;
1870 break;
1871 case DW_OP_call4:
1872 size += 4;
1873 break;
1874 case DW_OP_call_ref:
1875 case DW_OP_GNU_variable_value:
1876 size += DWARF_REF_SIZE;
1877 break;
1878 case DW_OP_implicit_value:
1879 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1880 + loc->dw_loc_oprnd1.v.val_unsigned;
1881 break;
1882 case DW_OP_implicit_pointer:
1883 case DW_OP_GNU_implicit_pointer:
1884 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1885 break;
1886 case DW_OP_entry_value:
1887 case DW_OP_GNU_entry_value:
1888 {
1889 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1890 size += size_of_uleb128 (op_size) + op_size;
1891 break;
1892 }
1893 case DW_OP_const_type:
1894 case DW_OP_GNU_const_type:
1895 {
1896 unsigned long o
1897 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1898 size += size_of_uleb128 (o) + 1;
1899 switch (loc->dw_loc_oprnd2.val_class)
1900 {
1901 case dw_val_class_vec:
1902 size += loc->dw_loc_oprnd2.v.val_vec.length
1903 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1904 break;
1905 case dw_val_class_const:
1906 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1907 break;
1908 case dw_val_class_const_double:
1909 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1910 break;
1911 case dw_val_class_wide_int:
1912 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1913 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1914 break;
1915 default:
1916 gcc_unreachable ();
1917 }
1918 break;
1919 }
1920 case DW_OP_regval_type:
1921 case DW_OP_GNU_regval_type:
1922 {
1923 unsigned long o
1924 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1925 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1926 + size_of_uleb128 (o);
1927 }
1928 break;
1929 case DW_OP_deref_type:
1930 case DW_OP_GNU_deref_type:
1931 {
1932 unsigned long o
1933 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1934 size += 1 + size_of_uleb128 (o);
1935 }
1936 break;
1937 case DW_OP_convert:
1938 case DW_OP_reinterpret:
1939 case DW_OP_GNU_convert:
1940 case DW_OP_GNU_reinterpret:
1941 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1942 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1943 else
1944 {
1945 unsigned long o
1946 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1947 size += size_of_uleb128 (o);
1948 }
1949 break;
1950 case DW_OP_GNU_parameter_ref:
1951 size += 4;
1952 break;
1953 default:
1954 break;
1955 }
1956
1957 return size;
1958 }
1959
1960 /* Return the size of a series of location descriptors. */
1961
1962 unsigned long
1963 size_of_locs (dw_loc_descr_ref loc)
1964 {
1965 dw_loc_descr_ref l;
1966 unsigned long size;
1967
1968 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1969 field, to avoid writing to a PCH file. */
1970 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1971 {
1972 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1973 break;
1974 size += size_of_loc_descr (l);
1975 }
1976 if (! l)
1977 return size;
1978
1979 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1980 {
1981 l->dw_loc_addr = size;
1982 size += size_of_loc_descr (l);
1983 }
1984
1985 return size;
1986 }
1987
1988 /* Return the size of the value in a DW_AT_discr_value attribute. */
1989
1990 static int
1991 size_of_discr_value (dw_discr_value *discr_value)
1992 {
1993 if (discr_value->pos)
1994 return size_of_uleb128 (discr_value->v.uval);
1995 else
1996 return size_of_sleb128 (discr_value->v.sval);
1997 }
1998
1999 /* Return the size of the value in a DW_AT_discr_list attribute. */
2000
2001 static int
2002 size_of_discr_list (dw_discr_list_ref discr_list)
2003 {
2004 int size = 0;
2005
2006 for (dw_discr_list_ref list = discr_list;
2007 list != NULL;
2008 list = list->dw_discr_next)
2009 {
2010 /* One byte for the discriminant value descriptor, and then one or two
2011 LEB128 numbers, depending on whether it's a single case label or a
2012 range label. */
2013 size += 1;
2014 size += size_of_discr_value (&list->dw_discr_lower_bound);
2015 if (list->dw_discr_range != 0)
2016 size += size_of_discr_value (&list->dw_discr_upper_bound);
2017 }
2018 return size;
2019 }
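/* Worked example (illustrative, editorial addition): a discriminant list
   describing the unsigned labels "1" and "3..5" takes 1 descriptor byte
   + uleb128(1) = 2 bytes for the single label, and 1 + uleb128(3)
   + uleb128(5) = 3 bytes for the range, i.e. 5 bytes in total.  */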
2020
2021 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2022 static void get_ref_die_offset_label (char *, dw_die_ref);
2023 static unsigned long int get_ref_die_offset (dw_die_ref);
2024
2025 /* Output location description stack opcode's operands (if any).
2026 The for_eh_or_skip parameter controls whether register numbers are
2027 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2028 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2029 info). This should be suppressed for the cases that have not been converted
2030 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2031
2032 static void
2033 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2034 {
2035 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2036 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2037
2038 switch (loc->dw_loc_opc)
2039 {
2040 #ifdef DWARF2_DEBUGGING_INFO
2041 case DW_OP_const2u:
2042 case DW_OP_const2s:
2043 dw2_asm_output_data (2, val1->v.val_int, NULL);
2044 break;
2045 case DW_OP_const4u:
2046 if (loc->dtprel)
2047 {
2048 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2049 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2050 val1->v.val_addr);
2051 fputc ('\n', asm_out_file);
2052 break;
2053 }
2054 /* FALLTHRU */
2055 case DW_OP_const4s:
2056 dw2_asm_output_data (4, val1->v.val_int, NULL);
2057 break;
2058 case DW_OP_const8u:
2059 if (loc->dtprel)
2060 {
2061 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2062 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2063 val1->v.val_addr);
2064 fputc ('\n', asm_out_file);
2065 break;
2066 }
2067 /* FALLTHRU */
2068 case DW_OP_const8s:
2069 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2070 dw2_asm_output_data (8, val1->v.val_int, NULL);
2071 break;
2072 case DW_OP_skip:
2073 case DW_OP_bra:
2074 {
2075 int offset;
2076
2077 gcc_assert (val1->val_class == dw_val_class_loc);
2078 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2079
2080 dw2_asm_output_data (2, offset, NULL);
2081 }
2082 break;
2083 case DW_OP_implicit_value:
2084 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2085 switch (val2->val_class)
2086 {
2087 case dw_val_class_const:
2088 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2089 break;
2090 case dw_val_class_vec:
2091 {
2092 unsigned int elt_size = val2->v.val_vec.elt_size;
2093 unsigned int len = val2->v.val_vec.length;
2094 unsigned int i;
2095 unsigned char *p;
2096
2097 if (elt_size > sizeof (HOST_WIDE_INT))
2098 {
2099 elt_size /= 2;
2100 len *= 2;
2101 }
2102 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2103 i < len;
2104 i++, p += elt_size)
2105 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2106 "fp or vector constant word %u", i);
2107 }
2108 break;
2109 case dw_val_class_const_double:
2110 {
2111 unsigned HOST_WIDE_INT first, second;
2112
2113 if (WORDS_BIG_ENDIAN)
2114 {
2115 first = val2->v.val_double.high;
2116 second = val2->v.val_double.low;
2117 }
2118 else
2119 {
2120 first = val2->v.val_double.low;
2121 second = val2->v.val_double.high;
2122 }
2123 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2124 first, NULL);
2125 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2126 second, NULL);
2127 }
2128 break;
2129 case dw_val_class_wide_int:
2130 {
2131 int i;
2132 int len = get_full_len (*val2->v.val_wide);
2133 if (WORDS_BIG_ENDIAN)
2134 for (i = len - 1; i >= 0; --i)
2135 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2136 val2->v.val_wide->elt (i), NULL);
2137 else
2138 for (i = 0; i < len; ++i)
2139 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2140 val2->v.val_wide->elt (i), NULL);
2141 }
2142 break;
2143 case dw_val_class_addr:
2144 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2145 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2146 break;
2147 default:
2148 gcc_unreachable ();
2149 }
2150 break;
2151 #else
2152 case DW_OP_const2u:
2153 case DW_OP_const2s:
2154 case DW_OP_const4u:
2155 case DW_OP_const4s:
2156 case DW_OP_const8u:
2157 case DW_OP_const8s:
2158 case DW_OP_skip:
2159 case DW_OP_bra:
2160 case DW_OP_implicit_value:
2161 /* We currently don't make any attempt to make sure these are
2162 aligned properly like we do for the main unwind info, so
2163 don't support emitting things larger than a byte if we're
2164 only doing unwinding. */
2165 gcc_unreachable ();
2166 #endif
2167 case DW_OP_const1u:
2168 case DW_OP_const1s:
2169 dw2_asm_output_data (1, val1->v.val_int, NULL);
2170 break;
2171 case DW_OP_constu:
2172 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2173 break;
2174 case DW_OP_consts:
2175 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2176 break;
2177 case DW_OP_pick:
2178 dw2_asm_output_data (1, val1->v.val_int, NULL);
2179 break;
2180 case DW_OP_plus_uconst:
2181 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2182 break;
2183 case DW_OP_breg0:
2184 case DW_OP_breg1:
2185 case DW_OP_breg2:
2186 case DW_OP_breg3:
2187 case DW_OP_breg4:
2188 case DW_OP_breg5:
2189 case DW_OP_breg6:
2190 case DW_OP_breg7:
2191 case DW_OP_breg8:
2192 case DW_OP_breg9:
2193 case DW_OP_breg10:
2194 case DW_OP_breg11:
2195 case DW_OP_breg12:
2196 case DW_OP_breg13:
2197 case DW_OP_breg14:
2198 case DW_OP_breg15:
2199 case DW_OP_breg16:
2200 case DW_OP_breg17:
2201 case DW_OP_breg18:
2202 case DW_OP_breg19:
2203 case DW_OP_breg20:
2204 case DW_OP_breg21:
2205 case DW_OP_breg22:
2206 case DW_OP_breg23:
2207 case DW_OP_breg24:
2208 case DW_OP_breg25:
2209 case DW_OP_breg26:
2210 case DW_OP_breg27:
2211 case DW_OP_breg28:
2212 case DW_OP_breg29:
2213 case DW_OP_breg30:
2214 case DW_OP_breg31:
2215 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2216 break;
2217 case DW_OP_regx:
2218 {
2219 unsigned r = val1->v.val_unsigned;
2220 if (for_eh_or_skip >= 0)
2221 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2222 gcc_assert (size_of_uleb128 (r)
2223 == size_of_uleb128 (val1->v.val_unsigned));
2224 dw2_asm_output_data_uleb128 (r, NULL);
2225 }
2226 break;
2227 case DW_OP_fbreg:
2228 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2229 break;
2230 case DW_OP_bregx:
2231 {
2232 unsigned r = val1->v.val_unsigned;
2233 if (for_eh_or_skip >= 0)
2234 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2235 gcc_assert (size_of_uleb128 (r)
2236 == size_of_uleb128 (val1->v.val_unsigned));
2237 dw2_asm_output_data_uleb128 (r, NULL);
2238 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2239 }
2240 break;
2241 case DW_OP_piece:
2242 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2243 break;
2244 case DW_OP_bit_piece:
2245 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2246 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2247 break;
2248 case DW_OP_deref_size:
2249 case DW_OP_xderef_size:
2250 dw2_asm_output_data (1, val1->v.val_int, NULL);
2251 break;
2252
2253 case DW_OP_addr:
2254 if (loc->dtprel)
2255 {
2256 if (targetm.asm_out.output_dwarf_dtprel)
2257 {
2258 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2259 DWARF2_ADDR_SIZE,
2260 val1->v.val_addr);
2261 fputc ('\n', asm_out_file);
2262 }
2263 else
2264 gcc_unreachable ();
2265 }
2266 else
2267 {
2268 #ifdef DWARF2_DEBUGGING_INFO
2269 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2270 #else
2271 gcc_unreachable ();
2272 #endif
2273 }
2274 break;
2275
2276 case DW_OP_GNU_addr_index:
2277 case DW_OP_GNU_const_index:
2278 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2279 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2280 "(index into .debug_addr)");
2281 break;
2282
2283 case DW_OP_call2:
2284 case DW_OP_call4:
2285 {
2286 unsigned long die_offset
2287 = get_ref_die_offset (val1->v.val_die_ref.die);
2288 /* Make sure the offset has been computed and that we can encode it as
2289 an operand. */
2290 gcc_assert (die_offset > 0
2291 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2292 ? 0xffff
2293 : 0xffffffff));
2294 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2295 die_offset, NULL);
2296 }
2297 break;
2298
2299 case DW_OP_call_ref:
2300 case DW_OP_GNU_variable_value:
2301 {
2302 char label[MAX_ARTIFICIAL_LABEL_BYTES
2303 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2304 gcc_assert (val1->val_class == dw_val_class_die_ref);
2305 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2306 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2307 }
2308 break;
2309
2310 case DW_OP_implicit_pointer:
2311 case DW_OP_GNU_implicit_pointer:
2312 {
2313 char label[MAX_ARTIFICIAL_LABEL_BYTES
2314 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2315 gcc_assert (val1->val_class == dw_val_class_die_ref);
2316 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2317 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2318 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2319 }
2320 break;
2321
2322 case DW_OP_entry_value:
2323 case DW_OP_GNU_entry_value:
2324 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2325 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2326 break;
2327
2328 case DW_OP_const_type:
2329 case DW_OP_GNU_const_type:
2330 {
2331 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2332 gcc_assert (o);
2333 dw2_asm_output_data_uleb128 (o, NULL);
2334 switch (val2->val_class)
2335 {
2336 case dw_val_class_const:
2337 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2338 dw2_asm_output_data (1, l, NULL);
2339 dw2_asm_output_data (l, val2->v.val_int, NULL);
2340 break;
2341 case dw_val_class_vec:
2342 {
2343 unsigned int elt_size = val2->v.val_vec.elt_size;
2344 unsigned int len = val2->v.val_vec.length;
2345 unsigned int i;
2346 unsigned char *p;
2347
2348 l = len * elt_size;
2349 dw2_asm_output_data (1, l, NULL);
2350 if (elt_size > sizeof (HOST_WIDE_INT))
2351 {
2352 elt_size /= 2;
2353 len *= 2;
2354 }
2355 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2356 i < len;
2357 i++, p += elt_size)
2358 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2359 "fp or vector constant word %u", i);
2360 }
2361 break;
2362 case dw_val_class_const_double:
2363 {
2364 unsigned HOST_WIDE_INT first, second;
2365 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2366
2367 dw2_asm_output_data (1, 2 * l, NULL);
2368 if (WORDS_BIG_ENDIAN)
2369 {
2370 first = val2->v.val_double.high;
2371 second = val2->v.val_double.low;
2372 }
2373 else
2374 {
2375 first = val2->v.val_double.low;
2376 second = val2->v.val_double.high;
2377 }
2378 dw2_asm_output_data (l, first, NULL);
2379 dw2_asm_output_data (l, second, NULL);
2380 }
2381 break;
2382 case dw_val_class_wide_int:
2383 {
2384 int i;
2385 int len = get_full_len (*val2->v.val_wide);
2386 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2387
2388 dw2_asm_output_data (1, len * l, NULL);
2389 if (WORDS_BIG_ENDIAN)
2390 for (i = len - 1; i >= 0; --i)
2391 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2392 else
2393 for (i = 0; i < len; ++i)
2394 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2395 }
2396 break;
2397 default:
2398 gcc_unreachable ();
2399 }
2400 }
2401 break;
2402 case DW_OP_regval_type:
2403 case DW_OP_GNU_regval_type:
2404 {
2405 unsigned r = val1->v.val_unsigned;
2406 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2407 gcc_assert (o);
2408 if (for_eh_or_skip >= 0)
2409 {
2410 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2411 gcc_assert (size_of_uleb128 (r)
2412 == size_of_uleb128 (val1->v.val_unsigned));
2413 }
2414 dw2_asm_output_data_uleb128 (r, NULL);
2415 dw2_asm_output_data_uleb128 (o, NULL);
2416 }
2417 break;
2418 case DW_OP_deref_type:
2419 case DW_OP_GNU_deref_type:
2420 {
2421 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2422 gcc_assert (o);
2423 dw2_asm_output_data (1, val1->v.val_int, NULL);
2424 dw2_asm_output_data_uleb128 (o, NULL);
2425 }
2426 break;
2427 case DW_OP_convert:
2428 case DW_OP_reinterpret:
2429 case DW_OP_GNU_convert:
2430 case DW_OP_GNU_reinterpret:
2431 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2432 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2433 else
2434 {
2435 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2436 gcc_assert (o);
2437 dw2_asm_output_data_uleb128 (o, NULL);
2438 }
2439 break;
2440
2441 case DW_OP_GNU_parameter_ref:
2442 {
2443 unsigned long o;
2444 gcc_assert (val1->val_class == dw_val_class_die_ref);
2445 o = get_ref_die_offset (val1->v.val_die_ref.die);
2446 dw2_asm_output_data (4, o, NULL);
2447 }
2448 break;
2449
2450 default:
2451 /* Other codes have no operands. */
2452 break;
2453 }
2454 }
2455
2456 /* Output a sequence of location operations.
2457 The for_eh_or_skip parameter controls whether register numbers are
2458 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2459 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2460 info). This should be suppressed for the cases that have not been converted
2461 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2462
2463 void
2464 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2465 {
2466 for (; loc != NULL; loc = loc->dw_loc_next)
2467 {
2468 enum dwarf_location_atom opc = loc->dw_loc_opc;
2469 /* Output the opcode. */
2470 if (for_eh_or_skip >= 0
2471 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2472 {
2473 unsigned r = (opc - DW_OP_breg0);
2474 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2475 gcc_assert (r <= 31);
2476 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2477 }
2478 else if (for_eh_or_skip >= 0
2479 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2480 {
2481 unsigned r = (opc - DW_OP_reg0);
2482 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2483 gcc_assert (r <= 31);
2484 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2485 }
2486
2487 dw2_asm_output_data (1, opc,
2488 "%s", dwarf_stack_op_name (opc));
2489
2490 /* Output the operand(s) (if any). */
2491 output_loc_operands (loc, for_eh_or_skip);
2492 }
2493 }
2494
2495 /* Output location description stack opcode's operands (if any).
2496 The output is single bytes on a line, suitable for .cfi_escape. */
2497
2498 static void
2499 output_loc_operands_raw (dw_loc_descr_ref loc)
2500 {
2501 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2502 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2503
2504 switch (loc->dw_loc_opc)
2505 {
2506 case DW_OP_addr:
2507 case DW_OP_GNU_addr_index:
2508 case DW_OP_GNU_const_index:
2509 case DW_OP_implicit_value:
2510 /* We cannot output addresses in .cfi_escape, only bytes. */
2511 gcc_unreachable ();
2512
2513 case DW_OP_const1u:
2514 case DW_OP_const1s:
2515 case DW_OP_pick:
2516 case DW_OP_deref_size:
2517 case DW_OP_xderef_size:
2518 fputc (',', asm_out_file);
2519 dw2_asm_output_data_raw (1, val1->v.val_int);
2520 break;
2521
2522 case DW_OP_const2u:
2523 case DW_OP_const2s:
2524 fputc (',', asm_out_file);
2525 dw2_asm_output_data_raw (2, val1->v.val_int);
2526 break;
2527
2528 case DW_OP_const4u:
2529 case DW_OP_const4s:
2530 fputc (',', asm_out_file);
2531 dw2_asm_output_data_raw (4, val1->v.val_int);
2532 break;
2533
2534 case DW_OP_const8u:
2535 case DW_OP_const8s:
2536 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2537 fputc (',', asm_out_file);
2538 dw2_asm_output_data_raw (8, val1->v.val_int);
2539 break;
2540
2541 case DW_OP_skip:
2542 case DW_OP_bra:
2543 {
2544 int offset;
2545
2546 gcc_assert (val1->val_class == dw_val_class_loc);
2547 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2548
2549 fputc (',', asm_out_file);
2550 dw2_asm_output_data_raw (2, offset);
2551 }
2552 break;
2553
2554 case DW_OP_regx:
2555 {
2556 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2557 gcc_assert (size_of_uleb128 (r)
2558 == size_of_uleb128 (val1->v.val_unsigned));
2559 fputc (',', asm_out_file);
2560 dw2_asm_output_data_uleb128_raw (r);
2561 }
2562 break;
2563
2564 case DW_OP_constu:
2565 case DW_OP_plus_uconst:
2566 case DW_OP_piece:
2567 fputc (',', asm_out_file);
2568 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2569 break;
2570
2571 case DW_OP_bit_piece:
2572 fputc (',', asm_out_file);
2573 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2574 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2575 break;
2576
2577 case DW_OP_consts:
2578 case DW_OP_breg0:
2579 case DW_OP_breg1:
2580 case DW_OP_breg2:
2581 case DW_OP_breg3:
2582 case DW_OP_breg4:
2583 case DW_OP_breg5:
2584 case DW_OP_breg6:
2585 case DW_OP_breg7:
2586 case DW_OP_breg8:
2587 case DW_OP_breg9:
2588 case DW_OP_breg10:
2589 case DW_OP_breg11:
2590 case DW_OP_breg12:
2591 case DW_OP_breg13:
2592 case DW_OP_breg14:
2593 case DW_OP_breg15:
2594 case DW_OP_breg16:
2595 case DW_OP_breg17:
2596 case DW_OP_breg18:
2597 case DW_OP_breg19:
2598 case DW_OP_breg20:
2599 case DW_OP_breg21:
2600 case DW_OP_breg22:
2601 case DW_OP_breg23:
2602 case DW_OP_breg24:
2603 case DW_OP_breg25:
2604 case DW_OP_breg26:
2605 case DW_OP_breg27:
2606 case DW_OP_breg28:
2607 case DW_OP_breg29:
2608 case DW_OP_breg30:
2609 case DW_OP_breg31:
2610 case DW_OP_fbreg:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2613 break;
2614
2615 case DW_OP_bregx:
2616 {
2617 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2618 gcc_assert (size_of_uleb128 (r)
2619 == size_of_uleb128 (val1->v.val_unsigned));
2620 fputc (',', asm_out_file);
2621 dw2_asm_output_data_uleb128_raw (r);
2622 fputc (',', asm_out_file);
2623 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2624 }
2625 break;
2626
2627 case DW_OP_implicit_pointer:
2628 case DW_OP_entry_value:
2629 case DW_OP_const_type:
2630 case DW_OP_regval_type:
2631 case DW_OP_deref_type:
2632 case DW_OP_convert:
2633 case DW_OP_reinterpret:
2634 case DW_OP_GNU_implicit_pointer:
2635 case DW_OP_GNU_entry_value:
2636 case DW_OP_GNU_const_type:
2637 case DW_OP_GNU_regval_type:
2638 case DW_OP_GNU_deref_type:
2639 case DW_OP_GNU_convert:
2640 case DW_OP_GNU_reinterpret:
2641 case DW_OP_GNU_parameter_ref:
2642 gcc_unreachable ();
2643 break;
2644
2645 default:
2646 /* Other codes have no operands. */
2647 break;
2648 }
2649 }
2650
2651 void
2652 output_loc_sequence_raw (dw_loc_descr_ref loc)
2653 {
2654 while (1)
2655 {
2656 enum dwarf_location_atom opc = loc->dw_loc_opc;
2657 /* Output the opcode. */
2658 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2659 {
2660 unsigned r = (opc - DW_OP_breg0);
2661 r = DWARF2_FRAME_REG_OUT (r, 1);
2662 gcc_assert (r <= 31);
2663 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2664 }
2665 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2666 {
2667 unsigned r = (opc - DW_OP_reg0);
2668 r = DWARF2_FRAME_REG_OUT (r, 1);
2669 gcc_assert (r <= 31);
2670 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2671 }
2672 /* Output the opcode. */
2673 fprintf (asm_out_file, "%#x", opc);
2674 output_loc_operands_raw (loc);
2675
2676 if (!loc->dw_loc_next)
2677 break;
2678 loc = loc->dw_loc_next;
2679
2680 fputc (',', asm_out_file);
2681 }
2682 }
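/* Example of the raw form (illustrative, editorial addition): a one-op
   sequence "DW_OP_fbreg -8" comes out as "0x91,0x78", i.e. the opcode byte
   followed by its SLEB128 operand, suitable for pasting into a .cfi_escape
   directive.  */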
2683
2684 /* This function builds a dwarf location descriptor sequence from a
2685 dw_cfa_location, adding the given OFFSET to the result of the
2686 expression. */
2687
2688 struct dw_loc_descr_node *
2689 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2690 {
2691 struct dw_loc_descr_node *head, *tmp;
2692
2693 offset += cfa->offset;
2694
2695 if (cfa->indirect)
2696 {
2697 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2698 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2699 head->dw_loc_oprnd1.val_entry = NULL;
2700 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2701 add_loc_descr (&head, tmp);
2702 loc_descr_plus_const (&head, offset);
2703 }
2704 else
2705 head = new_reg_loc_descr (cfa->reg, offset);
2706
2707 return head;
2708 }
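/* Example (illustrative, editorial addition): for a CFA of "r6 + 16" and
   OFFSET 8 this returns the single op DW_OP_breg6 24; for an indirect CFA
   it instead returns "DW_OP_breg<reg> <base_offset>; DW_OP_deref" with the
   combined offset appended via loc_descr_plus_const.  */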
2709
2710 /* This function builds a dwarf location descriptor sequence for
2711 the address at OFFSET from the CFA when stack is aligned to
2712 ALIGNMENT byte. */
2713
2714 struct dw_loc_descr_node *
2715 build_cfa_aligned_loc (dw_cfa_location *cfa,
2716 poly_int64 offset, HOST_WIDE_INT alignment)
2717 {
2718 struct dw_loc_descr_node *head;
2719 unsigned int dwarf_fp
2720 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2721
2722 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2723 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2724 {
2725 head = new_reg_loc_descr (dwarf_fp, 0);
2726 add_loc_descr (&head, int_loc_descriptor (alignment));
2727 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2728 loc_descr_plus_const (&head, offset);
2729 }
2730 else
2731 head = new_reg_loc_descr (dwarf_fp, offset);
2732 return head;
2733 }
2734
2735 /* And now, the support for symbolic debugging information. */
2736
2737 /* .debug_str support. */
2738
2739 static void dwarf2out_init (const char *);
2740 static void dwarf2out_finish (const char *);
2741 static void dwarf2out_early_finish (const char *);
2742 static void dwarf2out_assembly_start (void);
2743 static void dwarf2out_define (unsigned int, const char *);
2744 static void dwarf2out_undef (unsigned int, const char *);
2745 static void dwarf2out_start_source_file (unsigned, const char *);
2746 static void dwarf2out_end_source_file (unsigned);
2747 static void dwarf2out_function_decl (tree);
2748 static void dwarf2out_begin_block (unsigned, unsigned);
2749 static void dwarf2out_end_block (unsigned, unsigned);
2750 static bool dwarf2out_ignore_block (const_tree);
2751 static void dwarf2out_early_global_decl (tree);
2752 static void dwarf2out_late_global_decl (tree);
2753 static void dwarf2out_type_decl (tree, int);
2754 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2755 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2756 dw_die_ref);
2757 static void dwarf2out_abstract_function (tree);
2758 static void dwarf2out_var_location (rtx_insn *);
2759 static void dwarf2out_inline_entry (tree);
2760 static void dwarf2out_size_function (tree);
2761 static void dwarf2out_begin_function (tree);
2762 static void dwarf2out_end_function (unsigned int);
2763 static void dwarf2out_register_main_translation_unit (tree unit);
2764 static void dwarf2out_set_name (tree, tree);
2765 static void dwarf2out_register_external_die (tree decl, const char *sym,
2766 unsigned HOST_WIDE_INT off);
2767 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2768 unsigned HOST_WIDE_INT *off);
2769
2770 /* The debug hooks structure. */
2771
2772 const struct gcc_debug_hooks dwarf2_debug_hooks =
2773 {
2774 dwarf2out_init,
2775 dwarf2out_finish,
2776 dwarf2out_early_finish,
2777 dwarf2out_assembly_start,
2778 dwarf2out_define,
2779 dwarf2out_undef,
2780 dwarf2out_start_source_file,
2781 dwarf2out_end_source_file,
2782 dwarf2out_begin_block,
2783 dwarf2out_end_block,
2784 dwarf2out_ignore_block,
2785 dwarf2out_source_line,
2786 dwarf2out_begin_prologue,
2787 #if VMS_DEBUGGING_INFO
2788 dwarf2out_vms_end_prologue,
2789 dwarf2out_vms_begin_epilogue,
2790 #else
2791 debug_nothing_int_charstar,
2792 debug_nothing_int_charstar,
2793 #endif
2794 dwarf2out_end_epilogue,
2795 dwarf2out_begin_function,
2796 dwarf2out_end_function, /* end_function */
2797 dwarf2out_register_main_translation_unit,
2798 dwarf2out_function_decl, /* function_decl */
2799 dwarf2out_early_global_decl,
2800 dwarf2out_late_global_decl,
2801 dwarf2out_type_decl, /* type_decl */
2802 dwarf2out_imported_module_or_decl,
2803 dwarf2out_die_ref_for_decl,
2804 dwarf2out_register_external_die,
2805 debug_nothing_tree, /* deferred_inline_function */
2806 /* The DWARF 2 backend tries to reduce debugging bloat by not
2807 emitting the abstract description of inline functions until
2808 something tries to reference them. */
2809 dwarf2out_abstract_function, /* outlining_inline_function */
2810 debug_nothing_rtx_code_label, /* label */
2811 debug_nothing_int, /* handle_pch */
2812 dwarf2out_var_location,
2813 dwarf2out_inline_entry, /* inline_entry */
2814 dwarf2out_size_function, /* size_function */
2815 dwarf2out_switch_text_section,
2816 dwarf2out_set_name,
2817 1, /* start_end_main_source_file */
2818 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2819 };
2820
2821 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2822 {
2823 dwarf2out_init,
2824 debug_nothing_charstar,
2825 debug_nothing_charstar,
2826 dwarf2out_assembly_start,
2827 debug_nothing_int_charstar,
2828 debug_nothing_int_charstar,
2829 debug_nothing_int_charstar,
2830 debug_nothing_int,
2831 debug_nothing_int_int, /* begin_block */
2832 debug_nothing_int_int, /* end_block */
2833 debug_true_const_tree, /* ignore_block */
2834 dwarf2out_source_line, /* source_line */
2835 debug_nothing_int_int_charstar, /* begin_prologue */
2836 debug_nothing_int_charstar, /* end_prologue */
2837 debug_nothing_int_charstar, /* begin_epilogue */
2838 debug_nothing_int_charstar, /* end_epilogue */
2839 debug_nothing_tree, /* begin_function */
2840 debug_nothing_int, /* end_function */
2841 debug_nothing_tree, /* register_main_translation_unit */
2842 debug_nothing_tree, /* function_decl */
2843 debug_nothing_tree, /* early_global_decl */
2844 debug_nothing_tree, /* late_global_decl */
2845 debug_nothing_tree_int, /* type_decl */
2846 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2847 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2848 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2849 debug_nothing_tree, /* deferred_inline_function */
2850 debug_nothing_tree, /* outlining_inline_function */
2851 debug_nothing_rtx_code_label, /* label */
2852 debug_nothing_int, /* handle_pch */
2853 debug_nothing_rtx_insn, /* var_location */
2854 debug_nothing_tree, /* inline_entry */
2855 debug_nothing_tree, /* size_function */
2856 debug_nothing_void, /* switch_text_section */
2857 debug_nothing_tree_tree, /* set_name */
2858 0, /* start_end_main_source_file */
2859 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2860 };
2861
2862 /* NOTE: In the comments in this file, many references are made to
2863 "Debugging Information Entries". This term is abbreviated as `DIE'
2864 throughout the remainder of this file. */
2865
2866 /* An internal representation of the DWARF output is built, and then
2867 walked to generate the DWARF debugging info. The walk of the internal
2868 representation is done after the entire program has been compiled.
2869 The types below are used to describe the internal representation. */
2870
2871 /* Whether to put type DIEs into their own section .debug_types instead
2872 of making them part of the .debug_info section. Only used for
2873 DWARF 4 or higher, and only when the user hasn't disabled it through
2874 -fno-debug-types-section. It is more efficient to put types into
2875 separate comdat sections since the linker will then be able to
2876 remove duplicates. But not all tools support .debug_types sections
2877 yet. For DWARF 5 or higher .debug_types doesn't exist any more;
2878 type units use the DW_UT_type unit type in the .debug_info section.
2879 For late LTO debug there should be almost no types emitted, so avoid
2880 enabling -fdebug-types-section there. */
2881
2882 #define use_debug_types (dwarf_version >= 4 \
2883 && flag_debug_types_section \
2884 && !in_lto_p)
2885
2886 /* Various DIE's use offsets relative to the beginning of the
2887 .debug_info section to refer to each other. */
2888
2889 typedef long int dw_offset;
2890
2891 struct comdat_type_node;
2892
2893 /* The entries in the line_info table more-or-less mirror the opcodes
2894 that are used in the real dwarf line table. Arrays of these entries
2895 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2896 supported. */
2897
2898 enum dw_line_info_opcode {
2899 /* Emit DW_LNE_set_address; the operand is the label index. */
2900 LI_set_address,
2901
2902 /* Emit a row to the matrix with the given line. This may be done
2903 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2904 special opcodes. */
2905 LI_set_line,
2906
2907 /* Emit a DW_LNS_set_file. */
2908 LI_set_file,
2909
2910 /* Emit a DW_LNS_set_column. */
2911 LI_set_column,
2912
2913 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2914 LI_negate_stmt,
2915
2916 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2917 LI_set_prologue_end,
2918 LI_set_epilogue_begin,
2919
2920 /* Emit a DW_LNE_set_discriminator. */
2921 LI_set_discriminator,
2922
2923 /* Output a Fixed Advance PC; the target PC is the label index; the
2924 base PC is the previous LI_adv_address or LI_set_address entry.
2925 We only use this when emitting debug views without assembler
2926 support, at explicit user request. Ideally, we should only use
2927 it when the offset might be zero but we can't tell: it's the only
2928 way to maybe change the PC without resetting the view number. */
2929 LI_adv_address
2930 };
2931
2932 typedef struct GTY(()) dw_line_info_struct {
2933 enum dw_line_info_opcode opcode;
2934 unsigned int val;
2935 } dw_line_info_entry;
2936
2937
2938 struct GTY(()) dw_line_info_table {
2939 /* The label that marks the end of this section. */
2940 const char *end_label;
2941
2942 /* The values for the last row of the matrix, as collected in the table.
2943 These are used to minimize the changes to the next row. */
2944 unsigned int file_num;
2945 unsigned int line_num;
2946 unsigned int column_num;
2947 int discrim_num;
2948 bool is_stmt;
2949 bool in_use;
2950
2951 /* This denotes the NEXT view number.
2952
2953 If it is 0, it is known that the NEXT view will be the first view
2954 at the given PC.
2955
2956 If it is -1, we're forcing the view number to be reset, e.g. at a
2957 function entry.
2958
2959 The meaning of other nonzero values depends on whether we're
2960 computing views internally or leaving it for the assembler to do
2961 so. If we're emitting them internally, view denotes the view
2962 number since the last known advance of PC. If we're leaving it
2963 for the assembler, it denotes the LVU label number that we're
2964 going to ask the assembler to assign. */
2965 var_loc_view view;
2966
2967 /* This counts the number of symbolic views emitted in this table
2968 since the latest view reset. Its max value, over all tables,
2969 sets symview_upper_bound. */
2970 var_loc_view symviews_since_reset;
2971
2972 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2973 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2974 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
2975 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
2976
2977 vec<dw_line_info_entry, va_gc> *entries;
2978 };
2979
2980 /* This is an upper bound for view numbers that the assembler may
2981 assign to symbolic views output in this translation. It is used to
2982 decide how big a field to use to represent view numbers in
2983 symview-classed attributes. */
2984
2985 static var_loc_view symview_upper_bound;
2986
2987 /* If we're keeping track of location views and their reset points, and
2988 INSN is a reset point (i.e., it necessarily advances the PC), mark
2989 the next view in TABLE as reset. */
2990
2991 static void
2992 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
2993 {
2994 if (!debug_internal_reset_location_views)
2995 return;
2996
2997 /* Maybe turn (part of?) this test into a default target hook. */
2998 int reset = 0;
2999
3000 if (targetm.reset_location_view)
3001 reset = targetm.reset_location_view (insn);
3002
3003 if (reset)
3004 ;
3005 else if (JUMP_TABLE_DATA_P (insn))
3006 reset = 1;
3007 else if (GET_CODE (insn) == USE
3008 || GET_CODE (insn) == CLOBBER
3009 || GET_CODE (insn) == ASM_INPUT
3010 || asm_noperands (insn) >= 0)
3011 ;
3012 else if (get_attr_min_length (insn) > 0)
3013 reset = 1;
3014
3015 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3016 RESET_NEXT_VIEW (table->view);
3017 }
3018
3019 /* Each DIE attribute has a field specifying the attribute kind,
3020 a link to the next attribute in the chain, and an attribute value.
3021 Attributes are typically linked below the DIE they modify. */
3022
3023 typedef struct GTY(()) dw_attr_struct {
3024 enum dwarf_attribute dw_attr;
3025 dw_val_node dw_attr_val;
3026 }
3027 dw_attr_node;
3028
3029
3030 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3031 The children of each node form a circular list linked by
3032 die_sib. die_child points to the node *before* the "first" child node. */
3033
3034 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3035 union die_symbol_or_type_node
3036 {
3037 const char * GTY ((tag ("0"))) die_symbol;
3038 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3039 }
3040 GTY ((desc ("%0.comdat_type_p"))) die_id;
3041 vec<dw_attr_node, va_gc> *die_attr;
3042 dw_die_ref die_parent;
3043 dw_die_ref die_child;
3044 dw_die_ref die_sib;
3045 dw_die_ref die_definition; /* ref from a specification to its definition */
3046 dw_offset die_offset;
3047 unsigned long die_abbrev;
3048 int die_mark;
3049 unsigned int decl_id;
3050 enum dwarf_tag die_tag;
3051 /* Die is used and must not be pruned as unused. */
3052 BOOL_BITFIELD die_perennial_p : 1;
3053 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3054 /* For an external ref to die_symbol: set if die_offset contains an extra
3055 offset to that symbol. */
3056 BOOL_BITFIELD with_offset : 1;
3057 /* Whether this DIE was removed from the DIE tree, for example via
3058 prune_unused_types. We don't consider those present from the
3059 DIE lookup routines. */
3060 BOOL_BITFIELD removed : 1;
3061 /* Lots of spare bits. */
3062 }
3063 die_node;
3064
3065 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3066 static bool early_dwarf;
3067 static bool early_dwarf_finished;
3068 struct set_early_dwarf {
3069 bool saved;
3070 set_early_dwarf () : saved(early_dwarf)
3071 {
3072 gcc_assert (! early_dwarf_finished);
3073 early_dwarf = true;
3074 }
3075 ~set_early_dwarf () { early_dwarf = saved; }
3076 };
3077
3078 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3079 #define FOR_EACH_CHILD(die, c, expr) do { \
3080 c = die->die_child; \
3081 if (c) do { \
3082 c = c->die_sib; \
3083 expr; \
3084 } while (c != die->die_child); \
3085 } while (0)
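/* Illustrative use of the macro above (editorial sketch): visiting each
   child C of DIE exactly once, in sibling order, e.g.

     FOR_EACH_CHILD (die, c, print_die (c, outfile));

   where C must be a dw_die_ref declared by the caller.  */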
3086
3087 /* The pubname structure */
3088
3089 typedef struct GTY(()) pubname_struct {
3090 dw_die_ref die;
3091 const char *name;
3092 }
3093 pubname_entry;
3094
3095
3096 struct GTY(()) dw_ranges {
3097 const char *label;
3098 /* If this is positive, it's a block number, otherwise it's a
3099 bitwise-negated index into dw_ranges_by_label. */
3100 int num;
3101 /* Index for the range list for DW_FORM_rnglistx. */
3102 unsigned int idx : 31;
3103 /* True if this range might be in a different section
3104 from the previous entry. */
3105 unsigned int maybe_new_sec : 1;
3106 };
3107
3108 /* A structure to hold a macinfo entry. */
3109
3110 typedef struct GTY(()) macinfo_struct {
3111 unsigned char code;
3112 unsigned HOST_WIDE_INT lineno;
3113 const char *info;
3114 }
3115 macinfo_entry;
3116
3117
3118 struct GTY(()) dw_ranges_by_label {
3119 const char *begin;
3120 const char *end;
3121 };
3122
3123 /* The comdat type node structure. */
3124 struct GTY(()) comdat_type_node
3125 {
3126 dw_die_ref root_die;
3127 dw_die_ref type_die;
3128 dw_die_ref skeleton_die;
3129 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3130 comdat_type_node *next;
3131 };
3132
3133 /* A list of DIEs for which we can't determine ancestry (parent_die
3134 field) just yet. Later in dwarf2out_finish we will fill in the
3135 missing bits. */
3136 typedef struct GTY(()) limbo_die_struct {
3137 dw_die_ref die;
3138 /* The tree for which this DIE was created. We use this to
3139 determine ancestry later. */
3140 tree created_for;
3141 struct limbo_die_struct *next;
3142 }
3143 limbo_die_node;
3144
3145 typedef struct skeleton_chain_struct
3146 {
3147 dw_die_ref old_die;
3148 dw_die_ref new_die;
3149 struct skeleton_chain_struct *parent;
3150 }
3151 skeleton_chain_node;
3152
3153 /* Define a macro which returns nonzero for a TYPE_DECL which was
3154 implicitly generated for a type.
3155
3156 Note that, unlike the C front-end (which generates a NULL named
3157 TYPE_DECL node for each complete tagged type, each array type,
3158 and each function type node created) the C++ front-end generates
3159 a _named_ TYPE_DECL node for each tagged type node created.
3160 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3161 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3162 front-end, but for each type, tagged or not. */
3163
3164 #define TYPE_DECL_IS_STUB(decl) \
3165 (DECL_NAME (decl) == NULL_TREE \
3166 || (DECL_ARTIFICIAL (decl) \
3167 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3168 /* This is necessary for stub decls that \
3169 appear in nested inline functions. */ \
3170 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3171 && (decl_ultimate_origin (decl) \
3172 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3173
3174 /* Information concerning the compilation unit's programming
3175 language, and compiler version. */
3176
3177 /* Fixed size portion of the DWARF compilation unit header. */
3178 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3179 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3180 + (dwarf_version >= 5 ? 4 : 3))
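/* Worked example (illustrative, editorial addition; assuming 32-bit DWARF,
   where DWARF_INITIAL_LENGTH_SIZE and DWARF_OFFSET_SIZE are both 4): the
   fixed CU header is 4 + 4 + 3 = 11 bytes for DWARF 2-4 (2-byte version
   + 1-byte address size) and 4 + 4 + 4 = 12 bytes for DWARF 5 (2-byte
   version + 1-byte unit type + 1-byte address size).  */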
3181
3182 /* Fixed size portion of the DWARF comdat type unit header. */
3183 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3184 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3185 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3186
3187 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3188 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3189 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3190
3191 /* Fixed size portion of public names info. */
3192 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3193
3194 /* Fixed size portion of the address range info. */
3195 #define DWARF_ARANGES_HEADER_SIZE \
3196 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3197 DWARF2_ADDR_SIZE * 2) \
3198 - DWARF_INITIAL_LENGTH_SIZE)
3199
3200 /* Size of padding portion in the address range info. It must be
3201 aligned to twice the pointer size. */
3202 #define DWARF_ARANGES_PAD_SIZE \
3203 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3204 DWARF2_ADDR_SIZE * 2) \
3205 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3206
3207 /* Use assembler line directives if available. */
3208 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3209 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3210 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3211 #else
3212 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3213 #endif
3214 #endif
3215
3216 /* Use assembler views in line directives if available. */
3217 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3218 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3219 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3220 #else
3221 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3222 #endif
3223 #endif
3224
3225 /* Return true if GCC configure detected assembler support for .loc. */
3226
3227 bool
3228 dwarf2out_default_as_loc_support (void)
3229 {
3230 return DWARF2_ASM_LINE_DEBUG_INFO;
3231 #if (GCC_VERSION >= 3000)
3232 # undef DWARF2_ASM_LINE_DEBUG_INFO
3233 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3234 #endif
3235 }
3236
3237 /* Return true if GCC configure detected assembler support for views
3238 in .loc directives. */
3239
3240 bool
3241 dwarf2out_default_as_locview_support (void)
3242 {
3243 return DWARF2_ASM_VIEW_DEBUG_INFO;
3244 #if (GCC_VERSION >= 3000)
3245 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3246 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3247 #endif
3248 }
3249
3250 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3251 view computation, and it refers to a view identifier for which we
3252 will not emit a label because it is known to map to a view number
3253 zero. We won't allocate the bitmap if we're not using assembler
3254 support for location views, but we have to make the variable
3255 visible for GGC and for code that will be optimized out for lack of
3256 support but that's still parsed and compiled. We could abstract it
3257 out with macros, but it's not worth it. */
3258 static GTY(()) bitmap zero_view_p;
3259
3260 /* Evaluate to TRUE iff N is known to identify the first location view
3261 at its PC. When not using assembler location view computation,
3262 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3263 and view label numbers recorded in it are the ones known to be
3264 zero. */
3265 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3266 || (N) == (var_loc_view)-1 \
3267 || (zero_view_p \
3268 && bitmap_bit_p (zero_view_p, (N))))
3269
3270 /* Return true iff we're to emit .loc directives for the assembler to
3271 generate line number sections.
3272
3273 When we're not emitting views, all we need from the assembler is
3274 support for .loc directives.
3275
3276 If we are emitting views, we can only use the assembler's .loc
3277 support if it also supports views.
3278
3279 When the compiler is emitting the line number programs and
3280 computing view numbers itself, it resets view numbers at known PC
3281 changes and counts from that, and then it emits view numbers as
3282 literal constants in locviewlists. There are cases in which the
3283 compiler is not sure about PC changes, e.g. when extra alignment is
3284 requested for a label. In these cases, the compiler may not reset
3285 the view counter, and the potential PC advance in the line number
3286 program will use an opcode that does not reset the view counter
3287 even if the PC actually changes, so that compiler and debug info
3288 consumer can keep view numbers in sync.
3289
3290 When the compiler defers view computation to the assembler, it
3291 emits symbolic view numbers in locviewlists, with the exception of
3292 views known to be zero (forced resets, or reset after
3293 compiler-visible PC changes): instead of emitting symbols for
3294 these, we emit literal zero and assert the assembler agrees with
3295 the compiler's assessment. We could use symbolic views everywhere,
3296 instead of special-casing zero views, but then we'd be unable to
3297 optimize out locviewlists that contain only zeros. */
3298
3299 static bool
3300 output_asm_line_debug_info (void)
3301 {
3302 return (dwarf2out_as_loc_support
3303 && (dwarf2out_as_locview_support
3304 || !debug_variable_location_views));
3305 }
3306
3307 /* Minimum line offset in a special line info. opcode.
3308 This value was chosen to give a reasonable range of values. */
3309 #define DWARF_LINE_BASE -10
3310
3311 /* First special line opcode - leave room for the standard opcodes. */
3312 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3313
3314 /* Range of line offsets in a special line info. opcode. */
3315 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
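/* For reference (from the DWARF line number program description; editorial
   addition): a special opcode encodes a line delta and an operation advance
   together as

     opcode = (line_delta - DWARF_LINE_BASE)
	      + DWARF_LINE_RANGE * operation_advance
	      + DWARF_LINE_OPCODE_BASE

   and is only usable when the result fits in a single byte (<= 255), which
   the values above are chosen to make likely.  */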
3316
3317 /* Flag that indicates the initial value of the is_stmt_start flag.
3318 In the present implementation, we do not mark any lines as
3319 the beginning of a source statement, because that information
3320 is not made available by the GCC front-end. */
3321 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3322
3323 /* Maximum number of operations per instruction bundle. */
3324 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3325 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3326 #endif
3327
3328 /* This location is used by calc_die_sizes() to keep track of
3329 the offset of each DIE within the .debug_info section. */
3330 static unsigned long next_die_offset;
3331
3332 /* Record the root of the DIE's built for the current compilation unit. */
3333 static GTY(()) dw_die_ref single_comp_unit_die;
3334
3335 /* A list of type DIEs that have been separated into comdat sections. */
3336 static GTY(()) comdat_type_node *comdat_type_list;
3337
3338 /* A list of CU DIEs that have been separated. */
3339 static GTY(()) limbo_die_node *cu_die_list;
3340
3341 /* A list of DIEs with a NULL parent waiting to be relocated. */
3342 static GTY(()) limbo_die_node *limbo_die_list;
3343
3344 /* A list of DIEs for which we may have to generate
3345 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3346 static GTY(()) limbo_die_node *deferred_asm_name;
3347
3348 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3349 {
3350 typedef const char *compare_type;
3351
3352 static hashval_t hash (dwarf_file_data *);
3353 static bool equal (dwarf_file_data *, const char *);
3354 };
3355
3356 /* Filenames referenced by this compilation unit. */
3357 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3358
3359 struct decl_die_hasher : ggc_ptr_hash<die_node>
3360 {
3361 typedef tree compare_type;
3362
3363 static hashval_t hash (die_node *);
3364 static bool equal (die_node *, tree);
3365 };
3366 /* A hash table of references to DIE's that describe declarations.
3367 The key is a DECL_UID() which is a unique number identifying each decl. */
3368 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3369
3370 struct GTY ((for_user)) variable_value_struct {
3371 unsigned int decl_id;
3372 vec<dw_die_ref, va_gc> *dies;
3373 };
3374
3375 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3376 {
3377 typedef tree compare_type;
3378
3379 static hashval_t hash (variable_value_struct *);
3380 static bool equal (variable_value_struct *, tree);
3381 };
3382 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3383 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3384 the DECL_CONTEXT of the referenced VAR_DECLs. */
3385 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3386
3387 struct block_die_hasher : ggc_ptr_hash<die_struct>
3388 {
3389 static hashval_t hash (die_struct *);
3390 static bool equal (die_struct *, die_struct *);
3391 };
3392
3393 /* A hash table of references to DIE's that describe COMMON blocks.
3394 The key is DECL_UID() ^ die_parent. */
3395 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3396
3397 typedef struct GTY(()) die_arg_entry_struct {
3398 dw_die_ref die;
3399 tree arg;
3400 } die_arg_entry;
3401
3402
3403 /* Node of the variable location list. */
3404 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3405 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3406 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3407 in mode of the EXPR_LIST node and first EXPR_LIST operand
3408 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3409 location or NULL for padding. For larger bitsizes,
3410 mode is 0 and first operand is a CONCAT with bitsize
3411 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3412 NULL as second operand. */
3413 rtx GTY (()) loc;
3414 const char * GTY (()) label;
3415 struct var_loc_node * GTY (()) next;
3416 var_loc_view view;
3417 };
3418
3419 /* Variable location list. */
3420 struct GTY ((for_user)) var_loc_list_def {
3421 struct var_loc_node * GTY (()) first;
3422
3423 /* Pointer to the last or last-but-one element of the
3424 chained list. If the list is empty, both first and
3425 last are NULL. If the list contains just one node,
3426 or the last node is certainly not redundant, it points
3427 to the last node; otherwise it points to the last but one.
3428 Do not mark it for GC because it is marked through the chain. */
3429 struct var_loc_node * GTY ((skip ("%h"))) last;
3430
3431 /* Pointer to the last element before a section switch;
3432 if NULL, either sections weren't switched or first
3433 is after the section switch. */
3434 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3435
3436 /* DECL_UID of the variable decl. */
3437 unsigned int decl_id;
3438 };
3439 typedef struct var_loc_list_def var_loc_list;
3440
3441 /* Call argument location list. */
3442 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3443 rtx GTY (()) call_arg_loc_note;
3444 const char * GTY (()) label;
3445 tree GTY (()) block;
3446 bool tail_call_p;
3447 rtx GTY (()) symbol_ref;
3448 struct call_arg_loc_node * GTY (()) next;
3449 };
3450
3451
3452 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3453 {
3454 typedef const_tree compare_type;
3455
3456 static hashval_t hash (var_loc_list *);
3457 static bool equal (var_loc_list *, const_tree);
3458 };
3459
3460 /* Table of decl location linked lists. */
3461 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3462
3463 /* Head and tail of call_arg_loc chain. */
3464 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3465 static struct call_arg_loc_node *call_arg_loc_last;
3466
3467 /* Number of call sites in the current function. */
3468 static int call_site_count = -1;
3469 /* Number of tail call sites in the current function. */
3470 static int tail_call_site_count = -1;
3471
3472 /* A cached location list. */
3473 struct GTY ((for_user)) cached_dw_loc_list_def {
3474 /* The DECL_UID of the decl that this entry describes. */
3475 unsigned int decl_id;
3476
3477 /* The cached location list. */
3478 dw_loc_list_ref loc_list;
3479 };
3480 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3481
3482 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3483 {
3484
3485 typedef const_tree compare_type;
3486
3487 static hashval_t hash (cached_dw_loc_list *);
3488 static bool equal (cached_dw_loc_list *, const_tree);
3489 };
3490
3491 /* Table of cached location lists. */
3492 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3493
3494 /* A vector of references to DIE's that are uniquely identified by their tag,
3495 presence/absence of children DIE's, and list of attribute/value pairs. */
3496 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3497
3498 /* A hash map to remember the stack usage for DWARF procedures. The value
3499 stored is the stack size difference between before the DWARF procedure
3500 invocation and after it returned. In other words, for a DWARF procedure
3501 that consumes N stack slots and that pushes M ones, this stores M - N. */
3502 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
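
/* For instance (illustrative only): a DWARF procedure that pops two entries
   from the DWARF expression stack and pushes one result would be recorded
   in this map with the value 1 - 2 = -1.  */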
3503
3504 /* A global counter for generating labels for line number data. */
3505 static unsigned int line_info_label_num;
3506
3507 /* The current table to which we should emit line number information
3508 for the current function. This will be set up at the beginning of
3509 assembly for the function. */
3510 static GTY(()) dw_line_info_table *cur_line_info_table;
3511
3512 /* The two default tables of line number info. */
3513 static GTY(()) dw_line_info_table *text_section_line_info;
3514 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3515
3516 /* The set of all non-default tables of line number info. */
3517 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3518
3519 /* A flag to tell pubnames/types export if there is an info section to
3520 refer to. */
3521 static bool info_section_emitted;
3522
3523 /* A pointer to the base of a table that contains a list of publicly
3524 accessible names. */
3525 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3526
3527 /* A pointer to the base of a table that contains a list of publicly
3528 accessible types. */
3529 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3530
3531 /* A pointer to the base of a table that contains a list of macro
3532 defines/undefines (and file start/end markers). */
3533 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3534
3535 /* True if .debug_macinfo or .debug_macros section is going to be
3536 emitted. */
3537 #define have_macinfo \
3538 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3539 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3540 && !macinfo_table->is_empty ())
3541
3542 /* Vector of dies for which we should generate .debug_ranges info. */
3543 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3544
3545 /* Vector of pairs of labels referenced in ranges_table. */
3546 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3547
3548 /* Whether we have location lists that need outputting. */
3549 static GTY(()) bool have_location_lists;
3550
3551 /* Unique label counter. */
3552 static GTY(()) unsigned int loclabel_num;
3553
3554 /* Unique label counter for point-of-call tables. */
3555 static GTY(()) unsigned int poc_label_num;
3556
3557 /* The last file entry emitted by maybe_emit_file(). */
3558 static GTY(()) struct dwarf_file_data * last_emitted_file;
3559
3560 /* Number of internal labels generated by gen_internal_sym(). */
3561 static GTY(()) int label_num;
3562
3563 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3564
3565 /* Instances of generic types for which we need to generate debug
3566 info that describe their generic parameters and arguments. That
3567 generation needs to happen once all types are properly laid out so
3568 we do it at the end of compilation. */
3569 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3570
3571 /* Offset from the "steady-state frame pointer" to the frame base,
3572 within the current function. */
3573 static poly_int64 frame_pointer_fb_offset;
3574 static bool frame_pointer_fb_offset_valid;
3575
3576 static vec<dw_die_ref> base_types;
3577
3578 /* Flags to represent a set of attribute classes for attributes that represent
3579 a scalar value (bounds, pointers, ...). */
3580 enum dw_scalar_form
3581 {
3582 dw_scalar_form_constant = 0x01,
3583 dw_scalar_form_exprloc = 0x02,
3584 dw_scalar_form_reference = 0x04
3585 };
3586
3587 /* Forward declarations for functions defined in this file. */
3588
3589 static int is_pseudo_reg (const_rtx);
3590 static tree type_main_variant (tree);
3591 static int is_tagged_type (const_tree);
3592 static const char *dwarf_tag_name (unsigned);
3593 static const char *dwarf_attr_name (unsigned);
3594 static const char *dwarf_form_name (unsigned);
3595 static tree decl_ultimate_origin (const_tree);
3596 static tree decl_class_context (tree);
3597 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3598 static inline enum dw_val_class AT_class (dw_attr_node *);
3599 static inline unsigned int AT_index (dw_attr_node *);
3600 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3601 static inline unsigned AT_flag (dw_attr_node *);
3602 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3603 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3604 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3605 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3606 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3607 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3608 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3609 unsigned int, unsigned char *);
3610 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3611 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3612 static inline const char *AT_string (dw_attr_node *);
3613 static enum dwarf_form AT_string_form (dw_attr_node *);
3614 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3615 static void add_AT_specification (dw_die_ref, dw_die_ref);
3616 static inline dw_die_ref AT_ref (dw_attr_node *);
3617 static inline int AT_ref_external (dw_attr_node *);
3618 static inline void set_AT_ref_external (dw_attr_node *, int);
3619 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3620 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3621 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3622 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3623 dw_loc_list_ref);
3624 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3625 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3626 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3627 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3628 static void remove_addr_table_entry (addr_table_entry *);
3629 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3630 static inline rtx AT_addr (dw_attr_node *);
3631 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3632 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3633 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3634 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3635 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3636 const char *);
3637 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3638 unsigned HOST_WIDE_INT);
3639 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3640 unsigned long, bool);
3641 static inline const char *AT_lbl (dw_attr_node *);
3642 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3643 static const char *get_AT_low_pc (dw_die_ref);
3644 static const char *get_AT_hi_pc (dw_die_ref);
3645 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3646 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3647 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3648 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3649 static bool is_cxx (void);
3650 static bool is_cxx (const_tree);
3651 static bool is_fortran (void);
3652 static bool is_ada (void);
3653 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3654 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3655 static void add_child_die (dw_die_ref, dw_die_ref);
3656 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3657 static dw_die_ref lookup_type_die (tree);
3658 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3659 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3660 static void equate_type_number_to_die (tree, dw_die_ref);
3661 static dw_die_ref lookup_decl_die (tree);
3662 static var_loc_list *lookup_decl_loc (const_tree);
3663 static void equate_decl_number_to_die (tree, dw_die_ref);
3664 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3665 static void print_spaces (FILE *);
3666 static void print_die (dw_die_ref, FILE *);
3667 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3668 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3669 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3670 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3671 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3672 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3673 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3674 struct md5_ctx *, int *);
3675 struct checksum_attributes;
3676 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3677 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3678 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3679 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3680 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3681 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3682 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3683 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3684 static int is_type_die (dw_die_ref);
3685 static int is_comdat_die (dw_die_ref);
3686 static inline bool is_template_instantiation (dw_die_ref);
3687 static int is_declaration_die (dw_die_ref);
3688 static int should_move_die_to_comdat (dw_die_ref);
3689 static dw_die_ref clone_as_declaration (dw_die_ref);
3690 static dw_die_ref clone_die (dw_die_ref);
3691 static dw_die_ref clone_tree (dw_die_ref);
3692 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3693 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3694 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3695 static dw_die_ref generate_skeleton (dw_die_ref);
3696 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3697 dw_die_ref,
3698 dw_die_ref);
3699 static void break_out_comdat_types (dw_die_ref);
3700 static void copy_decls_for_unworthy_types (dw_die_ref);
3701
3702 static void add_sibling_attributes (dw_die_ref);
3703 static void output_location_lists (dw_die_ref);
3704 static int constant_size (unsigned HOST_WIDE_INT);
3705 static unsigned long size_of_die (dw_die_ref);
3706 static void calc_die_sizes (dw_die_ref);
3707 static void calc_base_type_die_sizes (void);
3708 static void mark_dies (dw_die_ref);
3709 static void unmark_dies (dw_die_ref);
3710 static void unmark_all_dies (dw_die_ref);
3711 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3712 static unsigned long size_of_aranges (void);
3713 static enum dwarf_form value_format (dw_attr_node *);
3714 static void output_value_format (dw_attr_node *);
3715 static void output_abbrev_section (void);
3716 static void output_die_abbrevs (unsigned long, dw_die_ref);
3717 static void output_die (dw_die_ref);
3718 static void output_compilation_unit_header (enum dwarf_unit_type);
3719 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3720 static void output_comdat_type_unit (comdat_type_node *, bool);
3721 static const char *dwarf2_name (tree, int);
3722 static void add_pubname (tree, dw_die_ref);
3723 static void add_enumerator_pubname (const char *, dw_die_ref);
3724 static void add_pubname_string (const char *, dw_die_ref);
3725 static void add_pubtype (tree, dw_die_ref);
3726 static void output_pubnames (vec<pubname_entry, va_gc> *);
3727 static void output_aranges (void);
3728 static unsigned int add_ranges (const_tree, bool = false);
3729 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3730 bool *, bool);
3731 static void output_ranges (void);
3732 static dw_line_info_table *new_line_info_table (void);
3733 static void output_line_info (bool);
3734 static void output_file_names (void);
3735 static dw_die_ref base_type_die (tree, bool);
3736 static int is_base_type (tree);
3737 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3738 static int decl_quals (const_tree);
3739 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3740 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3741 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3742 static int type_is_enum (const_tree);
3743 static unsigned int dbx_reg_number (const_rtx);
3744 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3745 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3746 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3747 enum var_init_status);
3748 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3749 enum var_init_status);
3750 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3751 enum var_init_status);
3752 static int is_based_loc (const_rtx);
3753 static bool resolve_one_addr (rtx *);
3754 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3755 enum var_init_status);
3756 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3757 enum var_init_status);
3758 struct loc_descr_context;
3759 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3760 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3761 static dw_loc_list_ref loc_list_from_tree (tree, int,
3762 struct loc_descr_context *);
3763 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3764 struct loc_descr_context *);
3765 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3766 static tree field_type (const_tree);
3767 static unsigned int simple_type_align_in_bits (const_tree);
3768 static unsigned int simple_decl_align_in_bits (const_tree);
3769 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3770 struct vlr_context;
3771 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3772 HOST_WIDE_INT *);
3773 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3774 dw_loc_list_ref);
3775 static void add_data_member_location_attribute (dw_die_ref, tree,
3776 struct vlr_context *);
3777 static bool add_const_value_attribute (dw_die_ref, rtx);
3778 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3779 static void insert_wide_int (const wide_int &, unsigned char *, int);
3780 static void insert_float (const_rtx, unsigned char *);
3781 static rtx rtl_for_decl_location (tree);
3782 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3783 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3784 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3785 static void add_name_attribute (dw_die_ref, const char *);
3786 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3787 static void add_comp_dir_attribute (dw_die_ref);
3788 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3789 struct loc_descr_context *);
3790 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3791 struct loc_descr_context *);
3792 static void add_subscript_info (dw_die_ref, tree, bool);
3793 static void add_byte_size_attribute (dw_die_ref, tree);
3794 static void add_alignment_attribute (dw_die_ref, tree);
3795 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3796 struct vlr_context *);
3797 static void add_bit_size_attribute (dw_die_ref, tree);
3798 static void add_prototyped_attribute (dw_die_ref, tree);
3799 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3800 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3801 static void add_src_coords_attributes (dw_die_ref, tree);
3802 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3803 static void add_discr_value (dw_die_ref, dw_discr_value *);
3804 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3805 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3806 static void push_decl_scope (tree);
3807 static void pop_decl_scope (void);
3808 static dw_die_ref scope_die_for (tree, dw_die_ref);
3809 static inline int local_scope_p (dw_die_ref);
3810 static inline int class_scope_p (dw_die_ref);
3811 static inline int class_or_namespace_scope_p (dw_die_ref);
3812 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3813 static void add_calling_convention_attribute (dw_die_ref, tree);
3814 static const char *type_tag (const_tree);
3815 static tree member_declared_type (const_tree);
3816 #if 0
3817 static const char *decl_start_label (tree);
3818 #endif
3819 static void gen_array_type_die (tree, dw_die_ref);
3820 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3821 #if 0
3822 static void gen_entry_point_die (tree, dw_die_ref);
3823 #endif
3824 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3825 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3826 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3827 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3828 static void gen_formal_types_die (tree, dw_die_ref);
3829 static void gen_subprogram_die (tree, dw_die_ref);
3830 static void gen_variable_die (tree, tree, dw_die_ref);
3831 static void gen_const_die (tree, dw_die_ref);
3832 static void gen_label_die (tree, dw_die_ref);
3833 static void gen_lexical_block_die (tree, dw_die_ref);
3834 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3835 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3836 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3837 static dw_die_ref gen_compile_unit_die (const char *);
3838 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3839 static void gen_member_die (tree, dw_die_ref);
3840 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3841 enum debug_info_usage);
3842 static void gen_subroutine_type_die (tree, dw_die_ref);
3843 static void gen_typedef_die (tree, dw_die_ref);
3844 static void gen_type_die (tree, dw_die_ref);
3845 static void gen_block_die (tree, dw_die_ref);
3846 static void decls_for_scope (tree, dw_die_ref);
3847 static bool is_naming_typedef_decl (const_tree);
3848 static inline dw_die_ref get_context_die (tree);
3849 static void gen_namespace_die (tree, dw_die_ref);
3850 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3851 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3852 static dw_die_ref force_decl_die (tree);
3853 static dw_die_ref force_type_die (tree);
3854 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3855 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3856 static struct dwarf_file_data * lookup_filename (const char *);
3857 static void retry_incomplete_types (void);
3858 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3859 static void gen_generic_params_dies (tree);
3860 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3861 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3862 static void splice_child_die (dw_die_ref, dw_die_ref);
3863 static int file_info_cmp (const void *, const void *);
3864 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3865 const char *, var_loc_view, const char *);
3866 static void output_loc_list (dw_loc_list_ref);
3867 static char *gen_internal_sym (const char *);
3868 static bool want_pubnames (void);
3869
3870 static void prune_unmark_dies (dw_die_ref);
3871 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3872 static void prune_unused_types_mark (dw_die_ref, int);
3873 static void prune_unused_types_walk (dw_die_ref);
3874 static void prune_unused_types_walk_attribs (dw_die_ref);
3875 static void prune_unused_types_prune (dw_die_ref);
3876 static void prune_unused_types (void);
3877 static int maybe_emit_file (struct dwarf_file_data *fd);
3878 static inline const char *AT_vms_delta1 (dw_attr_node *);
3879 static inline const char *AT_vms_delta2 (dw_attr_node *);
3880 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3881 const char *, const char *);
3882 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3883 static void gen_remaining_tmpl_value_param_die_attribute (void);
3884 static bool generic_type_p (tree);
3885 static void schedule_generic_params_dies_gen (tree t);
3886 static void gen_scheduled_generic_parms_dies (void);
3887 static void resolve_variable_values (void);
3888
3889 static const char *comp_dir_string (void);
3890
3891 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3892
3893 /* enum for tracking thread-local variables whose address is really an offset
3894 relative to the TLS pointer, which will need link-time relocation, but will
3895 not need relocation by the DWARF consumer. */
3896
3897 enum dtprel_bool
3898 {
3899 dtprel_false = 0,
3900 dtprel_true = 1
3901 };
3902
3903 /* Return the operator to use for an address of a variable. For dtprel_true, we
3904 use DW_OP_const*. For regular variables, which need both link-time
3905 relocation and consumer-level relocation (e.g., to account for shared objects
3906 loaded at a random address), we use DW_OP_addr*. */
3907
3908 static inline enum dwarf_location_atom
3909 dw_addr_op (enum dtprel_bool dtprel)
3910 {
3911 if (dtprel == dtprel_true)
3912 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3913 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3914 else
3915 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3916 }
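
/* Illustrative mapping, assuming DWARF2_ADDR_SIZE == 8: a dtprel_true address
   becomes DW_OP_const8u (or DW_OP_GNU_const_index under split debug info),
   while a regular address becomes DW_OP_addr (or DW_OP_GNU_addr_index).  */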
3917
3918 /* Return a pointer to a newly allocated address location description. If
3919 dwarf_split_debug_info is true, then record the address with the appropriate
3920 relocation. */
3921 static inline dw_loc_descr_ref
3922 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3923 {
3924 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3925
3926 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3927 ref->dw_loc_oprnd1.v.val_addr = addr;
3928 ref->dtprel = dtprel;
3929 if (dwarf_split_debug_info)
3930 ref->dw_loc_oprnd1.val_entry
3931 = add_addr_table_entry (addr,
3932 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3933 else
3934 ref->dw_loc_oprnd1.val_entry = NULL;
3935
3936 return ref;
3937 }
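
/* Hypothetical usage sketch (the real callers appear later in this file):
   for a global variable DECL whose DECL_RTL is a MEM, one could build

     dw_loc_descr_ref descr
       = new_addr_loc_descr (XEXP (DECL_RTL (decl), 0), dtprel_false);

   which yields a DW_OP_addr (or indexed) operation referencing the symbol.  */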
3938
3939 /* Section names used to hold DWARF debugging information. */
3940
3941 #ifndef DEBUG_INFO_SECTION
3942 #define DEBUG_INFO_SECTION ".debug_info"
3943 #endif
3944 #ifndef DEBUG_DWO_INFO_SECTION
3945 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3946 #endif
3947 #ifndef DEBUG_LTO_INFO_SECTION
3948 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3949 #endif
3950 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3951 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3952 #endif
3953 #ifndef DEBUG_ABBREV_SECTION
3954 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3955 #endif
3956 #ifndef DEBUG_LTO_ABBREV_SECTION
3957 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3958 #endif
3959 #ifndef DEBUG_DWO_ABBREV_SECTION
3960 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3961 #endif
3962 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3963 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3964 #endif
3965 #ifndef DEBUG_ARANGES_SECTION
3966 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3967 #endif
3968 #ifndef DEBUG_ADDR_SECTION
3969 #define DEBUG_ADDR_SECTION ".debug_addr"
3970 #endif
3971 #ifndef DEBUG_MACINFO_SECTION
3972 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3973 #endif
3974 #ifndef DEBUG_LTO_MACINFO_SECTION
3975 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3976 #endif
3977 #ifndef DEBUG_DWO_MACINFO_SECTION
3978 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3979 #endif
3980 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3981 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3982 #endif
3983 #ifndef DEBUG_MACRO_SECTION
3984 #define DEBUG_MACRO_SECTION ".debug_macro"
3985 #endif
3986 #ifndef DEBUG_LTO_MACRO_SECTION
3987 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3988 #endif
3989 #ifndef DEBUG_DWO_MACRO_SECTION
3990 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3991 #endif
3992 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3993 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3994 #endif
3995 #ifndef DEBUG_LINE_SECTION
3996 #define DEBUG_LINE_SECTION ".debug_line"
3997 #endif
3998 #ifndef DEBUG_LTO_LINE_SECTION
3999 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4000 #endif
4001 #ifndef DEBUG_DWO_LINE_SECTION
4002 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4003 #endif
4004 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4005 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4006 #endif
4007 #ifndef DEBUG_LOC_SECTION
4008 #define DEBUG_LOC_SECTION ".debug_loc"
4009 #endif
4010 #ifndef DEBUG_DWO_LOC_SECTION
4011 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4012 #endif
4013 #ifndef DEBUG_LOCLISTS_SECTION
4014 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4015 #endif
4016 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4017 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4018 #endif
4019 #ifndef DEBUG_PUBNAMES_SECTION
4020 #define DEBUG_PUBNAMES_SECTION \
4021 ((debug_generate_pub_sections == 2) \
4022 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4023 #endif
4024 #ifndef DEBUG_PUBTYPES_SECTION
4025 #define DEBUG_PUBTYPES_SECTION \
4026 ((debug_generate_pub_sections == 2) \
4027 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4028 #endif
4029 #ifndef DEBUG_STR_OFFSETS_SECTION
4030 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4031 #endif
4032 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4033 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4034 #endif
4035 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4036 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4037 #endif
4038 #ifndef DEBUG_STR_SECTION
4039 #define DEBUG_STR_SECTION ".debug_str"
4040 #endif
4041 #ifndef DEBUG_LTO_STR_SECTION
4042 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4043 #endif
4044 #ifndef DEBUG_STR_DWO_SECTION
4045 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4046 #endif
4047 #ifndef DEBUG_LTO_STR_DWO_SECTION
4048 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4049 #endif
4050 #ifndef DEBUG_RANGES_SECTION
4051 #define DEBUG_RANGES_SECTION ".debug_ranges"
4052 #endif
4053 #ifndef DEBUG_RNGLISTS_SECTION
4054 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4055 #endif
4056 #ifndef DEBUG_LINE_STR_SECTION
4057 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4058 #endif
4059 #ifndef DEBUG_LTO_LINE_STR_SECTION
4060 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4061 #endif
4062
4063 /* Standard ELF section names for compiled code and data. */
4064 #ifndef TEXT_SECTION_NAME
4065 #define TEXT_SECTION_NAME ".text"
4066 #endif
4067
4068 /* Section flags for .debug_str section. */
4069 #define DEBUG_STR_SECTION_FLAGS \
4070 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4071 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4072 : SECTION_DEBUG)
4073
4074 /* Section flags for .debug_str.dwo section. */
4075 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4076
4077 /* Attribute used to refer to the macro section. */
4078 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4079 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
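
/* In other words: DW_AT_macros for DWARF 5 or later, DW_AT_macro_info when
   -gstrict-dwarf is in effect, and the GNU extension DW_AT_GNU_macros
   otherwise.  */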
4080
4081 /* Labels we insert at the beginning of sections so that we can refer to
4082 them instead of the section names themselves. */
4083
4084 #ifndef TEXT_SECTION_LABEL
4085 #define TEXT_SECTION_LABEL "Ltext"
4086 #endif
4087 #ifndef COLD_TEXT_SECTION_LABEL
4088 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4089 #endif
4090 #ifndef DEBUG_LINE_SECTION_LABEL
4091 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4092 #endif
4093 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4094 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4095 #endif
4096 #ifndef DEBUG_INFO_SECTION_LABEL
4097 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4098 #endif
4099 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4100 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4101 #endif
4102 #ifndef DEBUG_ABBREV_SECTION_LABEL
4103 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4104 #endif
4105 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4106 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4107 #endif
4108 #ifndef DEBUG_ADDR_SECTION_LABEL
4109 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4110 #endif
4111 #ifndef DEBUG_LOC_SECTION_LABEL
4112 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4113 #endif
4114 #ifndef DEBUG_RANGES_SECTION_LABEL
4115 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4116 #endif
4117 #ifndef DEBUG_MACINFO_SECTION_LABEL
4118 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4119 #endif
4120 #ifndef DEBUG_MACRO_SECTION_LABEL
4121 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4122 #endif
4123 #define SKELETON_COMP_DIE_ABBREV 1
4124 #define SKELETON_TYPE_DIE_ABBREV 2
4125
4126 /* Definitions of defaults for formats and names of various special
4127 (artificial) labels which may be generated within this file (when the -g
4128 option is used and DWARF2_DEBUGGING_INFO is in effect).
4129 If necessary, these may be overridden from within the tm.h file, but
4130 typically, overriding these defaults is unnecessary. */
4131
4132 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4133 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4134 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4135 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4136 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4137 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4138 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4139 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4140 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4141 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4142 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4143 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4144 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4145 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4146 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4147
4148 #ifndef TEXT_END_LABEL
4149 #define TEXT_END_LABEL "Letext"
4150 #endif
4151 #ifndef COLD_END_LABEL
4152 #define COLD_END_LABEL "Letext_cold"
4153 #endif
4154 #ifndef BLOCK_BEGIN_LABEL
4155 #define BLOCK_BEGIN_LABEL "LBB"
4156 #endif
4157 #ifndef BLOCK_INLINE_ENTRY_LABEL
4158 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4159 #endif
4160 #ifndef BLOCK_END_LABEL
4161 #define BLOCK_END_LABEL "LBE"
4162 #endif
4163 #ifndef LINE_CODE_LABEL
4164 #define LINE_CODE_LABEL "LM"
4165 #endif
4166
4167
4168 /* Return the root of the DIE's built for the current compilation unit. */
4169 static dw_die_ref
4170 comp_unit_die (void)
4171 {
4172 if (!single_comp_unit_die)
4173 single_comp_unit_die = gen_compile_unit_die (NULL);
4174 return single_comp_unit_die;
4175 }
4176
4177 /* We allow a language front-end to designate a function that is to be
4178 called to "demangle" any name before it is put into a DIE. */
4179
4180 static const char *(*demangle_name_func) (const char *);
4181
4182 void
4183 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4184 {
4185 demangle_name_func = func;
4186 }
4187
4188 /* Test if rtl node points to a pseudo register. */
4189
4190 static inline int
4191 is_pseudo_reg (const_rtx rtl)
4192 {
4193 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4194 || (GET_CODE (rtl) == SUBREG
4195 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4196 }
4197
4198 /* Return a reference to a type, with its const and volatile qualifiers
4199 removed. */
4200
4201 static inline tree
4202 type_main_variant (tree type)
4203 {
4204 type = TYPE_MAIN_VARIANT (type);
4205
4206 /* ??? There really should be only one main variant among any group of
4207 variants of a given type (and all of the MAIN_VARIANT values for all
4208 members of the group should point to that one type) but sometimes the C
4209 front-end messes this up for array types, so we work around that bug
4210 here. */
4211 if (TREE_CODE (type) == ARRAY_TYPE)
4212 while (type != TYPE_MAIN_VARIANT (type))
4213 type = TYPE_MAIN_VARIANT (type);
4214
4215 return type;
4216 }
4217
4218 /* Return nonzero if the given type node represents a tagged type. */
4219
4220 static inline int
4221 is_tagged_type (const_tree type)
4222 {
4223 enum tree_code code = TREE_CODE (type);
4224
4225 return (code == RECORD_TYPE || code == UNION_TYPE
4226 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4227 }
4228
4229 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4230
4231 static void
4232 get_ref_die_offset_label (char *label, dw_die_ref ref)
4233 {
4234 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4235 }
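
/* For example (illustrative): with debug_info_section_label "Ldebug_info0"
   and a DIE at offset 0x2a, the label text produced would be
   "Ldebug_info0+42".  */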
4236
4237 /* Return die_offset of a DIE reference to a base type. */
4238
4239 static unsigned long int
4240 get_base_type_offset (dw_die_ref ref)
4241 {
4242 if (ref->die_offset)
4243 return ref->die_offset;
4244 if (comp_unit_die ()->die_abbrev)
4245 {
4246 calc_base_type_die_sizes ();
4247 gcc_assert (ref->die_offset);
4248 }
4249 return ref->die_offset;
4250 }
4251
4252 /* Return die_offset of a DIE reference other than base type. */
4253
4254 static unsigned long int
4255 get_ref_die_offset (dw_die_ref ref)
4256 {
4257 gcc_assert (ref->die_offset);
4258 return ref->die_offset;
4259 }
4260
4261 /* Convert a DIE tag into its string name. */
4262
4263 static const char *
4264 dwarf_tag_name (unsigned int tag)
4265 {
4266 const char *name = get_DW_TAG_name (tag);
4267
4268 if (name != NULL)
4269 return name;
4270
4271 return "DW_TAG_<unknown>";
4272 }
4273
4274 /* Convert a DWARF attribute code into its string name. */
4275
4276 static const char *
4277 dwarf_attr_name (unsigned int attr)
4278 {
4279 const char *name;
4280
4281 switch (attr)
4282 {
4283 #if VMS_DEBUGGING_INFO
4284 case DW_AT_HP_prologue:
4285 return "DW_AT_HP_prologue";
4286 #else
4287 case DW_AT_MIPS_loop_unroll_factor:
4288 return "DW_AT_MIPS_loop_unroll_factor";
4289 #endif
4290
4291 #if VMS_DEBUGGING_INFO
4292 case DW_AT_HP_epilogue:
4293 return "DW_AT_HP_epilogue";
4294 #else
4295 case DW_AT_MIPS_stride:
4296 return "DW_AT_MIPS_stride";
4297 #endif
4298 }
4299
4300 name = get_DW_AT_name (attr);
4301
4302 if (name != NULL)
4303 return name;
4304
4305 return "DW_AT_<unknown>";
4306 }
4307
4308 /* Convert a DWARF value form code into its string name. */
4309
4310 static const char *
4311 dwarf_form_name (unsigned int form)
4312 {
4313 const char *name = get_DW_FORM_name (form);
4314
4315 if (name != NULL)
4316 return name;
4317
4318 return "DW_FORM_<unknown>";
4319 }
4320
4321 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4322 instance of an inlined instance of a decl which is local to an inline
4323 function, so we have to trace all of the way back through the origin chain
4324 to find out what sort of node actually served as the original seed for the
4325 given block. */
4326
4327 static tree
4328 decl_ultimate_origin (const_tree decl)
4329 {
4330 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4331 return NULL_TREE;
4332
4333 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4334 we're trying to output the abstract instance of this function. */
4335 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4336 return NULL_TREE;
4337
4338 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4339 most distant ancestor, this should never happen. */
4340 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4341
4342 return DECL_ABSTRACT_ORIGIN (decl);
4343 }
4344
4345 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4346 of a virtual function may refer to a base class, so we check the 'this'
4347 parameter. */
4348
4349 static tree
4350 decl_class_context (tree decl)
4351 {
4352 tree context = NULL_TREE;
4353
4354 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4355 context = DECL_CONTEXT (decl);
4356 else
4357 context = TYPE_MAIN_VARIANT
4358 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4359
4360 if (context && !TYPE_P (context))
4361 context = NULL_TREE;
4362
4363 return context;
4364 }
4365
4366 /* Add an attribute/value pair to a DIE. */
4367
4368 static inline void
4369 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4370 {
4371 /* Maybe this should be an assert? */
4372 if (die == NULL)
4373 return;
4374
4375 if (flag_checking)
4376 {
4377 /* Check we do not add duplicate attrs. Can't use get_AT here
4378 because that recurses to the specification/abstract origin DIE. */
4379 dw_attr_node *a;
4380 unsigned ix;
4381 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4382 gcc_assert (a->dw_attr != attr->dw_attr);
4383 }
4384
4385 vec_safe_reserve (die->die_attr, 1);
4386 vec_safe_push (die->die_attr, *attr);
4387 }
4388
4389 static inline enum dw_val_class
4390 AT_class (dw_attr_node *a)
4391 {
4392 return a->dw_attr_val.val_class;
4393 }
4394
4395 /* Return the index for any attribute that will be referenced with a
4396 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4397 are stored in dw_attr_val.v.val_str for reference counting
4398 pruning. */
4399
4400 static inline unsigned int
4401 AT_index (dw_attr_node *a)
4402 {
4403 if (AT_class (a) == dw_val_class_str)
4404 return a->dw_attr_val.v.val_str->index;
4405 else if (a->dw_attr_val.val_entry != NULL)
4406 return a->dw_attr_val.val_entry->index;
4407 return NOT_INDEXED;
4408 }
4409
4410 /* Add a flag value attribute to a DIE. */
4411
4412 static inline void
4413 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4414 {
4415 dw_attr_node attr;
4416
4417 attr.dw_attr = attr_kind;
4418 attr.dw_attr_val.val_class = dw_val_class_flag;
4419 attr.dw_attr_val.val_entry = NULL;
4420 attr.dw_attr_val.v.val_flag = flag;
4421 add_dwarf_attr (die, &attr);
4422 }
4423
4424 static inline unsigned
4425 AT_flag (dw_attr_node *a)
4426 {
4427 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4428 return a->dw_attr_val.v.val_flag;
4429 }
4430
4431 /* Add a signed integer attribute value to a DIE. */
4432
4433 static inline void
4434 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4435 {
4436 dw_attr_node attr;
4437
4438 attr.dw_attr = attr_kind;
4439 attr.dw_attr_val.val_class = dw_val_class_const;
4440 attr.dw_attr_val.val_entry = NULL;
4441 attr.dw_attr_val.v.val_int = int_val;
4442 add_dwarf_attr (die, &attr);
4443 }
4444
4445 static inline HOST_WIDE_INT
4446 AT_int (dw_attr_node *a)
4447 {
4448 gcc_assert (a && (AT_class (a) == dw_val_class_const
4449 || AT_class (a) == dw_val_class_const_implicit));
4450 return a->dw_attr_val.v.val_int;
4451 }
4452
4453 /* Add an unsigned integer attribute value to a DIE. */
4454
4455 static inline void
4456 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4457 unsigned HOST_WIDE_INT unsigned_val)
4458 {
4459 dw_attr_node attr;
4460
4461 attr.dw_attr = attr_kind;
4462 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4463 attr.dw_attr_val.val_entry = NULL;
4464 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4465 add_dwarf_attr (die, &attr);
4466 }
4467
4468 static inline unsigned HOST_WIDE_INT
4469 AT_unsigned (dw_attr_node *a)
4470 {
4471 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4472 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4473 return a->dw_attr_val.v.val_unsigned;
4474 }
4475
4476 /* Add an unsigned wide integer attribute value to a DIE. */
4477
4478 static inline void
4479 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4480 const wide_int& w)
4481 {
4482 dw_attr_node attr;
4483
4484 attr.dw_attr = attr_kind;
4485 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4486 attr.dw_attr_val.val_entry = NULL;
4487 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4488 *attr.dw_attr_val.v.val_wide = w;
4489 add_dwarf_attr (die, &attr);
4490 }
4491
4492 /* Add an unsigned double integer attribute value to a DIE. */
4493
4494 static inline void
4495 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4496 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4497 {
4498 dw_attr_node attr;
4499
4500 attr.dw_attr = attr_kind;
4501 attr.dw_attr_val.val_class = dw_val_class_const_double;
4502 attr.dw_attr_val.val_entry = NULL;
4503 attr.dw_attr_val.v.val_double.high = high;
4504 attr.dw_attr_val.v.val_double.low = low;
4505 add_dwarf_attr (die, &attr);
4506 }
4507
4508 /* Add a vector (array of raw bytes, e.g. a floating-point or vector constant) attribute value to a DIE. */
4509
4510 static inline void
4511 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4512 unsigned int length, unsigned int elt_size, unsigned char *array)
4513 {
4514 dw_attr_node attr;
4515
4516 attr.dw_attr = attr_kind;
4517 attr.dw_attr_val.val_class = dw_val_class_vec;
4518 attr.dw_attr_val.val_entry = NULL;
4519 attr.dw_attr_val.v.val_vec.length = length;
4520 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4521 attr.dw_attr_val.v.val_vec.array = array;
4522 add_dwarf_attr (die, &attr);
4523 }
4524
4525 /* Add an 8-byte data attribute value to a DIE. */
4526
4527 static inline void
4528 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4529 unsigned char data8[8])
4530 {
4531 dw_attr_node attr;
4532
4533 attr.dw_attr = attr_kind;
4534 attr.dw_attr_val.val_class = dw_val_class_data8;
4535 attr.dw_attr_val.val_entry = NULL;
4536 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4537 add_dwarf_attr (die, &attr);
4538 }
4539
4540 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4541 dwarf_split_debug_info, address attributes in dies destined for the
4542 final executable have force_direct set to avoid using indexed
4543 references. */
4544
4545 static inline void
4546 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4547 bool force_direct)
4548 {
4549 dw_attr_node attr;
4550 char * lbl_id;
4551
4552 lbl_id = xstrdup (lbl_low);
4553 attr.dw_attr = DW_AT_low_pc;
4554 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4555 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4556 if (dwarf_split_debug_info && !force_direct)
4557 attr.dw_attr_val.val_entry
4558 = add_addr_table_entry (lbl_id, ate_kind_label);
4559 else
4560 attr.dw_attr_val.val_entry = NULL;
4561 add_dwarf_attr (die, &attr);
4562
4563 attr.dw_attr = DW_AT_high_pc;
4564 if (dwarf_version < 4)
4565 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4566 else
4567 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4568 lbl_id = xstrdup (lbl_high);
4569 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4570 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4571 && dwarf_split_debug_info && !force_direct)
4572 attr.dw_attr_val.val_entry
4573 = add_addr_table_entry (lbl_id, ate_kind_label);
4574 else
4575 attr.dw_attr_val.val_entry = NULL;
4576 add_dwarf_attr (die, &attr);
4577 }
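
/* Usage sketch (illustrative; the actual calls appear later in this file):

     add_AT_low_high_pc (subr_die, begin_label, end_label, false);

   emits DW_AT_low_pc for BEGIN_LABEL and, for DWARF 4 and later, a
   DW_AT_high_pc value expressed relative to it via dw_val_class_high_pc.  */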
4578
4579 /* Hash and equality functions for debug_str_hash. */
4580
4581 hashval_t
4582 indirect_string_hasher::hash (indirect_string_node *x)
4583 {
4584 return htab_hash_string (x->str);
4585 }
4586
4587 bool
4588 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4589 {
4590 return strcmp (x1->str, x2) == 0;
4591 }
4592
4593 /* Add STR to the given string hash table. */
4594
4595 static struct indirect_string_node *
4596 find_AT_string_in_table (const char *str,
4597 hash_table<indirect_string_hasher> *table)
4598 {
4599 struct indirect_string_node *node;
4600
4601 indirect_string_node **slot
4602 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4603 if (*slot == NULL)
4604 {
4605 node = ggc_cleared_alloc<indirect_string_node> ();
4606 node->str = ggc_strdup (str);
4607 *slot = node;
4608 }
4609 else
4610 node = *slot;
4611
4612 node->refcount++;
4613 return node;
4614 }
4615
4616 /* Add STR to the indirect string hash table. */
4617
4618 static struct indirect_string_node *
4619 find_AT_string (const char *str)
4620 {
4621 if (! debug_str_hash)
4622 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4623
4624 return find_AT_string_in_table (str, debug_str_hash);
4625 }
4626
4627 /* Add a string attribute value to a DIE. */
4628
4629 static inline void
4630 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4631 {
4632 dw_attr_node attr;
4633 struct indirect_string_node *node;
4634
4635 node = find_AT_string (str);
4636
4637 attr.dw_attr = attr_kind;
4638 attr.dw_attr_val.val_class = dw_val_class_str;
4639 attr.dw_attr_val.val_entry = NULL;
4640 attr.dw_attr_val.v.val_str = node;
4641 add_dwarf_attr (die, &attr);
4642 }
4643
4644 static inline const char *
4645 AT_string (dw_attr_node *a)
4646 {
4647 gcc_assert (a && AT_class (a) == dw_val_class_str);
4648 return a->dw_attr_val.v.val_str->str;
4649 }
4650
4651 /* Call this function directly to bypass AT_string_form's logic to put
4652 the string inline in the die. */
4653
4654 static void
4655 set_indirect_string (struct indirect_string_node *node)
4656 {
4657 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4658 /* Already indirect is a no op. */
4659 if (node->form == DW_FORM_strp
4660 || node->form == DW_FORM_line_strp
4661 || node->form == DW_FORM_GNU_str_index)
4662 {
4663 gcc_assert (node->label);
4664 return;
4665 }
4666 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4667 ++dw2_string_counter;
4668 node->label = xstrdup (label);
4669
4670 if (!dwarf_split_debug_info)
4671 {
4672 node->form = DW_FORM_strp;
4673 node->index = NOT_INDEXED;
4674 }
4675 else
4676 {
4677 node->form = DW_FORM_GNU_str_index;
4678 node->index = NO_INDEX_ASSIGNED;
4679 }
4680 }
4681
4682 /* A helper function for dwarf2out_finish, called to reset indirect
4683 string decisions done for early LTO dwarf output before fat object
4684 dwarf output. */
4685
4686 int
4687 reset_indirect_string (indirect_string_node **h, void *)
4688 {
4689 struct indirect_string_node *node = *h;
4690 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4691 {
4692 free (node->label);
4693 node->label = NULL;
4694 node->form = (dwarf_form) 0;
4695 node->index = 0;
4696 }
4697 return 1;
4698 }
4699
4700 /* Find out whether a string should be output inline in DIE
4701 or out-of-line in .debug_str section. */
4702
4703 static enum dwarf_form
4704 find_string_form (struct indirect_string_node *node)
4705 {
4706 unsigned int len;
4707
4708 if (node->form)
4709 return node->form;
4710
4711 len = strlen (node->str) + 1;
4712
4713 /* If the string is shorter or equal to the size of the reference, it is
4714 always better to put it inline. */
4715 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4716 return node->form = DW_FORM_string;
4717
4718 /* If we cannot expect the linker to merge strings in the .debug_str
4719 section, only put the string into .debug_str if doing so pays off
4720 even within this single module. */
4721 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4722 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4723 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4724 return node->form = DW_FORM_string;
4725
4726 set_indirect_string (node);
4727
4728 return node->form;
4729 }
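
/* Worked example of the heuristic above (assuming DWARF_OFFSET_SIZE == 4 and
   no SECTION_MERGE support): a 5-byte string referenced once costs 5 bytes
   inline versus 4 + 5 = 9 bytes via .debug_str, so DW_FORM_string is chosen;
   the indirect form only pays off once the string is referenced often enough
   that (len - 4) * refcount exceeds len.  */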
4730
4731 /* Find out whether the string referenced from the attribute should be
4732 output inline in DIE or out-of-line in .debug_str section. */
4733
4734 static enum dwarf_form
4735 AT_string_form (dw_attr_node *a)
4736 {
4737 gcc_assert (a && AT_class (a) == dw_val_class_str);
4738 return find_string_form (a->dw_attr_val.v.val_str);
4739 }
4740
4741 /* Add a DIE reference attribute value to a DIE. */
4742
4743 static inline void
4744 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4745 {
4746 dw_attr_node attr;
4747 gcc_checking_assert (targ_die != NULL);
4748
4749 /* With LTO we can end up trying to reference something we didn't create
4750 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4751 if (targ_die == NULL)
4752 return;
4753
4754 attr.dw_attr = attr_kind;
4755 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4756 attr.dw_attr_val.val_entry = NULL;
4757 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4758 attr.dw_attr_val.v.val_die_ref.external = 0;
4759 add_dwarf_attr (die, &attr);
4760 }
4761
4762 /* Change DIE reference REF to point to NEW_DIE instead. */
4763
4764 static inline void
4765 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4766 {
4767 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4768 ref->dw_attr_val.v.val_die_ref.die = new_die;
4769 ref->dw_attr_val.v.val_die_ref.external = 0;
4770 }
4771
4772 /* Add an AT_specification attribute to a DIE, and also make the back
4773 pointer from the specification to the definition. */
4774
4775 static inline void
4776 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4777 {
4778 add_AT_die_ref (die, DW_AT_specification, targ_die);
4779 gcc_assert (!targ_die->die_definition);
4780 targ_die->die_definition = die;
4781 }
4782
4783 static inline dw_die_ref
4784 AT_ref (dw_attr_node *a)
4785 {
4786 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4787 return a->dw_attr_val.v.val_die_ref.die;
4788 }
4789
4790 static inline int
4791 AT_ref_external (dw_attr_node *a)
4792 {
4793 if (a && AT_class (a) == dw_val_class_die_ref)
4794 return a->dw_attr_val.v.val_die_ref.external;
4795
4796 return 0;
4797 }
4798
4799 static inline void
4800 set_AT_ref_external (dw_attr_node *a, int i)
4801 {
4802 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4803 a->dw_attr_val.v.val_die_ref.external = i;
4804 }
4805
4806 /* Add an FDE reference attribute value to a DIE. */
4807
4808 static inline void
4809 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4810 {
4811 dw_attr_node attr;
4812
4813 attr.dw_attr = attr_kind;
4814 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4815 attr.dw_attr_val.val_entry = NULL;
4816 attr.dw_attr_val.v.val_fde_index = targ_fde;
4817 add_dwarf_attr (die, &attr);
4818 }
4819
4820 /* Add a location description attribute value to a DIE. */
4821
4822 static inline void
4823 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4824 {
4825 dw_attr_node attr;
4826
4827 attr.dw_attr = attr_kind;
4828 attr.dw_attr_val.val_class = dw_val_class_loc;
4829 attr.dw_attr_val.val_entry = NULL;
4830 attr.dw_attr_val.v.val_loc = loc;
4831 add_dwarf_attr (die, &attr);
4832 }
4833
4834 static inline dw_loc_descr_ref
4835 AT_loc (dw_attr_node *a)
4836 {
4837 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4838 return a->dw_attr_val.v.val_loc;
4839 }
4840
4841 static inline void
4842 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4843 {
4844 dw_attr_node attr;
4845
4846 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4847 return;
4848
4849 attr.dw_attr = attr_kind;
4850 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4851 attr.dw_attr_val.val_entry = NULL;
4852 attr.dw_attr_val.v.val_loc_list = loc_list;
4853 add_dwarf_attr (die, &attr);
4854 have_location_lists = true;
4855 }
4856
4857 static inline dw_loc_list_ref
4858 AT_loc_list (dw_attr_node *a)
4859 {
4860 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4861 return a->dw_attr_val.v.val_loc_list;
4862 }
4863
4864 /* Add a view list attribute to DIE. It must have a DW_AT_location
4865 attribute, because the view list complements the location list. */
4866
4867 static inline void
4868 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4869 {
4870 dw_attr_node attr;
4871
4872 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4873 return;
4874
4875 attr.dw_attr = attr_kind;
4876 attr.dw_attr_val.val_class = dw_val_class_view_list;
4877 attr.dw_attr_val.val_entry = NULL;
4878 attr.dw_attr_val.v.val_view_list = die;
4879 add_dwarf_attr (die, &attr);
4880 gcc_checking_assert (get_AT (die, DW_AT_location));
4881 gcc_assert (have_location_lists);
4882 }
4883
4884 /* Return a pointer to the location list referenced by the attribute.
4885 If the named attribute is a view list, look up the corresponding
4886 DW_AT_location attribute and return its location list. */
4887
4888 static inline dw_loc_list_ref *
4889 AT_loc_list_ptr (dw_attr_node *a)
4890 {
4891 gcc_assert (a);
4892 switch (AT_class (a))
4893 {
4894 case dw_val_class_loc_list:
4895 return &a->dw_attr_val.v.val_loc_list;
4896 case dw_val_class_view_list:
4897 {
4898 dw_attr_node *l;
4899 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4900 if (!l)
4901 return NULL;
4902 gcc_checking_assert (l + 1 == a);
4903 return AT_loc_list_ptr (l);
4904 }
4905 default:
4906 gcc_unreachable ();
4907 }
4908 }
4909
4910 /* Return the location attribute value associated with a view list
4911 attribute value. */
4912
4913 static inline dw_val_node *
4914 view_list_to_loc_list_val_node (dw_val_node *val)
4915 {
4916 gcc_assert (val->val_class == dw_val_class_view_list);
4917 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4918 if (!loc)
4919 return NULL;
4920 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4921 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4922 return &loc->dw_attr_val;
4923 }
4924
4925 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4926 {
4927 static hashval_t hash (addr_table_entry *);
4928 static bool equal (addr_table_entry *, addr_table_entry *);
4929 };
4930
4931 /* Table of entries into the .debug_addr section. */
4932
4933 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4934
4935 /* Hash an addr_table_entry. */
4936
4937 hashval_t
4938 addr_hasher::hash (addr_table_entry *a)
4939 {
4940 inchash::hash hstate;
4941 switch (a->kind)
4942 {
4943 case ate_kind_rtx:
4944 hstate.add_int (0);
4945 break;
4946 case ate_kind_rtx_dtprel:
4947 hstate.add_int (1);
4948 break;
4949 case ate_kind_label:
4950 return htab_hash_string (a->addr.label);
4951 default:
4952 gcc_unreachable ();
4953 }
4954 inchash::add_rtx (a->addr.rtl, hstate);
4955 return hstate.end ();
4956 }
4957
4958 /* Determine equality for two addr_table_entries. */
4959
4960 bool
4961 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4962 {
4963 if (a1->kind != a2->kind)
4964 return 0;
4965 switch (a1->kind)
4966 {
4967 case ate_kind_rtx:
4968 case ate_kind_rtx_dtprel:
4969 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4970 case ate_kind_label:
4971 return strcmp (a1->addr.label, a2->addr.label) == 0;
4972 default:
4973 gcc_unreachable ();
4974 }
4975 }
4976
4977 /* Initialize an addr_table_entry. */
4978
4979 void
4980 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4981 {
4982 e->kind = kind;
4983 switch (kind)
4984 {
4985 case ate_kind_rtx:
4986 case ate_kind_rtx_dtprel:
4987 e->addr.rtl = (rtx) addr;
4988 break;
4989 case ate_kind_label:
4990 e->addr.label = (char *) addr;
4991 break;
4992 }
4993 e->refcount = 0;
4994 e->index = NO_INDEX_ASSIGNED;
4995 }
4996
4997 /* Add an entry for ADDR of kind KIND to the address table. Defer setting an
4998 index until output time. */
4999
5000 static addr_table_entry *
5001 add_addr_table_entry (void *addr, enum ate_kind kind)
5002 {
5003 addr_table_entry *node;
5004 addr_table_entry finder;
5005
5006 gcc_assert (dwarf_split_debug_info);
5007 if (! addr_index_table)
5008 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5009 init_addr_table_entry (&finder, kind, addr);
5010 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5011
5012 if (*slot == HTAB_EMPTY_ENTRY)
5013 {
5014 node = ggc_cleared_alloc<addr_table_entry> ();
5015 init_addr_table_entry (node, kind, addr);
5016 *slot = node;
5017 }
5018 else
5019 node = *slot;
5020
5021 node->refcount++;
5022 return node;
5023 }
5024
5025 /* Remove an entry from the addr table by decrementing its refcount.
5026 Strictly, decrementing the refcount would be enough, but the
5027 assertion that the entry is actually in the table has found
5028 bugs. */
5029
5030 static void
5031 remove_addr_table_entry (addr_table_entry *entry)
5032 {
5033 gcc_assert (dwarf_split_debug_info && addr_index_table);
5034 /* After an index is assigned, the table is frozen. */
5035 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5036 entry->refcount--;
5037 }
5038
5039 /* Given a location list, remove all addresses it refers to from the
5040 address_table. */
5041
5042 static void
5043 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5044 {
5045 for (; descr; descr = descr->dw_loc_next)
5046 if (descr->dw_loc_oprnd1.val_entry != NULL)
5047 {
5048 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5049 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5050 }
5051 }
5052
5053 /* A helper function for dwarf2out_finish called through
5054 htab_traverse. Assign an addr_table_entry its index. All entries
5055 must be collected into the table when this function is called,
5056 because the indexing code relies on htab_traverse to traverse nodes
5057 in the same order for each run. */
5058
5059 int
5060 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5061 {
5062 addr_table_entry *node = *h;
5063
5064 /* Don't index unreferenced nodes. */
5065 if (node->refcount == 0)
5066 return 1;
5067
5068 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5069 node->index = *index;
5070 *index += 1;
5071
5072 return 1;
5073 }
5074
5075 /* Add an address constant attribute value to a DIE. When using
5076 dwarf_split_debug_info, address attributes in dies destined for the
5077 final executable should be direct references--setting the parameter
5078 force_direct ensures this behavior. */
5079
5080 static inline void
5081 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5082 bool force_direct)
5083 {
5084 dw_attr_node attr;
5085
5086 attr.dw_attr = attr_kind;
5087 attr.dw_attr_val.val_class = dw_val_class_addr;
5088 attr.dw_attr_val.v.val_addr = addr;
5089 if (dwarf_split_debug_info && !force_direct)
5090 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5091 else
5092 attr.dw_attr_val.val_entry = NULL;
5093 add_dwarf_attr (die, &attr);
5094 }
5095
5096 /* Get the RTX from an address DIE attribute. */
5097
5098 static inline rtx
5099 AT_addr (dw_attr_node *a)
5100 {
5101 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5102 return a->dw_attr_val.v.val_addr;
5103 }
5104
5105 /* Add a file attribute value to a DIE. */
5106
5107 static inline void
5108 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5109 struct dwarf_file_data *fd)
5110 {
5111 dw_attr_node attr;
5112
5113 attr.dw_attr = attr_kind;
5114 attr.dw_attr_val.val_class = dw_val_class_file;
5115 attr.dw_attr_val.val_entry = NULL;
5116 attr.dw_attr_val.v.val_file = fd;
5117 add_dwarf_attr (die, &attr);
5118 }
5119
5120 /* Get the dwarf_file_data from a file DIE attribute. */
5121
5122 static inline struct dwarf_file_data *
5123 AT_file (dw_attr_node *a)
5124 {
5125 gcc_assert (a && (AT_class (a) == dw_val_class_file
5126 || AT_class (a) == dw_val_class_file_implicit));
5127 return a->dw_attr_val.v.val_file;
5128 }
5129
5130 /* Add a vms delta attribute value to a DIE. */
5131
5132 static inline void
5133 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5134 const char *lbl1, const char *lbl2)
5135 {
5136 dw_attr_node attr;
5137
5138 attr.dw_attr = attr_kind;
5139 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5140 attr.dw_attr_val.val_entry = NULL;
5141 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5142 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5143 add_dwarf_attr (die, &attr);
5144 }
5145
5146 /* Add a symbolic view identifier attribute value to a DIE. */
5147
5148 static inline void
5149 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5150 const char *view_label)
5151 {
5152 dw_attr_node attr;
5153
5154 attr.dw_attr = attr_kind;
5155 attr.dw_attr_val.val_class = dw_val_class_symview;
5156 attr.dw_attr_val.val_entry = NULL;
5157 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5158 add_dwarf_attr (die, &attr);
5159 }
5160
5161 /* Add a label identifier attribute value to a DIE. */
5162
5163 static inline void
5164 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5165 const char *lbl_id)
5166 {
5167 dw_attr_node attr;
5168
5169 attr.dw_attr = attr_kind;
5170 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5171 attr.dw_attr_val.val_entry = NULL;
5172 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5173 if (dwarf_split_debug_info)
5174 attr.dw_attr_val.val_entry
5175 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5176 ate_kind_label);
5177 add_dwarf_attr (die, &attr);
5178 }
5179
5180 /* Add a section offset attribute value to a DIE, an offset into the
5181 debug_line section. */
5182
5183 static inline void
5184 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5185 const char *label)
5186 {
5187 dw_attr_node attr;
5188
5189 attr.dw_attr = attr_kind;
5190 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5191 attr.dw_attr_val.val_entry = NULL;
5192 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5193 add_dwarf_attr (die, &attr);
5194 }
5195
5196 /* Add a section offset attribute value to a DIE, an offset into the
5197 debug_loclists section. */
5198
5199 static inline void
5200 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5201 const char *label)
5202 {
5203 dw_attr_node attr;
5204
5205 attr.dw_attr = attr_kind;
5206 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5207 attr.dw_attr_val.val_entry = NULL;
5208 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5209 add_dwarf_attr (die, &attr);
5210 }
5211
5212 /* Add a section offset attribute value to a DIE, an offset into the
5213 debug_macinfo section. */
5214
5215 static inline void
5216 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5217 const char *label)
5218 {
5219 dw_attr_node attr;
5220
5221 attr.dw_attr = attr_kind;
5222 attr.dw_attr_val.val_class = dw_val_class_macptr;
5223 attr.dw_attr_val.val_entry = NULL;
5224 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5225 add_dwarf_attr (die, &attr);
5226 }
5227
5228 /* Add an offset attribute value to a DIE. */
5229
5230 static inline void
5231 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5232 unsigned HOST_WIDE_INT offset)
5233 {
5234 dw_attr_node attr;
5235
5236 attr.dw_attr = attr_kind;
5237 attr.dw_attr_val.val_class = dw_val_class_offset;
5238 attr.dw_attr_val.val_entry = NULL;
5239 attr.dw_attr_val.v.val_offset = offset;
5240 add_dwarf_attr (die, &attr);
5241 }
5242
5243 /* Add a range_list attribute value to a DIE. When using
5244 dwarf_split_debug_info, address attributes in dies destined for the
5245 final executable should be direct references--setting the parameter
5246 force_direct ensures this behavior. */
5247
5248 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5249 #define RELOCATED_OFFSET (NULL)
5250
5251 static void
5252 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5253 long unsigned int offset, bool force_direct)
5254 {
5255 dw_attr_node attr;
5256
5257 attr.dw_attr = attr_kind;
5258 attr.dw_attr_val.val_class = dw_val_class_range_list;
5259 /* For the range_list attribute, use val_entry to store whether the
5260 offset should follow split-debug-info or normal semantics. This
5261 value is read in output_range_list_offset. */
5262 if (dwarf_split_debug_info && !force_direct)
5263 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5264 else
5265 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5266 attr.dw_attr_val.v.val_offset = offset;
5267 add_dwarf_attr (die, &attr);
5268 }
5269
5270 /* Return the start label of a delta attribute. */
5271
5272 static inline const char *
5273 AT_vms_delta1 (dw_attr_node *a)
5274 {
5275 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5276 return a->dw_attr_val.v.val_vms_delta.lbl1;
5277 }
5278
5279 /* Return the end label of a delta attribute. */
5280
5281 static inline const char *
5282 AT_vms_delta2 (dw_attr_node *a)
5283 {
5284 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5285 return a->dw_attr_val.v.val_vms_delta.lbl2;
5286 }
5287
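/* Return the label string held by a label-class attribute node A. */
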
5288 static inline const char *
5289 AT_lbl (dw_attr_node *a)
5290 {
5291 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5292 || AT_class (a) == dw_val_class_lineptr
5293 || AT_class (a) == dw_val_class_macptr
5294 || AT_class (a) == dw_val_class_loclistsptr
5295 || AT_class (a) == dw_val_class_high_pc));
5296 return a->dw_attr_val.v.val_lbl_id;
5297 }
5298
5299 /* Get the attribute of type attr_kind. */
5300
5301 static dw_attr_node *
5302 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5303 {
5304 dw_attr_node *a;
5305 unsigned ix;
5306 dw_die_ref spec = NULL;
5307
5308 if (! die)
5309 return NULL;
5310
5311 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5312 if (a->dw_attr == attr_kind)
5313 return a;
5314 else if (a->dw_attr == DW_AT_specification
5315 || a->dw_attr == DW_AT_abstract_origin)
5316 spec = AT_ref (a);
5317
5318 if (spec)
5319 return get_AT (spec, attr_kind);
5320
5321 return NULL;
5322 }
5323
5324 /* Returns the parent of the declaration of DIE. */
5325
5326 static dw_die_ref
5327 get_die_parent (dw_die_ref die)
5328 {
5329 dw_die_ref t;
5330
5331 if (!die)
5332 return NULL;
5333
5334 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5335 || (t = get_AT_ref (die, DW_AT_specification)))
5336 die = t;
5337
5338 return die->die_parent;
5339 }
5340
5341 /* Return the "low pc" attribute value, typically associated with a subprogram
5342 DIE. Return null if the "low pc" attribute is either not present, or if it
5343 cannot be represented as an assembler label identifier. */
5344
5345 static inline const char *
5346 get_AT_low_pc (dw_die_ref die)
5347 {
5348 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5349
5350 return a ? AT_lbl (a) : NULL;
5351 }
5352
5353 /* Return the "high pc" attribute value, typically associated with a subprogram
5354 DIE. Return null if the "high pc" attribute is either not present, or if it
5355 cannot be represented as an assembler label identifier. */
5356
5357 static inline const char *
5358 get_AT_hi_pc (dw_die_ref die)
5359 {
5360 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5361
5362 return a ? AT_lbl (a) : NULL;
5363 }
5364
5365 /* Return the value of the string attribute designated by ATTR_KIND, or
5366 NULL if it is not present. */
5367
5368 static inline const char *
5369 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5370 {
5371 dw_attr_node *a = get_AT (die, attr_kind);
5372
5373 return a ? AT_string (a) : NULL;
5374 }
5375
5376 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5377 if it is not present. */
5378
5379 static inline int
5380 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5381 {
5382 dw_attr_node *a = get_AT (die, attr_kind);
5383
5384 return a ? AT_flag (a) : 0;
5385 }
5386
5387 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5388 if it is not present. */
5389
5390 static inline unsigned
5391 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5392 {
5393 dw_attr_node *a = get_AT (die, attr_kind);
5394
5395 return a ? AT_unsigned (a) : 0;
5396 }
5397
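/* Return the DIE referenced by the attribute designated by ATTR_KIND, or
NULL if it is not present. */
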
5398 static inline dw_die_ref
5399 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5400 {
5401 dw_attr_node *a = get_AT (die, attr_kind);
5402
5403 return a ? AT_ref (a) : NULL;
5404 }
5405
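/* Return the dwarf_file_data of the file attribute designated by
ATTR_KIND, or NULL if it is not present. */
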
5406 static inline struct dwarf_file_data *
5407 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5408 {
5409 dw_attr_node *a = get_AT (die, attr_kind);
5410
5411 return a ? AT_file (a) : NULL;
5412 }
5413
5414 /* Return TRUE if the language is C++. */
5415
5416 static inline bool
5417 is_cxx (void)
5418 {
5419 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5420
5421 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5422 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5423 }
5424
5425 /* Return TRUE if DECL was created by the C++ frontend. */
5426
5427 static bool
5428 is_cxx (const_tree decl)
5429 {
5430 if (in_lto_p)
5431 {
5432 const_tree context = get_ultimate_context (decl);
5433 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5434 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5435 }
5436 return is_cxx ();
5437 }
5438
5439 /* Return TRUE if the language is Fortran. */
5440
5441 static inline bool
5442 is_fortran (void)
5443 {
5444 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5445
5446 return (lang == DW_LANG_Fortran77
5447 || lang == DW_LANG_Fortran90
5448 || lang == DW_LANG_Fortran95
5449 || lang == DW_LANG_Fortran03
5450 || lang == DW_LANG_Fortran08);
5451 }
5452
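/* Return TRUE if DECL was created by the Fortran frontend. */
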
5453 static inline bool
5454 is_fortran (const_tree decl)
5455 {
5456 if (in_lto_p)
5457 {
5458 const_tree context = get_ultimate_context (decl);
5459 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5460 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5461 "GNU Fortran", 11) == 0
5462 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5463 "GNU F77") == 0);
5464 }
5465 return is_fortran ();
5466 }
5467
5468 /* Return TRUE if the language is Ada. */
5469
5470 static inline bool
5471 is_ada (void)
5472 {
5473 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5474
5475 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5476 }
5477
5478 /* Remove the specified attribute if present. Return TRUE if removal
5479 was successful. */
5480
5481 static bool
5482 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5483 {
5484 dw_attr_node *a;
5485 unsigned ix;
5486
5487 if (! die)
5488 return false;
5489
5490 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5491 if (a->dw_attr == attr_kind)
5492 {
5493 if (AT_class (a) == dw_val_class_str)
5494 if (a->dw_attr_val.v.val_str->refcount)
5495 a->dw_attr_val.v.val_str->refcount--;
5496
5497 /* vec::ordered_remove should help reduce the number of abbrevs
5498 that are needed. */
5499 die->die_attr->ordered_remove (ix);
5500 return true;
5501 }
5502 return false;
5503 }
5504
5505 /* Remove CHILD from its parent. PREV must have the property that
5506 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5507
5508 static void
5509 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5510 {
5511 gcc_assert (child->die_parent == prev->die_parent);
5512 gcc_assert (prev->die_sib == child);
5513 if (prev == child)
5514 {
5515 gcc_assert (child->die_parent->die_child == child);
5516 prev = NULL;
5517 }
5518 else
5519 prev->die_sib = child->die_sib;
5520 if (child->die_parent->die_child == child)
5521 child->die_parent->die_child = prev;
5522 child->die_sib = NULL;
5523 }
5524
5525 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5526 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5527
5528 static void
5529 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5530 {
5531 dw_die_ref parent = old_child->die_parent;
5532
5533 gcc_assert (parent == prev->die_parent);
5534 gcc_assert (prev->die_sib == old_child);
5535
5536 new_child->die_parent = parent;
5537 if (prev == old_child)
5538 {
5539 gcc_assert (parent->die_child == old_child);
5540 new_child->die_sib = new_child;
5541 }
5542 else
5543 {
5544 prev->die_sib = new_child;
5545 new_child->die_sib = old_child->die_sib;
5546 }
5547 if (old_child->die_parent->die_child == old_child)
5548 old_child->die_parent->die_child = new_child;
5549 old_child->die_sib = NULL;
5550 }
5551
5552 /* Move all children from OLD_PARENT to NEW_PARENT. */
5553
5554 static void
5555 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5556 {
5557 dw_die_ref c;
5558 new_parent->die_child = old_parent->die_child;
5559 old_parent->die_child = NULL;
5560 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5561 }
5562
5563 /* Remove any children of DIE whose die_tag is TAG. Do nothing if no child
5564 matches TAG. */
5565
5566 static void
5567 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5568 {
5569 dw_die_ref c;
5570
5571 c = die->die_child;
5572 if (c) do {
5573 dw_die_ref prev = c;
5574 c = c->die_sib;
5575 while (c->die_tag == tag)
5576 {
5577 remove_child_with_prev (c, prev);
5578 c->die_parent = NULL;
5579 /* Might have removed every child. */
5580 if (die->die_child == NULL)
5581 return;
5582 c = prev->die_sib;
5583 }
5584 } while (c != die->die_child);
5585 }
5586
5587 /* Add a CHILD_DIE as the last child of DIE. */
5588
5589 static void
5590 add_child_die (dw_die_ref die, dw_die_ref child_die)
5591 {
5592 /* FIXME this should probably be an assert. */
5593 if (! die || ! child_die)
5594 return;
5595 gcc_assert (die != child_die);
5596
5597 child_die->die_parent = die;
5598 if (die->die_child)
5599 {
5600 child_die->die_sib = die->die_child->die_sib;
5601 die->die_child->die_sib = child_die;
5602 }
5603 else
5604 child_die->die_sib = child_die;
5605 die->die_child = child_die;
5606 }
5607
5608 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5609
5610 static void
5611 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5612 dw_die_ref after_die)
5613 {
5614 gcc_assert (die
5615 && child_die
5616 && after_die
5617 && die->die_child
5618 && die != child_die);
5619
5620 child_die->die_parent = die;
5621 child_die->die_sib = after_die->die_sib;
5622 after_die->die_sib = child_die;
5623 if (die->die_child == after_die)
5624 die->die_child = child_die;
5625 }
5626
5627 /* Unassociate CHILD from its parent, and make its parent be
5628 NEW_PARENT. */
5629
5630 static void
5631 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5632 {
5633 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5634 if (p->die_sib == child)
5635 {
5636 remove_child_with_prev (child, p);
5637 break;
5638 }
5639 add_child_die (new_parent, child);
5640 }
5641
5642 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5643 is the specification, to the end of PARENT's list of children.
5644 This is done by removing and re-adding it. */
5645
5646 static void
5647 splice_child_die (dw_die_ref parent, dw_die_ref child)
5648 {
5649 /* We want the declaration DIE from inside the class, not the
5650 specification DIE at toplevel. */
5651 if (child->die_parent != parent)
5652 {
5653 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5654
5655 if (tmp)
5656 child = tmp;
5657 }
5658
5659 gcc_assert (child->die_parent == parent
5660 || (child->die_parent
5661 == get_AT_ref (parent, DW_AT_specification)));
5662
5663 reparent_child (child, parent);
5664 }
5665
5666 /* Create and return a new die with TAG_VALUE as tag. */
5667
5668 static inline dw_die_ref
5669 new_die_raw (enum dwarf_tag tag_value)
5670 {
5671 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5672 die->die_tag = tag_value;
5673 return die;
5674 }
5675
5676 /* Create and return a new die with a parent of PARENT_DIE. If
5677 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5678 associated tree T must be supplied to determine parenthood
5679 later. */
5680
5681 static inline dw_die_ref
5682 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5683 {
5684 dw_die_ref die = new_die_raw (tag_value);
5685
5686 if (parent_die != NULL)
5687 add_child_die (parent_die, die);
5688 else
5689 {
5690 limbo_die_node *limbo_node;
5691
5692 /* No DIEs created after early dwarf should end up in limbo,
5693 because the limbo list should not persist past LTO
5694 streaming. */
5695 if (tag_value != DW_TAG_compile_unit
5696 /* These are allowed because they're generated while
5697 breaking out COMDAT units late. */
5698 && tag_value != DW_TAG_type_unit
5699 && tag_value != DW_TAG_skeleton_unit
5700 && !early_dwarf
5701 /* Allow nested functions to live in limbo because they will
5702 only temporarily live there, as decls_for_scope will fix
5703 them up. */
5704 && (TREE_CODE (t) != FUNCTION_DECL
5705 || !decl_function_context (t))
5706 /* Same as nested functions above but for types. Types that
5707 are local to a function will be fixed in
5708 decls_for_scope. */
5709 && (!RECORD_OR_UNION_TYPE_P (t)
5710 || !TYPE_CONTEXT (t)
5711 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5712 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5713 especially in the ltrans stage, but once we implement LTO
5714 dwarf streaming, we should remove this exception. */
5715 && !in_lto_p)
5716 {
5717 fprintf (stderr, "symbol ended up in limbo too late:");
5718 debug_generic_stmt (t);
5719 gcc_unreachable ();
5720 }
5721
5722 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5723 limbo_node->die = die;
5724 limbo_node->created_for = t;
5725 limbo_node->next = limbo_die_list;
5726 limbo_die_list = limbo_node;
5727 }
5728
5729 return die;
5730 }
5731
5732 /* Return the DIE associated with the given type specifier. */
5733
5734 static inline dw_die_ref
5735 lookup_type_die (tree type)
5736 {
5737 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5738 if (die && die->removed)
5739 {
5740 TYPE_SYMTAB_DIE (type) = NULL;
5741 return NULL;
5742 }
5743 return die;
5744 }
5745
5746 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5747 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5748 anonymous type instead of the one of the naming typedef. */
5749
5750 static inline dw_die_ref
5751 strip_naming_typedef (tree type, dw_die_ref type_die)
5752 {
5753 if (type
5754 && TREE_CODE (type) == RECORD_TYPE
5755 && type_die
5756 && type_die->die_tag == DW_TAG_typedef
5757 && is_naming_typedef_decl (TYPE_NAME (type)))
5758 type_die = get_AT_ref (type_die, DW_AT_type);
5759 return type_die;
5760 }
5761
5762 /* Like lookup_type_die, but if type is an anonymous type named by a
5763 typedef[1], return the DIE of the anonymous type instead of the one of
5764 the naming typedef. This is because in gen_typedef_die, we did
5765 equate the anonymous struct named by the typedef with the DIE of
5766 the naming typedef. So by default, lookup_type_die on an anonymous
5767 struct yields the DIE of the naming typedef.
5768
5769 [1]: Read the comment of is_naming_typedef_decl to learn about what
5770 a naming typedef is. */
5771
5772 static inline dw_die_ref
5773 lookup_type_die_strip_naming_typedef (tree type)
5774 {
5775 dw_die_ref die = lookup_type_die (type);
5776 return strip_naming_typedef (type, die);
5777 }
5778
5779 /* Equate a DIE to a given type specifier. */
5780
5781 static inline void
5782 equate_type_number_to_die (tree type, dw_die_ref type_die)
5783 {
5784 TYPE_SYMTAB_DIE (type) = type_die;
5785 }
5786
5787 /* Returns a hash value for X (which really is a die_struct). */
5788
5789 inline hashval_t
5790 decl_die_hasher::hash (die_node *x)
5791 {
5792 return (hashval_t) x->decl_id;
5793 }
5794
5795 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5796
5797 inline bool
5798 decl_die_hasher::equal (die_node *x, tree y)
5799 {
5800 return (x->decl_id == DECL_UID (y));
5801 }
5802
5803 /* Return the DIE associated with a given declaration. */
5804
5805 static inline dw_die_ref
5806 lookup_decl_die (tree decl)
5807 {
5808 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5809 NO_INSERT);
5810 if (!die)
5811 return NULL;
5812 if ((*die)->removed)
5813 {
5814 decl_die_table->clear_slot (die);
5815 return NULL;
5816 }
5817 return *die;
5818 }
5819
5820
5821 /* For DECL which might have early dwarf output query a SYMBOL + OFFSET
5822 style reference. Return true if we found one referring to a DIE for
5823 DECL, otherwise return false. */
5824
5825 static bool
5826 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5827 unsigned HOST_WIDE_INT *off)
5828 {
5829 dw_die_ref die;
5830
5831 if (flag_wpa && !decl_die_table)
5832 return false;
5833
5834 if (TREE_CODE (decl) == BLOCK)
5835 die = BLOCK_DIE (decl);
5836 else
5837 die = lookup_decl_die (decl);
5838 if (!die)
5839 return false;
5840
5841 /* During WPA stage we currently use DIEs to store the
5842 decl <-> label + offset map. That's quite inefficient but it
5843 works for now. */
5844 if (flag_wpa)
5845 {
5846 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5847 if (!ref)
5848 {
5849 gcc_assert (die == comp_unit_die ());
5850 return false;
5851 }
5852 *off = ref->die_offset;
5853 *sym = ref->die_id.die_symbol;
5854 return true;
5855 }
5856
5857 /* Similar to get_ref_die_offset_label, but using the "correct"
5858 label. */
5859 *off = die->die_offset;
5860 while (die->die_parent)
5861 die = die->die_parent;
5862 /* For the containing CU DIE we compute a die_symbol in
5863 compute_comp_unit_symbol. */
5864 gcc_assert (die->die_tag == DW_TAG_compile_unit
5865 && die->die_id.die_symbol != NULL);
5866 *sym = die->die_id.die_symbol;
5867 return true;
5868 }
5869
5870 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5871
5872 static void
5873 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5874 const char *symbol, HOST_WIDE_INT offset)
5875 {
5876 /* Create a fake DIE that contains the reference. Don't use
5877 new_die because we don't want to end up in the limbo list. */
5878 dw_die_ref ref = new_die_raw (die->die_tag);
5879 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5880 ref->die_offset = offset;
5881 ref->with_offset = 1;
5882 add_AT_die_ref (die, attr_kind, ref);
5883 }
5884
5885 /* Create a DIE for DECL if required and add a reference to a DIE
5886 at SYMBOL + OFFSET which contains attributes dumped early. */
5887
5888 static void
5889 dwarf2out_register_external_die (tree decl, const char *sym,
5890 unsigned HOST_WIDE_INT off)
5891 {
5892 if (debug_info_level == DINFO_LEVEL_NONE)
5893 return;
5894
5895 if (flag_wpa && !decl_die_table)
5896 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5897
5898 dw_die_ref die
5899 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5900 gcc_assert (!die);
5901
5902 tree ctx;
5903 dw_die_ref parent = NULL;
5904 /* Need to look up a DIE for the decl's context - the containing
5905 function or translation unit. */
5906 if (TREE_CODE (decl) == BLOCK)
5907 {
5908 ctx = BLOCK_SUPERCONTEXT (decl);
5909 /* ??? We do not output DIEs for all scopes thus skip as
5910 many DIEs as needed. */
5911 while (TREE_CODE (ctx) == BLOCK
5912 && !BLOCK_DIE (ctx))
5913 ctx = BLOCK_SUPERCONTEXT (ctx);
5914 }
5915 else
5916 ctx = DECL_CONTEXT (decl);
5917 /* Peel types in the context stack. */
5918 while (ctx && TYPE_P (ctx))
5919 ctx = TYPE_CONTEXT (ctx);
5920 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5921 if (debug_info_level <= DINFO_LEVEL_TERSE)
5922 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5923 ctx = DECL_CONTEXT (ctx);
5924 if (ctx)
5925 {
5926 if (TREE_CODE (ctx) == BLOCK)
5927 parent = BLOCK_DIE (ctx);
5928 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5929 /* Keep the 1:1 association during WPA. */
5930 && !flag_wpa)
5931 /* Otherwise all late annotations go to the main CU which
5932 imports the original CUs. */
5933 parent = comp_unit_die ();
5934 else if (TREE_CODE (ctx) == FUNCTION_DECL
5935 && TREE_CODE (decl) != PARM_DECL
5936 && TREE_CODE (decl) != BLOCK)
5937 /* Leave function local entities parent determination to when
5938 we process scope vars. */
5939 ;
5940 else
5941 parent = lookup_decl_die (ctx);
5942 }
5943 else
5944 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5945 Handle this case gracefully by globalizing stuff. */
5946 parent = comp_unit_die ();
5947 /* Create a DIE "stub". */
5948 switch (TREE_CODE (decl))
5949 {
5950 case TRANSLATION_UNIT_DECL:
5951 if (! flag_wpa)
5952 {
5953 die = comp_unit_die ();
5954 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5955 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5956 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5957 to create a DIE for the original CUs. */
5958 return;
5959 }
5960 /* Keep the 1:1 association during WPA. */
5961 die = new_die (DW_TAG_compile_unit, NULL, decl);
5962 break;
5963 case NAMESPACE_DECL:
5964 if (is_fortran (decl))
5965 die = new_die (DW_TAG_module, parent, decl);
5966 else
5967 die = new_die (DW_TAG_namespace, parent, decl);
5968 break;
5969 case FUNCTION_DECL:
5970 die = new_die (DW_TAG_subprogram, parent, decl);
5971 break;
5972 case VAR_DECL:
5973 die = new_die (DW_TAG_variable, parent, decl);
5974 break;
5975 case RESULT_DECL:
5976 die = new_die (DW_TAG_variable, parent, decl);
5977 break;
5978 case PARM_DECL:
5979 die = new_die (DW_TAG_formal_parameter, parent, decl);
5980 break;
5981 case CONST_DECL:
5982 die = new_die (DW_TAG_constant, parent, decl);
5983 break;
5984 case LABEL_DECL:
5985 die = new_die (DW_TAG_label, parent, decl);
5986 break;
5987 case BLOCK:
5988 die = new_die (DW_TAG_lexical_block, parent, decl);
5989 break;
5990 default:
5991 gcc_unreachable ();
5992 }
5993 if (TREE_CODE (decl) == BLOCK)
5994 BLOCK_DIE (decl) = die;
5995 else
5996 equate_decl_number_to_die (decl, die);
5997
5998 /* Add a reference to the DIE providing early debug at $sym + off. */
5999 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6000 }
6001
6002 /* Returns a hash value for X (which really is a var_loc_list). */
6003
6004 inline hashval_t
6005 decl_loc_hasher::hash (var_loc_list *x)
6006 {
6007 return (hashval_t) x->decl_id;
6008 }
6009
6010 /* Return nonzero if decl_id of var_loc_list X is the same as
6011 UID of decl *Y. */
6012
6013 inline bool
6014 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6015 {
6016 return (x->decl_id == DECL_UID (y));
6017 }
6018
6019 /* Return the var_loc list associated with a given declaration. */
6020
6021 static inline var_loc_list *
6022 lookup_decl_loc (const_tree decl)
6023 {
6024 if (!decl_loc_table)
6025 return NULL;
6026 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6027 }
6028
6029 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6030
6031 inline hashval_t
6032 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6033 {
6034 return (hashval_t) x->decl_id;
6035 }
6036
6037 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6038 UID of decl *Y. */
6039
6040 inline bool
6041 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6042 {
6043 return (x->decl_id == DECL_UID (y));
6044 }
6045
6046 /* Equate a DIE to a particular declaration. */
6047
6048 static void
6049 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6050 {
6051 unsigned int decl_id = DECL_UID (decl);
6052
6053 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6054 decl_die->decl_id = decl_id;
6055 }
6056
6057 /* Return how many bits the PIECE EXPR_LIST covers. */
6058
6059 static HOST_WIDE_INT
6060 decl_piece_bitsize (rtx piece)
6061 {
6062 int ret = (int) GET_MODE (piece);
6063 if (ret)
6064 return ret;
6065 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6066 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6067 return INTVAL (XEXP (XEXP (piece, 0), 0));
6068 }
6069
6070 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6071
6072 static rtx *
6073 decl_piece_varloc_ptr (rtx piece)
6074 {
6075 if ((int) GET_MODE (piece))
6076 return &XEXP (piece, 0);
6077 else
6078 return &XEXP (XEXP (piece, 0), 1);
6079 }
6080
6081 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6082 Next is the chain of following piece nodes. */
6083
6084 static rtx_expr_list *
6085 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6086 {
6087 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6088 return alloc_EXPR_LIST (bitsize, loc_note, next);
6089 else
6090 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6091 GEN_INT (bitsize),
6092 loc_note), next);
6093 }
6094
6095 /* Return rtx that should be stored into loc field for
6096 LOC_NOTE and BITPOS/BITSIZE. */
6097
6098 static rtx
6099 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6100 HOST_WIDE_INT bitsize)
6101 {
6102 if (bitsize != -1)
6103 {
6104 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6105 if (bitpos != 0)
6106 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6107 }
6108 return loc_note;
6109 }
6110
6111 /* This function either modifies location piece list *DEST in
6112 place (if SRC and INNER are NULL), or copies location piece list
6113 *SRC to *DEST while modifying it. Location BITPOS is modified
6114 to contain LOC_NOTE; any pieces overlapping it are removed (or
6115 not copied) and, if needed, some padding around it is added.
6116 When modifying in place, DEST should point to the EXPR_LIST where
6117 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6118 to the start of the whole list and INNER points to the EXPR_LIST
6119 where earlier pieces cover PIECE_BITPOS bits. */
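/* For instance (an illustrative in-place example): if *DEST describes a
piece of BITSIZE bits starting at PIECE_BITPOS and BITPOS == PIECE_BITPOS,
the piece's location note is simply replaced with LOC_NOTE. */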
6120
6121 static void
6122 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6123 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6124 HOST_WIDE_INT bitsize, rtx loc_note)
6125 {
6126 HOST_WIDE_INT diff;
6127 bool copy = inner != NULL;
6128
6129 if (copy)
6130 {
6131 /* First copy all nodes preceding the current bitpos. */
6132 while (src != inner)
6133 {
6134 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6135 decl_piece_bitsize (*src), NULL_RTX);
6136 dest = &XEXP (*dest, 1);
6137 src = &XEXP (*src, 1);
6138 }
6139 }
6140 /* Add padding if needed. */
6141 if (bitpos != piece_bitpos)
6142 {
6143 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6144 copy ? NULL_RTX : *dest);
6145 dest = &XEXP (*dest, 1);
6146 }
6147 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6148 {
6149 gcc_assert (!copy);
6150 /* A piece with the correct bitpos and bitsize already exists;
6151 just update the location for it and return. */
6152 *decl_piece_varloc_ptr (*dest) = loc_note;
6153 return;
6154 }
6155 /* Add the piece that changed. */
6156 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6157 dest = &XEXP (*dest, 1);
6158 /* Skip over pieces that overlap it. */
6159 diff = bitpos - piece_bitpos + bitsize;
6160 if (!copy)
6161 src = dest;
6162 while (diff > 0 && *src)
6163 {
6164 rtx piece = *src;
6165 diff -= decl_piece_bitsize (piece);
6166 if (copy)
6167 src = &XEXP (piece, 1);
6168 else
6169 {
6170 *src = XEXP (piece, 1);
6171 free_EXPR_LIST_node (piece);
6172 }
6173 }
6174 /* Add padding if needed. */
6175 if (diff < 0 && *src)
6176 {
6177 if (!copy)
6178 dest = src;
6179 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6180 dest = &XEXP (*dest, 1);
6181 }
6182 if (!copy)
6183 return;
6184 /* Finally copy all nodes following it. */
6185 while (*src)
6186 {
6187 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6188 decl_piece_bitsize (*src), NULL_RTX);
6189 dest = &XEXP (*dest, 1);
6190 src = &XEXP (*src, 1);
6191 }
6192 }
6193
6194 /* Add a variable location node to the linked list for DECL. */
6195
6196 static struct var_loc_node *
6197 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6198 {
6199 unsigned int decl_id;
6200 var_loc_list *temp;
6201 struct var_loc_node *loc = NULL;
6202 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6203
6204 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6205 {
6206 tree realdecl = DECL_DEBUG_EXPR (decl);
6207 if (handled_component_p (realdecl)
6208 || (TREE_CODE (realdecl) == MEM_REF
6209 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6210 {
6211 bool reverse;
6212 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6213 &bitsize, &reverse);
6214 if (!innerdecl
6215 || !DECL_P (innerdecl)
6216 || DECL_IGNORED_P (innerdecl)
6217 || TREE_STATIC (innerdecl)
6218 || bitsize == 0
6219 || bitpos + bitsize > 256)
6220 return NULL;
6221 decl = innerdecl;
6222 }
6223 }
6224
6225 decl_id = DECL_UID (decl);
6226 var_loc_list **slot
6227 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6228 if (*slot == NULL)
6229 {
6230 temp = ggc_cleared_alloc<var_loc_list> ();
6231 temp->decl_id = decl_id;
6232 *slot = temp;
6233 }
6234 else
6235 temp = *slot;
6236
6237 /* For PARM_DECLs try to keep around the original incoming value,
6238 even if that means we'll emit a zero-range .debug_loc entry. */
6239 if (temp->last
6240 && temp->first == temp->last
6241 && TREE_CODE (decl) == PARM_DECL
6242 && NOTE_P (temp->first->loc)
6243 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6244 && DECL_INCOMING_RTL (decl)
6245 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6246 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6247 == GET_CODE (DECL_INCOMING_RTL (decl))
6248 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6249 && (bitsize != -1
6250 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6251 NOTE_VAR_LOCATION_LOC (loc_note))
6252 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6253 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6254 {
6255 loc = ggc_cleared_alloc<var_loc_node> ();
6256 temp->first->next = loc;
6257 temp->last = loc;
6258 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6259 }
6260 else if (temp->last)
6261 {
6262 struct var_loc_node *last = temp->last, *unused = NULL;
6263 rtx *piece_loc = NULL, last_loc_note;
6264 HOST_WIDE_INT piece_bitpos = 0;
6265 if (last->next)
6266 {
6267 last = last->next;
6268 gcc_assert (last->next == NULL);
6269 }
6270 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6271 {
6272 piece_loc = &last->loc;
6273 do
6274 {
6275 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6276 if (piece_bitpos + cur_bitsize > bitpos)
6277 break;
6278 piece_bitpos += cur_bitsize;
6279 piece_loc = &XEXP (*piece_loc, 1);
6280 }
6281 while (*piece_loc);
6282 }
6283 /* TEMP->LAST here is a pointer to either the last-but-one or the
6284 last element in the chained list; LAST is a pointer to the
6285 last element. */
6286 if (label && strcmp (last->label, label) == 0 && last->view == view)
6287 {
6288 /* For SRA optimized variables if there weren't any real
6289 insns since last note, just modify the last node. */
6290 if (piece_loc != NULL)
6291 {
6292 adjust_piece_list (piece_loc, NULL, NULL,
6293 bitpos, piece_bitpos, bitsize, loc_note);
6294 return NULL;
6295 }
6296 /* If the last note doesn't cover any instructions, remove it. */
6297 if (temp->last != last)
6298 {
6299 temp->last->next = NULL;
6300 unused = last;
6301 last = temp->last;
6302 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6303 }
6304 else
6305 {
6306 gcc_assert (temp->first == temp->last
6307 || (temp->first->next == temp->last
6308 && TREE_CODE (decl) == PARM_DECL));
6309 memset (temp->last, '\0', sizeof (*temp->last));
6310 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6311 return temp->last;
6312 }
6313 }
6314 if (bitsize == -1 && NOTE_P (last->loc))
6315 last_loc_note = last->loc;
6316 else if (piece_loc != NULL
6317 && *piece_loc != NULL_RTX
6318 && piece_bitpos == bitpos
6319 && decl_piece_bitsize (*piece_loc) == bitsize)
6320 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6321 else
6322 last_loc_note = NULL_RTX;
6323 /* If the current location is the same as the end of the list,
6324 and either both or neither of the locations is uninitialized,
6325 we have nothing to do. */
6326 if (last_loc_note == NULL_RTX
6327 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6328 NOTE_VAR_LOCATION_LOC (loc_note)))
6329 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6330 != NOTE_VAR_LOCATION_STATUS (loc_note))
6331 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6332 == VAR_INIT_STATUS_UNINITIALIZED)
6333 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6334 == VAR_INIT_STATUS_UNINITIALIZED))))
6335 {
6336 /* Add LOC to the end of list and update LAST. If the last
6337 element of the list has been removed above, reuse its
6338 memory for the new node, otherwise allocate a new one. */
6339 if (unused)
6340 {
6341 loc = unused;
6342 memset (loc, '\0', sizeof (*loc));
6343 }
6344 else
6345 loc = ggc_cleared_alloc<var_loc_node> ();
6346 if (bitsize == -1 || piece_loc == NULL)
6347 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6348 else
6349 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6350 bitpos, piece_bitpos, bitsize, loc_note);
6351 last->next = loc;
6352 /* Ensure TEMP->LAST will point either to the new last but one
6353 element of the chain, or to the last element in it. */
6354 if (last != temp->last)
6355 temp->last = last;
6356 }
6357 else if (unused)
6358 ggc_free (unused);
6359 }
6360 else
6361 {
6362 loc = ggc_cleared_alloc<var_loc_node> ();
6363 temp->first = loc;
6364 temp->last = loc;
6365 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6366 }
6367 return loc;
6368 }
6369
6370 /* Keep track of the number of spaces used to indent the
6371 output of the debugging routines that print the structure of
6372 the DIE internal representation. */
6373 static int print_indent;
6374
6375 /* Indent the line the number of spaces given by print_indent. */
6376
6377 static inline void
6378 print_spaces (FILE *outfile)
6379 {
6380 fprintf (outfile, "%*s", print_indent, "");
6381 }
6382
6383 /* Print a type signature in hex. */
6384
6385 static inline void
6386 print_signature (FILE *outfile, char *sig)
6387 {
6388 int i;
6389
6390 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6391 fprintf (outfile, "%02x", sig[i] & 0xff);
6392 }
6393
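/* Print the discriminant value DISCR_VALUE to OUTFILE: as an unsigned
value if it is positive, as a signed value otherwise. */
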
6394 static inline void
6395 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6396 {
6397 if (discr_value->pos)
6398 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6399 else
6400 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6401 }
6402
6403 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6404
6405 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6406 RECURSE, output location descriptor operations. */
6407
6408 static void
6409 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6410 {
6411 switch (val->val_class)
6412 {
6413 case dw_val_class_addr:
6414 fprintf (outfile, "address");
6415 break;
6416 case dw_val_class_offset:
6417 fprintf (outfile, "offset");
6418 break;
6419 case dw_val_class_loc:
6420 fprintf (outfile, "location descriptor");
6421 if (val->v.val_loc == NULL)
6422 fprintf (outfile, " -> <null>\n");
6423 else if (recurse)
6424 {
6425 fprintf (outfile, ":\n");
6426 print_indent += 4;
6427 print_loc_descr (val->v.val_loc, outfile);
6428 print_indent -= 4;
6429 }
6430 else
6431 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6432 break;
6433 case dw_val_class_loc_list:
6434 fprintf (outfile, "location list -> label:%s",
6435 val->v.val_loc_list->ll_symbol);
6436 break;
6437 case dw_val_class_view_list:
6438 val = view_list_to_loc_list_val_node (val);
6439 fprintf (outfile, "location list with views -> labels:%s and %s",
6440 val->v.val_loc_list->ll_symbol,
6441 val->v.val_loc_list->vl_symbol);
6442 break;
6443 case dw_val_class_range_list:
6444 fprintf (outfile, "range list");
6445 break;
6446 case dw_val_class_const:
6447 case dw_val_class_const_implicit:
6448 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6449 break;
6450 case dw_val_class_unsigned_const:
6451 case dw_val_class_unsigned_const_implicit:
6452 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6453 break;
6454 case dw_val_class_const_double:
6455 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6456 HOST_WIDE_INT_PRINT_UNSIGNED")",
6457 val->v.val_double.high,
6458 val->v.val_double.low);
6459 break;
6460 case dw_val_class_wide_int:
6461 {
6462 int i = val->v.val_wide->get_len ();
6463 fprintf (outfile, "constant (");
6464 gcc_assert (i > 0);
6465 if (val->v.val_wide->elt (i - 1) == 0)
6466 fprintf (outfile, "0x");
6467 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6468 val->v.val_wide->elt (--i));
6469 while (--i >= 0)
6470 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6471 val->v.val_wide->elt (i));
6472 fprintf (outfile, ")");
6473 break;
6474 }
6475 case dw_val_class_vec:
6476 fprintf (outfile, "floating-point or vector constant");
6477 break;
6478 case dw_val_class_flag:
6479 fprintf (outfile, "%u", val->v.val_flag);
6480 break;
6481 case dw_val_class_die_ref:
6482 if (val->v.val_die_ref.die != NULL)
6483 {
6484 dw_die_ref die = val->v.val_die_ref.die;
6485
6486 if (die->comdat_type_p)
6487 {
6488 fprintf (outfile, "die -> signature: ");
6489 print_signature (outfile,
6490 die->die_id.die_type_node->signature);
6491 }
6492 else if (die->die_id.die_symbol)
6493 {
6494 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6495 if (die->with_offset)
6496 fprintf (outfile, " + %ld", die->die_offset);
6497 }
6498 else
6499 fprintf (outfile, "die -> %ld", die->die_offset);
6500 fprintf (outfile, " (%p)", (void *) die);
6501 }
6502 else
6503 fprintf (outfile, "die -> <null>");
6504 break;
6505 case dw_val_class_vms_delta:
6506 fprintf (outfile, "delta: @slotcount(%s-%s)",
6507 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6508 break;
6509 case dw_val_class_symview:
6510 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6511 break;
6512 case dw_val_class_lbl_id:
6513 case dw_val_class_lineptr:
6514 case dw_val_class_macptr:
6515 case dw_val_class_loclistsptr:
6516 case dw_val_class_high_pc:
6517 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6518 break;
6519 case dw_val_class_str:
6520 if (val->v.val_str->str != NULL)
6521 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6522 else
6523 fprintf (outfile, "<null>");
6524 break;
6525 case dw_val_class_file:
6526 case dw_val_class_file_implicit:
6527 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6528 val->v.val_file->emitted_number);
6529 break;
6530 case dw_val_class_data8:
6531 {
6532 int i;
6533
6534 for (i = 0; i < 8; i++)
6535 fprintf (outfile, "%02x", val->v.val_data8[i]);
6536 break;
6537 }
6538 case dw_val_class_discr_value:
6539 print_discr_value (outfile, &val->v.val_discr_value);
6540 break;
6541 case dw_val_class_discr_list:
6542 for (dw_discr_list_ref node = val->v.val_discr_list;
6543 node != NULL;
6544 node = node->dw_discr_next)
6545 {
6546 if (node->dw_discr_range)
6547 {
6548 print_discr_value (outfile, &node->dw_discr_lower_bound);
6549 fprintf (outfile, " .. ");
6550 print_discr_value (outfile, &node->dw_discr_upper_bound);
6551 }
6552 else
6553 print_discr_value (outfile, &node->dw_discr_lower_bound);
6554
6555 if (node->dw_discr_next != NULL)
6556 fprintf (outfile, " | ");
6557 }
6558 default:
6559 break;
6560 }
6561 }
6562
6563 /* Likewise, for a DIE attribute. */
6564
6565 static void
6566 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6567 {
6568 print_dw_val (&a->dw_attr_val, recurse, outfile);
6569 }
6570
6571
6572 /* Print the list of operands in the LOC location description to OUTFILE. This
6573 routine is a debugging aid only. */
6574
6575 static void
6576 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6577 {
6578 dw_loc_descr_ref l = loc;
6579
6580 if (loc == NULL)
6581 {
6582 print_spaces (outfile);
6583 fprintf (outfile, "<null>\n");
6584 return;
6585 }
6586
6587 for (l = loc; l != NULL; l = l->dw_loc_next)
6588 {
6589 print_spaces (outfile);
6590 fprintf (outfile, "(%p) %s",
6591 (void *) l,
6592 dwarf_stack_op_name (l->dw_loc_opc));
6593 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6594 {
6595 fprintf (outfile, " ");
6596 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6597 }
6598 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6599 {
6600 fprintf (outfile, ", ");
6601 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6602 }
6603 fprintf (outfile, "\n");
6604 }
6605 }
6606
6607 /* Print the information associated with a given DIE, and its children.
6608 This routine is a debugging aid only. */
6609
6610 static void
6611 print_die (dw_die_ref die, FILE *outfile)
6612 {
6613 dw_attr_node *a;
6614 dw_die_ref c;
6615 unsigned ix;
6616
6617 print_spaces (outfile);
6618 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6619 die->die_offset, dwarf_tag_name (die->die_tag),
6620 (void*) die);
6621 print_spaces (outfile);
6622 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6623 fprintf (outfile, " offset: %ld", die->die_offset);
6624 fprintf (outfile, " mark: %d\n", die->die_mark);
6625
6626 if (die->comdat_type_p)
6627 {
6628 print_spaces (outfile);
6629 fprintf (outfile, " signature: ");
6630 print_signature (outfile, die->die_id.die_type_node->signature);
6631 fprintf (outfile, "\n");
6632 }
6633
6634 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6635 {
6636 print_spaces (outfile);
6637 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6638
6639 print_attribute (a, true, outfile);
6640 fprintf (outfile, "\n");
6641 }
6642
6643 if (die->die_child != NULL)
6644 {
6645 print_indent += 4;
6646 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6647 print_indent -= 4;
6648 }
6649 if (print_indent == 0)
6650 fprintf (outfile, "\n");
6651 }
6652
6653 /* Print the list of operations in the LOC location description. */
6654
6655 DEBUG_FUNCTION void
6656 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6657 {
6658 print_loc_descr (loc, stderr);
6659 }
6660
6661 /* Print the information collected for a given DIE. */
6662
6663 DEBUG_FUNCTION void
6664 debug_dwarf_die (dw_die_ref die)
6665 {
6666 print_die (die, stderr);
6667 }
6668
6669 DEBUG_FUNCTION void
6670 debug (die_struct &ref)
6671 {
6672 print_die (&ref, stderr);
6673 }
6674
6675 DEBUG_FUNCTION void
6676 debug (die_struct *ptr)
6677 {
6678 if (ptr)
6679 debug (*ptr);
6680 else
6681 fprintf (stderr, "<nil>\n");
6682 }
6683
6684
6685 /* Print all DWARF information collected for the compilation unit.
6686 This routine is a debugging aid only. */
6687
6688 DEBUG_FUNCTION void
6689 debug_dwarf (void)
6690 {
6691 print_indent = 0;
6692 print_die (comp_unit_die (), stderr);
6693 }
6694
6695 /* Verify the DIE tree structure. */
6696
6697 DEBUG_FUNCTION void
6698 verify_die (dw_die_ref die)
6699 {
6700 gcc_assert (!die->die_mark);
6701 if (die->die_parent == NULL
6702 && die->die_sib == NULL)
6703 return;
6704 /* Verify the die_sib list is cyclic. */
6705 dw_die_ref x = die;
6706 do
6707 {
6708 x->die_mark = 1;
6709 x = x->die_sib;
6710 }
6711 while (x && !x->die_mark);
6712 gcc_assert (x == die);
6713 x = die;
6714 do
6715 {
6716 /* Verify all dies have the same parent. */
6717 gcc_assert (x->die_parent == die->die_parent);
6718 if (x->die_child)
6719 {
6720 /* Verify the child has the proper parent and recurse. */
6721 gcc_assert (x->die_child->die_parent == x);
6722 verify_die (x->die_child);
6723 }
6724 x->die_mark = 0;
6725 x = x->die_sib;
6726 }
6727 while (x && x->die_mark);
6728 }
6729
6730 /* Sanity checks on DIEs. */
6731
6732 static void
6733 check_die (dw_die_ref die)
6734 {
6735 unsigned ix;
6736 dw_attr_node *a;
6737 bool inline_found = false;
6738 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6739 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6740 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6741 {
6742 switch (a->dw_attr)
6743 {
6744 case DW_AT_inline:
6745 if (a->dw_attr_val.v.val_unsigned)
6746 inline_found = true;
6747 break;
6748 case DW_AT_location:
6749 ++n_location;
6750 break;
6751 case DW_AT_low_pc:
6752 ++n_low_pc;
6753 break;
6754 case DW_AT_high_pc:
6755 ++n_high_pc;
6756 break;
6757 case DW_AT_artificial:
6758 ++n_artificial;
6759 break;
6760 case DW_AT_decl_column:
6761 ++n_decl_column;
6762 break;
6763 case DW_AT_decl_line:
6764 ++n_decl_line;
6765 break;
6766 case DW_AT_decl_file:
6767 ++n_decl_file;
6768 break;
6769 default:
6770 break;
6771 }
6772 }
6773 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6774 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6775 {
6776 fprintf (stderr, "Duplicate attributes in DIE:\n");
6777 debug_dwarf_die (die);
6778 gcc_unreachable ();
6779 }
6780 if (inline_found)
6781 {
6782 /* A debugging information entry that is a member of an abstract
6783 instance tree [that has DW_AT_inline] should not contain any
6784 attributes which describe aspects of the subroutine which vary
6785 between distinct inlined expansions or distinct out-of-line
6786 expansions. */
6787 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6788 gcc_assert (a->dw_attr != DW_AT_low_pc
6789 && a->dw_attr != DW_AT_high_pc
6790 && a->dw_attr != DW_AT_location
6791 && a->dw_attr != DW_AT_frame_base
6792 && a->dw_attr != DW_AT_call_all_calls
6793 && a->dw_attr != DW_AT_GNU_all_call_sites);
6794 }
6795 }
6796
6797 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6798 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6799 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6800
6801 /* Calculate the checksum of a location expression. */
6802
6803 static inline void
6804 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6805 {
6806 int tem;
6807 inchash::hash hstate;
6808 hashval_t hash;
6809
6810 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6811 CHECKSUM (tem);
6812 hash_loc_operands (loc, hstate);
6813 hash = hstate.end();
6814 CHECKSUM (hash);
6815 }
6816
6817 /* Calculate the checksum of an attribute. */
6818
6819 static void
6820 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6821 {
6822 dw_loc_descr_ref loc;
6823 rtx r;
6824
6825 CHECKSUM (at->dw_attr);
6826
6827 /* We don't care that this was compiled with a different compiler
6828 snapshot; if the output is the same, that's what matters. */
6829 if (at->dw_attr == DW_AT_producer)
6830 return;
6831
6832 switch (AT_class (at))
6833 {
6834 case dw_val_class_const:
6835 case dw_val_class_const_implicit:
6836 CHECKSUM (at->dw_attr_val.v.val_int);
6837 break;
6838 case dw_val_class_unsigned_const:
6839 case dw_val_class_unsigned_const_implicit:
6840 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6841 break;
6842 case dw_val_class_const_double:
6843 CHECKSUM (at->dw_attr_val.v.val_double);
6844 break;
6845 case dw_val_class_wide_int:
6846 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6847 get_full_len (*at->dw_attr_val.v.val_wide)
6848 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6849 break;
6850 case dw_val_class_vec:
6851 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6852 (at->dw_attr_val.v.val_vec.length
6853 * at->dw_attr_val.v.val_vec.elt_size));
6854 break;
6855 case dw_val_class_flag:
6856 CHECKSUM (at->dw_attr_val.v.val_flag);
6857 break;
6858 case dw_val_class_str:
6859 CHECKSUM_STRING (AT_string (at));
6860 break;
6861
6862 case dw_val_class_addr:
6863 r = AT_addr (at);
6864 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6865 CHECKSUM_STRING (XSTR (r, 0));
6866 break;
6867
6868 case dw_val_class_offset:
6869 CHECKSUM (at->dw_attr_val.v.val_offset);
6870 break;
6871
6872 case dw_val_class_loc:
6873 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6874 loc_checksum (loc, ctx);
6875 break;
6876
6877 case dw_val_class_die_ref:
6878 die_checksum (AT_ref (at), ctx, mark);
6879 break;
6880
6881 case dw_val_class_fde_ref:
6882 case dw_val_class_vms_delta:
6883 case dw_val_class_symview:
6884 case dw_val_class_lbl_id:
6885 case dw_val_class_lineptr:
6886 case dw_val_class_macptr:
6887 case dw_val_class_loclistsptr:
6888 case dw_val_class_high_pc:
6889 break;
6890
6891 case dw_val_class_file:
6892 case dw_val_class_file_implicit:
6893 CHECKSUM_STRING (AT_file (at)->filename);
6894 break;
6895
6896 case dw_val_class_data8:
6897 CHECKSUM (at->dw_attr_val.v.val_data8);
6898 break;
6899
6900 default:
6901 break;
6902 }
6903 }
6904
6905 /* Calculate the checksum of a DIE. */
6906
6907 static void
6908 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6909 {
6910 dw_die_ref c;
6911 dw_attr_node *a;
6912 unsigned ix;
6913
6914 /* To avoid infinite recursion. */
6915 if (die->die_mark)
6916 {
6917 CHECKSUM (die->die_mark);
6918 return;
6919 }
6920 die->die_mark = ++(*mark);
6921
6922 CHECKSUM (die->die_tag);
6923
6924 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6925 attr_checksum (a, ctx, mark);
6926
6927 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6928 }
6929
6930 #undef CHECKSUM
6931 #undef CHECKSUM_BLOCK
6932 #undef CHECKSUM_STRING
6933
6934 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6935 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6936 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6937 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6938 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6939 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6940 #define CHECKSUM_ATTR(FOO) \
6941 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6942
6943 /* Calculate the checksum of a number in signed LEB128 format. */
6944
6945 static void
6946 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6947 {
6948 unsigned char byte;
6949 bool more;
6950
6951 while (1)
6952 {
6953 byte = (value & 0x7f);
6954 value >>= 7;
6955 more = !((value == 0 && (byte & 0x40) == 0)
6956 || (value == -1 && (byte & 0x40) != 0));
6957 if (more)
6958 byte |= 0x80;
6959 CHECKSUM (byte);
6960 if (!more)
6961 break;
6962 }
6963 }
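/* For example, under this encoding 2 contributes the single byte 0x02,
   -2 the single byte 0x7e, and -129 the two bytes 0xff 0x7e: low-order
   7-bit groups come first, bit 7 marks a continuation, and bit 6 of the
   final byte carries the sign. */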
6964
6965 /* Calculate the checksum of a number in unsigned LEB128 format. */
6966
6967 static void
6968 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6969 {
6970 while (1)
6971 {
6972 unsigned char byte = (value & 0x7f);
6973 value >>= 7;
6974 if (value != 0)
6975 /* More bytes to follow. */
6976 byte |= 0x80;
6977 CHECKSUM (byte);
6978 if (value == 0)
6979 break;
6980 }
6981 }
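/* For example, 2 contributes the single byte 0x02, 127 the byte 0x7f,
   128 the two bytes 0x80 0x01, and 624485 the bytes 0xe5 0x8e 0x26. */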
6982
6983 /* Checksum the context of the DIE. This adds the names of any
6984 surrounding namespaces or structures to the checksum. */
6985
6986 static void
6987 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6988 {
6989 const char *name;
6990 dw_die_ref spec;
6991 int tag = die->die_tag;
6992
6993 if (tag != DW_TAG_namespace
6994 && tag != DW_TAG_structure_type
6995 && tag != DW_TAG_class_type)
6996 return;
6997
6998 name = get_AT_string (die, DW_AT_name);
6999
7000 spec = get_AT_ref (die, DW_AT_specification);
7001 if (spec != NULL)
7002 die = spec;
7003
7004 if (die->die_parent != NULL)
7005 checksum_die_context (die->die_parent, ctx);
7006
7007 CHECKSUM_ULEB128 ('C');
7008 CHECKSUM_ULEB128 (tag);
7009 if (name != NULL)
7010 CHECKSUM_STRING (name);
7011 }
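/* As a sketch of the effect: for a type nested as N::S::T, checksumming T's
   context walks outward first, so the bytes folded in are 'C',
   DW_TAG_namespace, "N", then 'C', DW_TAG_structure_type, "S". A parent
   that is not a namespace, structure or class ends the walk and
   contributes nothing. */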
7012
7013 /* Calculate the checksum of a location expression. */
7014
7015 static inline void
7016 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7017 {
7018 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7019 were emitted as a DW_FORM_sdata instead of a location expression. */
7020 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7021 {
7022 CHECKSUM_ULEB128 (DW_FORM_sdata);
7023 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7024 return;
7025 }
7026
7027 /* Otherwise, just checksum the raw location expression. */
7028 while (loc != NULL)
7029 {
7030 inchash::hash hstate;
7031 hashval_t hash;
7032
7033 CHECKSUM_ULEB128 (loc->dtprel);
7034 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7035 hash_loc_operands (loc, hstate);
7036 hash = hstate.end ();
7037 CHECKSUM (hash);
7038 loc = loc->dw_loc_next;
7039 }
7040 }
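/* A practical consequence of the special case above: a member whose
   DW_AT_data_member_location is the lone expression DW_OP_plus_uconst 8
   hashes exactly like one whose location is the plain constant 8 in
   DW_FORM_sdata, so the two spellings do not change the type signature. */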
7041
7042 /* Calculate the checksum of an attribute. */
7043
7044 static void
7045 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7046 struct md5_ctx *ctx, int *mark)
7047 {
7048 dw_loc_descr_ref loc;
7049 rtx r;
7050
7051 if (AT_class (at) == dw_val_class_die_ref)
7052 {
7053 dw_die_ref target_die = AT_ref (at);
7054
7055 /* For pointer and reference types, we checksum only the (qualified)
7056 name of the target type (if there is a name). For friend entries,
7057 we checksum only the (qualified) name of the target type or function.
7058 This allows the checksum to remain the same whether the target type
7059 is complete or not. */
7060 if ((at->dw_attr == DW_AT_type
7061 && (tag == DW_TAG_pointer_type
7062 || tag == DW_TAG_reference_type
7063 || tag == DW_TAG_rvalue_reference_type
7064 || tag == DW_TAG_ptr_to_member_type))
7065 || (at->dw_attr == DW_AT_friend
7066 && tag == DW_TAG_friend))
7067 {
7068 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7069
7070 if (name_attr != NULL)
7071 {
7072 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7073
7074 if (decl == NULL)
7075 decl = target_die;
7076 CHECKSUM_ULEB128 ('N');
7077 CHECKSUM_ULEB128 (at->dw_attr);
7078 if (decl->die_parent != NULL)
7079 checksum_die_context (decl->die_parent, ctx);
7080 CHECKSUM_ULEB128 ('E');
7081 CHECKSUM_STRING (AT_string (name_attr));
7082 return;
7083 }
7084 }
7085
7086 /* For all other references to another DIE, we check to see if the
7087 target DIE has already been visited. If it has, we emit a
7088 backward reference; if not, we descend recursively. */
7089 if (target_die->die_mark > 0)
7090 {
7091 CHECKSUM_ULEB128 ('R');
7092 CHECKSUM_ULEB128 (at->dw_attr);
7093 CHECKSUM_ULEB128 (target_die->die_mark);
7094 }
7095 else
7096 {
7097 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7098
7099 if (decl == NULL)
7100 decl = target_die;
7101 target_die->die_mark = ++(*mark);
7102 CHECKSUM_ULEB128 ('T');
7103 CHECKSUM_ULEB128 (at->dw_attr);
7104 if (decl->die_parent != NULL)
7105 checksum_die_context (decl->die_parent, ctx);
7106 die_checksum_ordered (target_die, ctx, mark);
7107 }
7108 return;
7109 }
7110
7111 CHECKSUM_ULEB128 ('A');
7112 CHECKSUM_ULEB128 (at->dw_attr);
7113
7114 switch (AT_class (at))
7115 {
7116 case dw_val_class_const:
7117 case dw_val_class_const_implicit:
7118 CHECKSUM_ULEB128 (DW_FORM_sdata);
7119 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7120 break;
7121
7122 case dw_val_class_unsigned_const:
7123 case dw_val_class_unsigned_const_implicit:
7124 CHECKSUM_ULEB128 (DW_FORM_sdata);
7125 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7126 break;
7127
7128 case dw_val_class_const_double:
7129 CHECKSUM_ULEB128 (DW_FORM_block);
7130 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7131 CHECKSUM (at->dw_attr_val.v.val_double);
7132 break;
7133
7134 case dw_val_class_wide_int:
7135 CHECKSUM_ULEB128 (DW_FORM_block);
7136 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7137 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7138 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7139 get_full_len (*at->dw_attr_val.v.val_wide)
7140 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7141 break;
7142
7143 case dw_val_class_vec:
7144 CHECKSUM_ULEB128 (DW_FORM_block);
7145 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7146 * at->dw_attr_val.v.val_vec.elt_size);
7147 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7148 (at->dw_attr_val.v.val_vec.length
7149 * at->dw_attr_val.v.val_vec.elt_size));
7150 break;
7151
7152 case dw_val_class_flag:
7153 CHECKSUM_ULEB128 (DW_FORM_flag);
7154 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7155 break;
7156
7157 case dw_val_class_str:
7158 CHECKSUM_ULEB128 (DW_FORM_string);
7159 CHECKSUM_STRING (AT_string (at));
7160 break;
7161
7162 case dw_val_class_addr:
7163 r = AT_addr (at);
7164 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7165 CHECKSUM_ULEB128 (DW_FORM_string);
7166 CHECKSUM_STRING (XSTR (r, 0));
7167 break;
7168
7169 case dw_val_class_offset:
7170 CHECKSUM_ULEB128 (DW_FORM_sdata);
7171 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7172 break;
7173
7174 case dw_val_class_loc:
7175 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7176 loc_checksum_ordered (loc, ctx);
7177 break;
7178
7179 case dw_val_class_fde_ref:
7180 case dw_val_class_symview:
7181 case dw_val_class_lbl_id:
7182 case dw_val_class_lineptr:
7183 case dw_val_class_macptr:
7184 case dw_val_class_loclistsptr:
7185 case dw_val_class_high_pc:
7186 break;
7187
7188 case dw_val_class_file:
7189 case dw_val_class_file_implicit:
7190 CHECKSUM_ULEB128 (DW_FORM_string);
7191 CHECKSUM_STRING (AT_file (at)->filename);
7192 break;
7193
7194 case dw_val_class_data8:
7195 CHECKSUM (at->dw_attr_val.v.val_data8);
7196 break;
7197
7198 default:
7199 break;
7200 }
7201 }
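/* As an illustration: a DW_AT_type attribute on a pointer DIE whose target
   is "struct foo" folds in just 'N', the attribute code, foo's enclosing
   context, 'E' and the string "foo", so the signature stays the same
   whether or not foo is complete. The same attribute on a typedef instead
   emits either a back reference ('R' plus the target's mark) or a full
   recursive checksum ('T' ...), depending on whether the target DIE has
   already been visited. */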
7202
7203 struct checksum_attributes
7204 {
7205 dw_attr_node *at_name;
7206 dw_attr_node *at_type;
7207 dw_attr_node *at_friend;
7208 dw_attr_node *at_accessibility;
7209 dw_attr_node *at_address_class;
7210 dw_attr_node *at_alignment;
7211 dw_attr_node *at_allocated;
7212 dw_attr_node *at_artificial;
7213 dw_attr_node *at_associated;
7214 dw_attr_node *at_binary_scale;
7215 dw_attr_node *at_bit_offset;
7216 dw_attr_node *at_bit_size;
7217 dw_attr_node *at_bit_stride;
7218 dw_attr_node *at_byte_size;
7219 dw_attr_node *at_byte_stride;
7220 dw_attr_node *at_const_value;
7221 dw_attr_node *at_containing_type;
7222 dw_attr_node *at_count;
7223 dw_attr_node *at_data_location;
7224 dw_attr_node *at_data_member_location;
7225 dw_attr_node *at_decimal_scale;
7226 dw_attr_node *at_decimal_sign;
7227 dw_attr_node *at_default_value;
7228 dw_attr_node *at_digit_count;
7229 dw_attr_node *at_discr;
7230 dw_attr_node *at_discr_list;
7231 dw_attr_node *at_discr_value;
7232 dw_attr_node *at_encoding;
7233 dw_attr_node *at_endianity;
7234 dw_attr_node *at_explicit;
7235 dw_attr_node *at_is_optional;
7236 dw_attr_node *at_location;
7237 dw_attr_node *at_lower_bound;
7238 dw_attr_node *at_mutable;
7239 dw_attr_node *at_ordering;
7240 dw_attr_node *at_picture_string;
7241 dw_attr_node *at_prototyped;
7242 dw_attr_node *at_small;
7243 dw_attr_node *at_segment;
7244 dw_attr_node *at_string_length;
7245 dw_attr_node *at_string_length_bit_size;
7246 dw_attr_node *at_string_length_byte_size;
7247 dw_attr_node *at_threads_scaled;
7248 dw_attr_node *at_upper_bound;
7249 dw_attr_node *at_use_location;
7250 dw_attr_node *at_use_UTF8;
7251 dw_attr_node *at_variable_parameter;
7252 dw_attr_node *at_virtuality;
7253 dw_attr_node *at_visibility;
7254 dw_attr_node *at_vtable_elem_location;
7255 };
7256
7257 /* Collect the attributes that we will want to use for the checksum. */
7258
7259 static void
7260 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7261 {
7262 dw_attr_node *a;
7263 unsigned ix;
7264
7265 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7266 {
7267 switch (a->dw_attr)
7268 {
7269 case DW_AT_name:
7270 attrs->at_name = a;
7271 break;
7272 case DW_AT_type:
7273 attrs->at_type = a;
7274 break;
7275 case DW_AT_friend:
7276 attrs->at_friend = a;
7277 break;
7278 case DW_AT_accessibility:
7279 attrs->at_accessibility = a;
7280 break;
7281 case DW_AT_address_class:
7282 attrs->at_address_class = a;
7283 break;
7284 case DW_AT_alignment:
7285 attrs->at_alignment = a;
7286 break;
7287 case DW_AT_allocated:
7288 attrs->at_allocated = a;
7289 break;
7290 case DW_AT_artificial:
7291 attrs->at_artificial = a;
7292 break;
7293 case DW_AT_associated:
7294 attrs->at_associated = a;
7295 break;
7296 case DW_AT_binary_scale:
7297 attrs->at_binary_scale = a;
7298 break;
7299 case DW_AT_bit_offset:
7300 attrs->at_bit_offset = a;
7301 break;
7302 case DW_AT_bit_size:
7303 attrs->at_bit_size = a;
7304 break;
7305 case DW_AT_bit_stride:
7306 attrs->at_bit_stride = a;
7307 break;
7308 case DW_AT_byte_size:
7309 attrs->at_byte_size = a;
7310 break;
7311 case DW_AT_byte_stride:
7312 attrs->at_byte_stride = a;
7313 break;
7314 case DW_AT_const_value:
7315 attrs->at_const_value = a;
7316 break;
7317 case DW_AT_containing_type:
7318 attrs->at_containing_type = a;
7319 break;
7320 case DW_AT_count:
7321 attrs->at_count = a;
7322 break;
7323 case DW_AT_data_location:
7324 attrs->at_data_location = a;
7325 break;
7326 case DW_AT_data_member_location:
7327 attrs->at_data_member_location = a;
7328 break;
7329 case DW_AT_decimal_scale:
7330 attrs->at_decimal_scale = a;
7331 break;
7332 case DW_AT_decimal_sign:
7333 attrs->at_decimal_sign = a;
7334 break;
7335 case DW_AT_default_value:
7336 attrs->at_default_value = a;
7337 break;
7338 case DW_AT_digit_count:
7339 attrs->at_digit_count = a;
7340 break;
7341 case DW_AT_discr:
7342 attrs->at_discr = a;
7343 break;
7344 case DW_AT_discr_list:
7345 attrs->at_discr_list = a;
7346 break;
7347 case DW_AT_discr_value:
7348 attrs->at_discr_value = a;
7349 break;
7350 case DW_AT_encoding:
7351 attrs->at_encoding = a;
7352 break;
7353 case DW_AT_endianity:
7354 attrs->at_endianity = a;
7355 break;
7356 case DW_AT_explicit:
7357 attrs->at_explicit = a;
7358 break;
7359 case DW_AT_is_optional:
7360 attrs->at_is_optional = a;
7361 break;
7362 case DW_AT_location:
7363 attrs->at_location = a;
7364 break;
7365 case DW_AT_lower_bound:
7366 attrs->at_lower_bound = a;
7367 break;
7368 case DW_AT_mutable:
7369 attrs->at_mutable = a;
7370 break;
7371 case DW_AT_ordering:
7372 attrs->at_ordering = a;
7373 break;
7374 case DW_AT_picture_string:
7375 attrs->at_picture_string = a;
7376 break;
7377 case DW_AT_prototyped:
7378 attrs->at_prototyped = a;
7379 break;
7380 case DW_AT_small:
7381 attrs->at_small = a;
7382 break;
7383 case DW_AT_segment:
7384 attrs->at_segment = a;
7385 break;
7386 case DW_AT_string_length:
7387 attrs->at_string_length = a;
7388 break;
7389 case DW_AT_string_length_bit_size:
7390 attrs->at_string_length_bit_size = a;
7391 break;
7392 case DW_AT_string_length_byte_size:
7393 attrs->at_string_length_byte_size = a;
7394 break;
7395 case DW_AT_threads_scaled:
7396 attrs->at_threads_scaled = a;
7397 break;
7398 case DW_AT_upper_bound:
7399 attrs->at_upper_bound = a;
7400 break;
7401 case DW_AT_use_location:
7402 attrs->at_use_location = a;
7403 break;
7404 case DW_AT_use_UTF8:
7405 attrs->at_use_UTF8 = a;
7406 break;
7407 case DW_AT_variable_parameter:
7408 attrs->at_variable_parameter = a;
7409 break;
7410 case DW_AT_virtuality:
7411 attrs->at_virtuality = a;
7412 break;
7413 case DW_AT_visibility:
7414 attrs->at_visibility = a;
7415 break;
7416 case DW_AT_vtable_elem_location:
7417 attrs->at_vtable_elem_location = a;
7418 break;
7419 default:
7420 break;
7421 }
7422 }
7423 }
7424
7425 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7426
7427 static void
7428 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7429 {
7430 dw_die_ref c;
7431 dw_die_ref decl;
7432 struct checksum_attributes attrs;
7433
7434 CHECKSUM_ULEB128 ('D');
7435 CHECKSUM_ULEB128 (die->die_tag);
7436
7437 memset (&attrs, 0, sizeof (attrs));
7438
7439 decl = get_AT_ref (die, DW_AT_specification);
7440 if (decl != NULL)
7441 collect_checksum_attributes (&attrs, decl);
7442 collect_checksum_attributes (&attrs, die);
7443
7444 CHECKSUM_ATTR (attrs.at_name);
7445 CHECKSUM_ATTR (attrs.at_accessibility);
7446 CHECKSUM_ATTR (attrs.at_address_class);
7447 CHECKSUM_ATTR (attrs.at_allocated);
7448 CHECKSUM_ATTR (attrs.at_artificial);
7449 CHECKSUM_ATTR (attrs.at_associated);
7450 CHECKSUM_ATTR (attrs.at_binary_scale);
7451 CHECKSUM_ATTR (attrs.at_bit_offset);
7452 CHECKSUM_ATTR (attrs.at_bit_size);
7453 CHECKSUM_ATTR (attrs.at_bit_stride);
7454 CHECKSUM_ATTR (attrs.at_byte_size);
7455 CHECKSUM_ATTR (attrs.at_byte_stride);
7456 CHECKSUM_ATTR (attrs.at_const_value);
7457 CHECKSUM_ATTR (attrs.at_containing_type);
7458 CHECKSUM_ATTR (attrs.at_count);
7459 CHECKSUM_ATTR (attrs.at_data_location);
7460 CHECKSUM_ATTR (attrs.at_data_member_location);
7461 CHECKSUM_ATTR (attrs.at_decimal_scale);
7462 CHECKSUM_ATTR (attrs.at_decimal_sign);
7463 CHECKSUM_ATTR (attrs.at_default_value);
7464 CHECKSUM_ATTR (attrs.at_digit_count);
7465 CHECKSUM_ATTR (attrs.at_discr);
7466 CHECKSUM_ATTR (attrs.at_discr_list);
7467 CHECKSUM_ATTR (attrs.at_discr_value);
7468 CHECKSUM_ATTR (attrs.at_encoding);
7469 CHECKSUM_ATTR (attrs.at_endianity);
7470 CHECKSUM_ATTR (attrs.at_explicit);
7471 CHECKSUM_ATTR (attrs.at_is_optional);
7472 CHECKSUM_ATTR (attrs.at_location);
7473 CHECKSUM_ATTR (attrs.at_lower_bound);
7474 CHECKSUM_ATTR (attrs.at_mutable);
7475 CHECKSUM_ATTR (attrs.at_ordering);
7476 CHECKSUM_ATTR (attrs.at_picture_string);
7477 CHECKSUM_ATTR (attrs.at_prototyped);
7478 CHECKSUM_ATTR (attrs.at_small);
7479 CHECKSUM_ATTR (attrs.at_segment);
7480 CHECKSUM_ATTR (attrs.at_string_length);
7481 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7482 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7483 CHECKSUM_ATTR (attrs.at_threads_scaled);
7484 CHECKSUM_ATTR (attrs.at_upper_bound);
7485 CHECKSUM_ATTR (attrs.at_use_location);
7486 CHECKSUM_ATTR (attrs.at_use_UTF8);
7487 CHECKSUM_ATTR (attrs.at_variable_parameter);
7488 CHECKSUM_ATTR (attrs.at_virtuality);
7489 CHECKSUM_ATTR (attrs.at_visibility);
7490 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7491 CHECKSUM_ATTR (attrs.at_type);
7492 CHECKSUM_ATTR (attrs.at_friend);
7493 CHECKSUM_ATTR (attrs.at_alignment);
7494
7495 /* Checksum the child DIEs. */
7496 c = die->die_child;
7497 if (c) do {
7498 dw_attr_node *name_attr;
7499
7500 c = c->die_sib;
7501 name_attr = get_AT (c, DW_AT_name);
7502 if (is_template_instantiation (c))
7503 {
7504 /* Ignore instantiations of member type and function templates. */
7505 }
7506 else if (name_attr != NULL
7507 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7508 {
7509 /* Use a shallow checksum for named nested types and member
7510 functions. */
7511 CHECKSUM_ULEB128 ('S');
7512 CHECKSUM_ULEB128 (c->die_tag);
7513 CHECKSUM_STRING (AT_string (name_attr));
7514 }
7515 else
7516 {
7517 /* Use a deep checksum for other children. */
7518 /* Mark this DIE so it gets processed when unmarking. */
7519 if (c->die_mark == 0)
7520 c->die_mark = -1;
7521 die_checksum_ordered (c, ctx, mark);
7522 }
7523 } while (c != die->die_child);
7524
7525 CHECKSUM_ULEB128 (0);
7526 }
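/* Rough shape of the resulting stream: each DIE contributes 'D' and its
   tag, then one 'A'/'N'/'R'/'T' record per collected attribute in the fixed
   order above, then its children (named nested types and member functions
   shallowly as 'S' plus tag plus name, everything else recursively), and
   finally a terminating 0. */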
7527
7528 /* Add a type name and tag to a hash. */
7529 static void
7530 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7531 {
7532 CHECKSUM_ULEB128 (tag);
7533 CHECKSUM_STRING (name);
7534 }
7535
7536 #undef CHECKSUM
7537 #undef CHECKSUM_STRING
7538 #undef CHECKSUM_ATTR
7539 #undef CHECKSUM_SLEB128
7540 #undef CHECKSUM_ULEB128
7541
7542 /* Generate the type signature for DIE. This is computed by generating an
7543 MD5 checksum over the DIE's tag, its relevant attributes, and its
7544 children. Attributes that are references to other DIEs are processed
7545 by recursion, using the MARK field to prevent infinite recursion.
7546 If the DIE is nested inside a namespace or another type, we also
7547 need to include that context in the signature. The lower 64 bits
7548 of the resulting MD5 checksum comprise the signature. */
7549
7550 static void
7551 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7552 {
7553 int mark;
7554 const char *name;
7555 unsigned char checksum[16];
7556 struct md5_ctx ctx;
7557 dw_die_ref decl;
7558 dw_die_ref parent;
7559
7560 name = get_AT_string (die, DW_AT_name);
7561 decl = get_AT_ref (die, DW_AT_specification);
7562 parent = get_die_parent (die);
7563
7564 /* First, compute a signature for just the type name (and its surrounding
7565 context, if any). This is stored in the type unit DIE for link-time
7566 ODR (one-definition rule) checking. */
7567
7568 if (is_cxx () && name != NULL)
7569 {
7570 md5_init_ctx (&ctx);
7571
7572 /* Checksum the names of surrounding namespaces and structures. */
7573 if (parent != NULL)
7574 checksum_die_context (parent, &ctx);
7575
7576 /* Checksum the current DIE. */
7577 die_odr_checksum (die->die_tag, name, &ctx);
7578 md5_finish_ctx (&ctx, checksum);
7579
7580 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7581 }
7582
7583 /* Next, compute the complete type signature. */
7584
7585 md5_init_ctx (&ctx);
7586 mark = 1;
7587 die->die_mark = mark;
7588
7589 /* Checksum the names of surrounding namespaces and structures. */
7590 if (parent != NULL)
7591 checksum_die_context (parent, &ctx);
7592
7593 /* Checksum the DIE and its children. */
7594 die_checksum_ordered (die, &ctx, &mark);
7595 unmark_all_dies (die);
7596 md5_finish_ctx (&ctx, checksum);
7597
7598 /* Store the signature in the type node and link the type DIE and the
7599 type node together. */
7600 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7601 DWARF_TYPE_SIGNATURE_SIZE);
7602 die->comdat_type_p = true;
7603 die->die_id.die_type_node = type_node;
7604 type_node->type_die = die;
7605
7606 /* If the DIE is a specification, link its declaration to the type node
7607 as well. */
7608 if (decl != NULL)
7609 {
7610 decl->comdat_type_p = true;
7611 decl->die_id.die_type_node = type_node;
7612 }
7613 }
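/* Concretely, with the 16-byte MD5 digest and the usual
   DWARF_TYPE_SIGNATURE_SIZE of 8, the type signature is the trailing eight
   bytes of the full checksum, while the DW_AT_GNU_odr_signature added above
   is taken from offset 8 of the separate name-only checksum; the two are
   computed over different inputs and are unrelated values. */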
7614
7615 /* Do the location expressions look the same? */
7616 static inline int
7617 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7618 {
7619 return loc1->dw_loc_opc == loc2->dw_loc_opc
7620 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7621 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7622 }
7623
7624 /* Do the values look the same? */
7625 static int
7626 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7627 {
7628 dw_loc_descr_ref loc1, loc2;
7629 rtx r1, r2;
7630
7631 if (v1->val_class != v2->val_class)
7632 return 0;
7633
7634 switch (v1->val_class)
7635 {
7636 case dw_val_class_const:
7637 case dw_val_class_const_implicit:
7638 return v1->v.val_int == v2->v.val_int;
7639 case dw_val_class_unsigned_const:
7640 case dw_val_class_unsigned_const_implicit:
7641 return v1->v.val_unsigned == v2->v.val_unsigned;
7642 case dw_val_class_const_double:
7643 return v1->v.val_double.high == v2->v.val_double.high
7644 && v1->v.val_double.low == v2->v.val_double.low;
7645 case dw_val_class_wide_int:
7646 return *v1->v.val_wide == *v2->v.val_wide;
7647 case dw_val_class_vec:
7648 if (v1->v.val_vec.length != v2->v.val_vec.length
7649 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7650 return 0;
7651 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7652 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7653 return 0;
7654 return 1;
7655 case dw_val_class_flag:
7656 return v1->v.val_flag == v2->v.val_flag;
7657 case dw_val_class_str:
7658 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7659
7660 case dw_val_class_addr:
7661 r1 = v1->v.val_addr;
7662 r2 = v2->v.val_addr;
7663 if (GET_CODE (r1) != GET_CODE (r2))
7664 return 0;
7665 return rtx_equal_p (r1, r2);
7666
7667 case dw_val_class_offset:
7668 return v1->v.val_offset == v2->v.val_offset;
7669
7670 case dw_val_class_loc:
7671 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7672 loc1 && loc2;
7673 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7674 if (!same_loc_p (loc1, loc2, mark))
7675 return 0;
7676 return !loc1 && !loc2;
7677
7678 case dw_val_class_die_ref:
7679 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7680
7681 case dw_val_class_symview:
7682 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7683
7684 case dw_val_class_fde_ref:
7685 case dw_val_class_vms_delta:
7686 case dw_val_class_lbl_id:
7687 case dw_val_class_lineptr:
7688 case dw_val_class_macptr:
7689 case dw_val_class_loclistsptr:
7690 case dw_val_class_high_pc:
7691 return 1;
7692
7693 case dw_val_class_file:
7694 case dw_val_class_file_implicit:
7695 return v1->v.val_file == v2->v.val_file;
7696
7697 case dw_val_class_data8:
7698 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7699
7700 default:
7701 return 1;
7702 }
7703 }
7704
7705 /* Do the attributes look the same? */
7706
7707 static int
7708 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7709 {
7710 if (at1->dw_attr != at2->dw_attr)
7711 return 0;
7712
7713 /* We don't care that this was compiled with a different compiler
7714 snapshot; if the output is the same, that's what matters. */
7715 if (at1->dw_attr == DW_AT_producer)
7716 return 1;
7717
7718 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7719 }
7720
7721 /* Do the dies look the same? */
7722
7723 static int
7724 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7725 {
7726 dw_die_ref c1, c2;
7727 dw_attr_node *a1;
7728 unsigned ix;
7729
7730 /* To avoid infinite recursion. */
7731 if (die1->die_mark)
7732 return die1->die_mark == die2->die_mark;
7733 die1->die_mark = die2->die_mark = ++(*mark);
7734
7735 if (die1->die_tag != die2->die_tag)
7736 return 0;
7737
7738 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7739 return 0;
7740
7741 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7742 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7743 return 0;
7744
7745 c1 = die1->die_child;
7746 c2 = die2->die_child;
7747 if (! c1)
7748 {
7749 if (c2)
7750 return 0;
7751 }
7752 else
7753 for (;;)
7754 {
7755 if (!same_die_p (c1, c2, mark))
7756 return 0;
7757 c1 = c1->die_sib;
7758 c2 = c2->die_sib;
7759 if (c1 == die1->die_child)
7760 {
7761 if (c2 == die2->die_child)
7762 break;
7763 else
7764 return 0;
7765 }
7766 }
7767
7768 return 1;
7769 }
7770
7771 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7772 children, and set die_symbol. */
7773
7774 static void
7775 compute_comp_unit_symbol (dw_die_ref unit_die)
7776 {
7777 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7778 const char *base = die_name ? lbasename (die_name) : "anonymous";
7779 char *name = XALLOCAVEC (char, strlen (base) + 64);
7780 char *p;
7781 int i, mark;
7782 unsigned char checksum[16];
7783 struct md5_ctx ctx;
7784
7785 /* Compute the checksum of the DIE, then append part of it as hex digits to
7786 the name of the unit. */
7787
7788 md5_init_ctx (&ctx);
7789 mark = 0;
7790 die_checksum (unit_die, &ctx, &mark);
7791 unmark_all_dies (unit_die);
7792 md5_finish_ctx (&ctx, checksum);
7793
7794 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7795 not start with a letter but with anything valid for filenames and
7796 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7797 character is not a letter. */
7798 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7799 clean_symbol_name (name);
7800
7801 p = name + strlen (name);
7802 for (i = 0; i < 4; i++)
7803 {
7804 sprintf (p, "%.2x", checksum[i]);
7805 p += 2;
7806 }
7807
7808 unit_die->die_id.die_symbol = xstrdup (name);
7809 }
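/* For instance, a unit whose DW_AT_name is "foo.c" and whose checksum
   begins 0x12 0x34 0x56 0x78 gets a die_symbol along the lines of
   "foo.c.12345678", subject to whatever clean_symbol_name does to
   characters that are not valid in assembler symbols. */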
7810
7811 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7812
7813 static int
7814 is_type_die (dw_die_ref die)
7815 {
7816 switch (die->die_tag)
7817 {
7818 case DW_TAG_array_type:
7819 case DW_TAG_class_type:
7820 case DW_TAG_interface_type:
7821 case DW_TAG_enumeration_type:
7822 case DW_TAG_pointer_type:
7823 case DW_TAG_reference_type:
7824 case DW_TAG_rvalue_reference_type:
7825 case DW_TAG_string_type:
7826 case DW_TAG_structure_type:
7827 case DW_TAG_subroutine_type:
7828 case DW_TAG_union_type:
7829 case DW_TAG_ptr_to_member_type:
7830 case DW_TAG_set_type:
7831 case DW_TAG_subrange_type:
7832 case DW_TAG_base_type:
7833 case DW_TAG_const_type:
7834 case DW_TAG_file_type:
7835 case DW_TAG_packed_type:
7836 case DW_TAG_volatile_type:
7837 case DW_TAG_typedef:
7838 return 1;
7839 default:
7840 return 0;
7841 }
7842 }
7843
7844 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7845 Basically, we want to choose the bits that are likely to be shared between
7846 compilations (types) and leave out the bits that are specific to individual
7847 compilations (functions). */
7848
7849 static int
7850 is_comdat_die (dw_die_ref c)
7851 {
7852 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7853 we do for stabs. The advantage is a greater likelihood of sharing between
7854 objects that don't include headers in the same order (and therefore would
7855 put the base types in a different comdat). jason 8/28/00 */
7856
7857 if (c->die_tag == DW_TAG_base_type)
7858 return 0;
7859
7860 if (c->die_tag == DW_TAG_pointer_type
7861 || c->die_tag == DW_TAG_reference_type
7862 || c->die_tag == DW_TAG_rvalue_reference_type
7863 || c->die_tag == DW_TAG_const_type
7864 || c->die_tag == DW_TAG_volatile_type)
7865 {
7866 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7867
7868 return t ? is_comdat_die (t) : 0;
7869 }
7870
7871 return is_type_die (c);
7872 }
7873
7874 /* Returns true iff C is a compile-unit DIE. */
7875
7876 static inline bool
7877 is_cu_die (dw_die_ref c)
7878 {
7879 return c && (c->die_tag == DW_TAG_compile_unit
7880 || c->die_tag == DW_TAG_skeleton_unit);
7881 }
7882
7883 /* Returns true iff C is a unit DIE of some sort. */
7884
7885 static inline bool
7886 is_unit_die (dw_die_ref c)
7887 {
7888 return c && (c->die_tag == DW_TAG_compile_unit
7889 || c->die_tag == DW_TAG_partial_unit
7890 || c->die_tag == DW_TAG_type_unit
7891 || c->die_tag == DW_TAG_skeleton_unit);
7892 }
7893
7894 /* Returns true iff C is a namespace DIE. */
7895
7896 static inline bool
7897 is_namespace_die (dw_die_ref c)
7898 {
7899 return c && c->die_tag == DW_TAG_namespace;
7900 }
7901
7902 /* Returns true iff C is a class or structure DIE. */
7903
7904 static inline bool
7905 is_class_die (dw_die_ref c)
7906 {
7907 return c && (c->die_tag == DW_TAG_class_type
7908 || c->die_tag == DW_TAG_structure_type);
7909 }
7910
7911 /* Return non-zero if this DIE is a template parameter. */
7912
7913 static inline bool
7914 is_template_parameter (dw_die_ref die)
7915 {
7916 switch (die->die_tag)
7917 {
7918 case DW_TAG_template_type_param:
7919 case DW_TAG_template_value_param:
7920 case DW_TAG_GNU_template_template_param:
7921 case DW_TAG_GNU_template_parameter_pack:
7922 return true;
7923 default:
7924 return false;
7925 }
7926 }
7927
7928 /* Return non-zero if this DIE represents a template instantiation. */
7929
7930 static inline bool
7931 is_template_instantiation (dw_die_ref die)
7932 {
7933 dw_die_ref c;
7934
7935 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7936 return false;
7937 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7938 return false;
7939 }
7940
7941 static char *
7942 gen_internal_sym (const char *prefix)
7943 {
7944 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7945
7946 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7947 return xstrdup (buf);
7948 }
7949
7950 /* Return non-zero if this DIE is a declaration. */
7951
7952 static int
7953 is_declaration_die (dw_die_ref die)
7954 {
7955 dw_attr_node *a;
7956 unsigned ix;
7957
7958 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7959 if (a->dw_attr == DW_AT_declaration)
7960 return 1;
7961
7962 return 0;
7963 }
7964
7965 /* Return non-zero if this DIE is nested inside a subprogram. */
7966
7967 static int
7968 is_nested_in_subprogram (dw_die_ref die)
7969 {
7970 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7971
7972 if (decl == NULL)
7973 decl = die;
7974 return local_scope_p (decl);
7975 }
7976
7977 /* Return non-zero if this DIE contains a defining declaration of a
7978 subprogram. */
7979
7980 static int
7981 contains_subprogram_definition (dw_die_ref die)
7982 {
7983 dw_die_ref c;
7984
7985 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7986 return 1;
7987 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7988 return 0;
7989 }
7990
7991 /* Return non-zero if this is a type DIE that should be moved to a
7992 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7993 unit type. */
7994
7995 static int
7996 should_move_die_to_comdat (dw_die_ref die)
7997 {
7998 switch (die->die_tag)
7999 {
8000 case DW_TAG_class_type:
8001 case DW_TAG_structure_type:
8002 case DW_TAG_enumeration_type:
8003 case DW_TAG_union_type:
8004 /* Don't move declarations, inlined instances, types nested in a
8005 subprogram, or types that contain subprogram definitions. */
8006 if (is_declaration_die (die)
8007 || get_AT (die, DW_AT_abstract_origin)
8008 || is_nested_in_subprogram (die)
8009 || contains_subprogram_definition (die))
8010 return 0;
8011 return 1;
8012 case DW_TAG_array_type:
8013 case DW_TAG_interface_type:
8014 case DW_TAG_pointer_type:
8015 case DW_TAG_reference_type:
8016 case DW_TAG_rvalue_reference_type:
8017 case DW_TAG_string_type:
8018 case DW_TAG_subroutine_type:
8019 case DW_TAG_ptr_to_member_type:
8020 case DW_TAG_set_type:
8021 case DW_TAG_subrange_type:
8022 case DW_TAG_base_type:
8023 case DW_TAG_const_type:
8024 case DW_TAG_file_type:
8025 case DW_TAG_packed_type:
8026 case DW_TAG_volatile_type:
8027 case DW_TAG_typedef:
8028 default:
8029 return 0;
8030 }
8031 }
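/* For instance, a class or enumeration defined at namespace scope is a
   candidate for its own type unit, while the same definition written inside
   a function body, marked as a declaration, or containing a subprogram
   definition in its subtree stays in the main compilation unit. */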
8032
8033 /* Make a clone of DIE. */
8034
8035 static dw_die_ref
8036 clone_die (dw_die_ref die)
8037 {
8038 dw_die_ref clone = new_die_raw (die->die_tag);
8039 dw_attr_node *a;
8040 unsigned ix;
8041
8042 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8043 add_dwarf_attr (clone, a);
8044
8045 return clone;
8046 }
8047
8048 /* Make a clone of the tree rooted at DIE. */
8049
8050 static dw_die_ref
8051 clone_tree (dw_die_ref die)
8052 {
8053 dw_die_ref c;
8054 dw_die_ref clone = clone_die (die);
8055
8056 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8057
8058 return clone;
8059 }
8060
8061 /* Make a clone of DIE as a declaration. */
8062
8063 static dw_die_ref
8064 clone_as_declaration (dw_die_ref die)
8065 {
8066 dw_die_ref clone;
8067 dw_die_ref decl;
8068 dw_attr_node *a;
8069 unsigned ix;
8070
8071 /* If the DIE is already a declaration, just clone it. */
8072 if (is_declaration_die (die))
8073 return clone_die (die);
8074
8075 /* If the DIE is a specification, just clone its declaration DIE. */
8076 decl = get_AT_ref (die, DW_AT_specification);
8077 if (decl != NULL)
8078 {
8079 clone = clone_die (decl);
8080 if (die->comdat_type_p)
8081 add_AT_die_ref (clone, DW_AT_signature, die);
8082 return clone;
8083 }
8084
8085 clone = new_die_raw (die->die_tag);
8086
8087 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8088 {
8089 /* We don't want to copy over all attributes.
8090 For example we don't want DW_AT_byte_size because otherwise we will no
8091 longer have a declaration and GDB will treat it as a definition. */
8092
8093 switch (a->dw_attr)
8094 {
8095 case DW_AT_abstract_origin:
8096 case DW_AT_artificial:
8097 case DW_AT_containing_type:
8098 case DW_AT_external:
8099 case DW_AT_name:
8100 case DW_AT_type:
8101 case DW_AT_virtuality:
8102 case DW_AT_linkage_name:
8103 case DW_AT_MIPS_linkage_name:
8104 add_dwarf_attr (clone, a);
8105 break;
8106 case DW_AT_byte_size:
8107 case DW_AT_alignment:
8108 default:
8109 break;
8110 }
8111 }
8112
8113 if (die->comdat_type_p)
8114 add_AT_die_ref (clone, DW_AT_signature, die);
8115
8116 add_AT_flag (clone, DW_AT_declaration, 1);
8117 return clone;
8118 }
8119
8120
8121 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8122
8123 struct decl_table_entry
8124 {
8125 dw_die_ref orig;
8126 dw_die_ref copy;
8127 };
8128
8129 /* Helpers to manipulate hash table of copied declarations. */
8130
8131 /* Hashtable helpers. */
8132
8133 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8134 {
8135 typedef die_struct *compare_type;
8136 static inline hashval_t hash (const decl_table_entry *);
8137 static inline bool equal (const decl_table_entry *, const die_struct *);
8138 };
8139
8140 inline hashval_t
8141 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8142 {
8143 return htab_hash_pointer (entry->orig);
8144 }
8145
8146 inline bool
8147 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8148 const die_struct *entry2)
8149 {
8150 return entry1->orig == entry2;
8151 }
8152
8153 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8154
8155 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8156 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8157 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8158 to check if the ancestor has already been copied into UNIT. */
8159
8160 static dw_die_ref
8161 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8162 decl_hash_type *decl_table)
8163 {
8164 dw_die_ref parent = die->die_parent;
8165 dw_die_ref new_parent = unit;
8166 dw_die_ref copy;
8167 decl_table_entry **slot = NULL;
8168 struct decl_table_entry *entry = NULL;
8169
8170 /* If DIE refers to a stub, unfold that so we get the appropriate
8171 DIE registered as orig in decl_table. */
8172 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8173 die = c;
8174
8175 if (decl_table)
8176 {
8177 /* Check if the entry has already been copied to UNIT. */
8178 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8179 INSERT);
8180 if (*slot != HTAB_EMPTY_ENTRY)
8181 {
8182 entry = *slot;
8183 return entry->copy;
8184 }
8185
8186 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8187 entry = XCNEW (struct decl_table_entry);
8188 entry->orig = die;
8189 entry->copy = NULL;
8190 *slot = entry;
8191 }
8192
8193 if (parent != NULL)
8194 {
8195 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8196 if (spec != NULL)
8197 parent = spec;
8198 if (!is_unit_die (parent))
8199 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8200 }
8201
8202 copy = clone_as_declaration (die);
8203 add_child_die (new_parent, copy);
8204
8205 if (decl_table)
8206 {
8207 /* Record the pointer to the copy. */
8208 entry->copy = copy;
8209 }
8210
8211 return copy;
8212 }
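/* For example, applied to a DIE for N::C::f this creates declaration-only
   clones of N, C and f (outermost first) under UNIT, reusing any clones
   already recorded in DECL_TABLE, and returns the clone of f. */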
8213 /* Copy the declaration context to the new type unit DIE. This includes
8214 any surrounding namespace or type declarations. If the DIE has an
8215 AT_specification attribute, it also includes attributes and children
8216 attached to the specification, and returns a pointer to the original
8217 parent of the declaration DIE. Returns NULL otherwise. */
8218
8219 static dw_die_ref
8220 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8221 {
8222 dw_die_ref decl;
8223 dw_die_ref new_decl;
8224 dw_die_ref orig_parent = NULL;
8225
8226 decl = get_AT_ref (die, DW_AT_specification);
8227 if (decl == NULL)
8228 decl = die;
8229 else
8230 {
8231 unsigned ix;
8232 dw_die_ref c;
8233 dw_attr_node *a;
8234
8235 /* The original DIE will be changed to a declaration, and must
8236 be moved to be a child of the original declaration DIE. */
8237 orig_parent = decl->die_parent;
8238
8239 /* Copy the type node pointer from the new DIE to the original
8240 declaration DIE so we can forward references later. */
8241 decl->comdat_type_p = true;
8242 decl->die_id.die_type_node = die->die_id.die_type_node;
8243
8244 remove_AT (die, DW_AT_specification);
8245
8246 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8247 {
8248 if (a->dw_attr != DW_AT_name
8249 && a->dw_attr != DW_AT_declaration
8250 && a->dw_attr != DW_AT_external)
8251 add_dwarf_attr (die, a);
8252 }
8253
8254 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8255 }
8256
8257 if (decl->die_parent != NULL
8258 && !is_unit_die (decl->die_parent))
8259 {
8260 new_decl = copy_ancestor_tree (unit, decl, NULL);
8261 if (new_decl != NULL)
8262 {
8263 remove_AT (new_decl, DW_AT_signature);
8264 add_AT_specification (die, new_decl);
8265 }
8266 }
8267
8268 return orig_parent;
8269 }
8270
8271 /* Generate the skeleton ancestor tree for the given NODE, then clone
8272 the DIE and add the clone into the tree. */
8273
8274 static void
8275 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8276 {
8277 if (node->new_die != NULL)
8278 return;
8279
8280 node->new_die = clone_as_declaration (node->old_die);
8281
8282 if (node->parent != NULL)
8283 {
8284 generate_skeleton_ancestor_tree (node->parent);
8285 add_child_die (node->parent->new_die, node->new_die);
8286 }
8287 }
8288
8289 /* Generate a skeleton tree of DIEs containing any declarations that are
8290 found in the original tree. We traverse the tree looking for declaration
8291 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8292
8293 static void
8294 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8295 {
8296 skeleton_chain_node node;
8297 dw_die_ref c;
8298 dw_die_ref first;
8299 dw_die_ref prev = NULL;
8300 dw_die_ref next = NULL;
8301
8302 node.parent = parent;
8303
8304 first = c = parent->old_die->die_child;
8305 if (c)
8306 next = c->die_sib;
8307 if (c) do {
8308 if (prev == NULL || prev->die_sib == c)
8309 prev = c;
8310 c = next;
8311 next = (c == first ? NULL : c->die_sib);
8312 node.old_die = c;
8313 node.new_die = NULL;
8314 if (is_declaration_die (c))
8315 {
8316 if (is_template_instantiation (c))
8317 {
8318 /* Instantiated templates do not need to be cloned into the
8319 type unit. Just move the DIE and its children back to
8320 the skeleton tree (in the main CU). */
8321 remove_child_with_prev (c, prev);
8322 add_child_die (parent->new_die, c);
8323 c = prev;
8324 }
8325 else if (c->comdat_type_p)
8326 {
8327 /* This is the skeleton left by an earlier break_out_comdat_types
8328 pass. Clone the existing DIE, but keep the children
8329 under the original (which is in the main CU). */
8330 dw_die_ref clone = clone_die (c);
8331
8332 replace_child (c, clone, prev);
8333 generate_skeleton_ancestor_tree (parent);
8334 add_child_die (parent->new_die, c);
8335 c = clone;
8336 continue;
8337 }
8338 else
8339 {
8340 /* Clone the existing DIE, move the original to the skeleton
8341 tree (which is in the main CU), and put the clone, with
8342 all the original's children, where the original came from
8343 (which is about to be moved to the type unit). */
8344 dw_die_ref clone = clone_die (c);
8345 move_all_children (c, clone);
8346
8347 /* If the original has a DW_AT_object_pointer attribute,
8348 it would now point to a child DIE just moved to the
8349 cloned tree, so we need to remove that attribute from
8350 the original. */
8351 remove_AT (c, DW_AT_object_pointer);
8352
8353 replace_child (c, clone, prev);
8354 generate_skeleton_ancestor_tree (parent);
8355 add_child_die (parent->new_die, c);
8356 node.old_die = clone;
8357 node.new_die = c;
8358 c = clone;
8359 }
8360 }
8361 generate_skeleton_bottom_up (&node);
8362 } while (next != NULL);
8363 }
8364
8365 /* Wrapper function for generate_skeleton_bottom_up. */
8366
8367 static dw_die_ref
8368 generate_skeleton (dw_die_ref die)
8369 {
8370 skeleton_chain_node node;
8371
8372 node.old_die = die;
8373 node.new_die = NULL;
8374 node.parent = NULL;
8375
8376 /* If this type definition is nested inside another type,
8377 and is not an instantiation of a template, always leave
8378 at least a declaration in its place. */
8379 if (die->die_parent != NULL
8380 && is_type_die (die->die_parent)
8381 && !is_template_instantiation (die))
8382 node.new_die = clone_as_declaration (die);
8383
8384 generate_skeleton_bottom_up (&node);
8385 return node.new_die;
8386 }
8387
8388 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8389 declaration. The original DIE is moved to a new compile unit so that
8390 existing references to it follow it to the new location. If any of the
8391 original DIE's descendants is a declaration, we need to replace the
8392 original DIE with a skeleton tree and move the declarations back into the
8393 skeleton tree. */
8394
8395 static dw_die_ref
8396 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8397 dw_die_ref prev)
8398 {
8399 dw_die_ref skeleton, orig_parent;
8400
8401 /* Copy the declaration context to the type unit DIE. If the returned
8402 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8403 that DIE. */
8404 orig_parent = copy_declaration_context (unit, child);
8405
8406 skeleton = generate_skeleton (child);
8407 if (skeleton == NULL)
8408 remove_child_with_prev (child, prev);
8409 else
8410 {
8411 skeleton->comdat_type_p = true;
8412 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8413
8414 /* If the original DIE was a specification, we need to put
8415 the skeleton under the parent DIE of the declaration.
8416 This leaves the original declaration in the tree, but
8417 it will be pruned later since there are no longer any
8418 references to it. */
8419 if (orig_parent != NULL)
8420 {
8421 remove_child_with_prev (child, prev);
8422 add_child_die (orig_parent, skeleton);
8423 }
8424 else
8425 replace_child (child, skeleton, prev);
8426 }
8427
8428 return skeleton;
8429 }
8430
8431 static void
8432 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8433 comdat_type_node *type_node,
8434 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8435
8436 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8437 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8438 DWARF procedure references in the DW_AT_location attribute. */
8439
8440 static dw_die_ref
8441 copy_dwarf_procedure (dw_die_ref die,
8442 comdat_type_node *type_node,
8443 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8444 {
8445 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8446
8447 /* DWARF procedures are not supposed to have children... */
8448 gcc_assert (die->die_child == NULL);
8449
8450 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8451 gcc_assert (vec_safe_length (die->die_attr) == 1
8452 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8453
8454 /* Do not copy DWARF procedures more than once. */
8455 bool existed;
8456 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8457 if (existed)
8458 return die_copy;
8459
8460 die_copy = clone_die (die);
8461 add_child_die (type_node->root_die, die_copy);
8462 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8463 return die_copy;
8464 }
8465
8466 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8467 procedures in DIE's attributes. */
8468
8469 static void
8470 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8471 comdat_type_node *type_node,
8472 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8473 {
8474 dw_attr_node *a;
8475 unsigned i;
8476
8477 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8478 {
8479 dw_loc_descr_ref loc;
8480
8481 if (a->dw_attr_val.val_class != dw_val_class_loc)
8482 continue;
8483
8484 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8485 {
8486 switch (loc->dw_loc_opc)
8487 {
8488 case DW_OP_call2:
8489 case DW_OP_call4:
8490 case DW_OP_call_ref:
8491 gcc_assert (loc->dw_loc_oprnd1.val_class
8492 == dw_val_class_die_ref);
8493 loc->dw_loc_oprnd1.v.val_die_ref.die
8494 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8495 type_node,
8496 copied_dwarf_procs);
8497
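 /* FALLTHRU */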
8498 default:
8499 break;
8500 }
8501 }
8502 }
8503 }
8504
8505 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8506 rewrite references to point to the copies.
8507
8508 References are looked for in DIE's attributes and recursively in all its
8509 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8510 mapping from old DWARF procedures to their copies. It is used to avoid copying
8511 the same DWARF procedure twice under TYPE_NODE. */
8512
8513 static void
8514 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8515 comdat_type_node *type_node,
8516 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8517 {
8518 dw_die_ref c;
8519
8520 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8521 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8522 type_node,
8523 copied_dwarf_procs));
8524 }
8525
8526 /* Traverse the DIE and set up additional .debug_types or .debug_info
8527 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8528 section. */
8529
8530 static void
8531 break_out_comdat_types (dw_die_ref die)
8532 {
8533 dw_die_ref c;
8534 dw_die_ref first;
8535 dw_die_ref prev = NULL;
8536 dw_die_ref next = NULL;
8537 dw_die_ref unit = NULL;
8538
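 /* DIE's children form a circular list: DIE_CHILD points to the last child,
    whose DIE_SIB wraps back to the first. Capture NEXT before processing C,
    since C may be removed from the main CU or replaced by a skeleton below. */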
8539 first = c = die->die_child;
8540 if (c)
8541 next = c->die_sib;
8542 if (c) do {
8543 if (prev == NULL || prev->die_sib == c)
8544 prev = c;
8545 c = next;
8546 next = (c == first ? NULL : c->die_sib);
8547 if (should_move_die_to_comdat (c))
8548 {
8549 dw_die_ref replacement;
8550 comdat_type_node *type_node;
8551
8552 /* Break out nested types into their own type units. */
8553 break_out_comdat_types (c);
8554
8555 /* Create a new type unit DIE as the root for the new tree, and
8556 add it to the list of comdat types. */
8557 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8558 add_AT_unsigned (unit, DW_AT_language,
8559 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8560 type_node = ggc_cleared_alloc<comdat_type_node> ();
8561 type_node->root_die = unit;
8562 type_node->next = comdat_type_list;
8563 comdat_type_list = type_node;
8564
8565 /* Generate the type signature. */
8566 generate_type_signature (c, type_node);
8567
8568 /* Copy the declaration context, attributes, and children of the
8569 declaration into the new type unit DIE, then remove this DIE
8570 from the main CU (or replace it with a skeleton if necessary). */
8571 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8572 type_node->skeleton_die = replacement;
8573
8574 /* Add the DIE to the new compunit. */
8575 add_child_die (unit, c);
8576
8577 /* Types can reference DWARF procedures for type size or data location
8578 expressions. Calls in DWARF expressions cannot target procedures
8579 that are not in the same section. So we must copy DWARF procedures
8580 along with this type and then rewrite references to them. */
8581 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8582 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8583
8584 if (replacement != NULL)
8585 c = replacement;
8586 }
8587 else if (c->die_tag == DW_TAG_namespace
8588 || c->die_tag == DW_TAG_class_type
8589 || c->die_tag == DW_TAG_structure_type
8590 || c->die_tag == DW_TAG_union_type)
8591 {
8592 /* Look for nested types that can be broken out. */
8593 break_out_comdat_types (c);
8594 }
8595 } while (next != NULL);
8596 }
8597
8598 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8599 Enter all the cloned children into the hash table decl_table. */
8600
8601 static dw_die_ref
8602 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8603 {
8604 dw_die_ref c;
8605 dw_die_ref clone;
8606 struct decl_table_entry *entry;
8607 decl_table_entry **slot;
8608
8609 if (die->die_tag == DW_TAG_subprogram)
8610 clone = clone_as_declaration (die);
8611 else
8612 clone = clone_die (die);
8613
8614 slot = decl_table->find_slot_with_hash (die,
8615 htab_hash_pointer (die), INSERT);
8616
8617 /* Assert that DIE isn't in the hash table yet. If it were already there,
8618 its ancestors would necessarily be there as well, and therefore
8619 clone_tree_partial wouldn't have been called. */
8620 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8621
8622 entry = XCNEW (struct decl_table_entry);
8623 entry->orig = die;
8624 entry->copy = clone;
8625 *slot = entry;
8626
8627 if (die->die_tag != DW_TAG_subprogram)
8628 FOR_EACH_CHILD (die, c,
8629 add_child_die (clone, clone_tree_partial (c, decl_table)));
8630
8631 return clone;
8632 }
8633
8634 /* Walk the DIE and its children, looking for references to incomplete
8635 or trivial types that are unmarked (i.e., that are not in the current
8636 type_unit). */
8637
8638 static void
8639 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8640 {
8641 dw_die_ref c;
8642 dw_attr_node *a;
8643 unsigned ix;
8644
8645 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8646 {
8647 if (AT_class (a) == dw_val_class_die_ref)
8648 {
8649 dw_die_ref targ = AT_ref (a);
8650 decl_table_entry **slot;
8651 struct decl_table_entry *entry;
8652
8653 if (targ->die_mark != 0 || targ->comdat_type_p)
8654 continue;
8655
8656 slot = decl_table->find_slot_with_hash (targ,
8657 htab_hash_pointer (targ),
8658 INSERT);
8659
8660 if (*slot != HTAB_EMPTY_ENTRY)
8661 {
8662 /* TARG has already been copied, so we just need to
8663 modify the reference to point to the copy. */
8664 entry = *slot;
8665 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8666 }
8667 else
8668 {
8669 dw_die_ref parent = unit;
8670 dw_die_ref copy = clone_die (targ);
8671
8672 /* Record in DECL_TABLE that TARG has been copied.
8673 Need to do this now, before the recursive call,
8674 because DECL_TABLE may be expanded and SLOT
8675 would no longer be a valid pointer. */
8676 entry = XCNEW (struct decl_table_entry);
8677 entry->orig = targ;
8678 entry->copy = copy;
8679 *slot = entry;
8680
8681 /* If TARG is not a declaration DIE, we need to copy its
8682 children. */
8683 if (!is_declaration_die (targ))
8684 {
8685 FOR_EACH_CHILD (
8686 targ, c,
8687 add_child_die (copy,
8688 clone_tree_partial (c, decl_table)));
8689 }
8690
8691 /* Make sure the cloned tree is marked as part of the
8692 type unit. */
8693 mark_dies (copy);
8694
8695 /* If TARG has surrounding context, copy its ancestor tree
8696 into the new type unit. */
8697 if (targ->die_parent != NULL
8698 && !is_unit_die (targ->die_parent))
8699 parent = copy_ancestor_tree (unit, targ->die_parent,
8700 decl_table);
8701
8702 add_child_die (parent, copy);
8703 a->dw_attr_val.v.val_die_ref.die = copy;
8704
8705 /* Make sure the newly-copied DIE is walked. If it was
8706 installed in a previously-added context, it won't
8707 get visited otherwise. */
8708 if (parent != unit)
8709 {
8710 /* Find the highest point of the newly-added tree,
8711 mark each node along the way, and walk from there. */
8712 parent->die_mark = 1;
8713 while (parent->die_parent
8714 && parent->die_parent->die_mark == 0)
8715 {
8716 parent = parent->die_parent;
8717 parent->die_mark = 1;
8718 }
8719 copy_decls_walk (unit, parent, decl_table);
8720 }
8721 }
8722 }
8723 }
8724
8725 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8726 }
8727
8728 /* Collect the skeleton DIEs within DIE that were already created by
8729 break_out_comdat_types, and record them in DECL_TABLE. */
8730
8731 static void
8732 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8733 {
8734 dw_die_ref c;
8735
8736 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8737 {
8738 dw_die_ref targ = AT_ref (a);
8739 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8740 decl_table_entry **slot
8741 = decl_table->find_slot_with_hash (targ,
8742 htab_hash_pointer (targ),
8743 INSERT);
8744 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8745 /* Record in DECL_TABLE that TARG has already been copied
8746 by remove_child_or_replace_with_skeleton. */
8747 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8748 entry->orig = targ;
8749 entry->copy = die;
8750 *slot = entry;
8751 }
8752 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8753 }
8754
8755 /* Copy declarations for "unworthy" types into the new comdat section.
8756 Incomplete types, modified types, and certain other types aren't broken
8757 out into comdat sections of their own, so they don't have a signature,
8758 and we need to copy the declaration into the same section so that we
8759 don't have an external reference. */
8760
8761 static void
8762 copy_decls_for_unworthy_types (dw_die_ref unit)
8763 {
8764 mark_dies (unit);
8765 decl_hash_type decl_table (10);
8766 collect_skeleton_dies (unit, &decl_table);
8767 copy_decls_walk (unit, unit, &decl_table);
8768 unmark_dies (unit);
8769 }
8770
8771 /* Traverse the DIE and add a sibling attribute if it may have the
8772 effect of speeding up access to siblings. To save some space,
8773 avoid generating sibling attributes for DIEs without children. */
8774
8775 static void
8776 add_sibling_attributes (dw_die_ref die)
8777 {
8778 dw_die_ref c;
8779
8780 if (! die->die_child)
8781 return;
8782
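 /* The parent's DIE_CHILD points to its last child, whose DIE_SIB wraps
    around to the first child rather than to a following sibling, so no
    DW_AT_sibling is emitted for it. */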
8783 if (die->die_parent && die != die->die_parent->die_child)
8784 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8785
8786 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8787 }
8788
8789 /* Output all location lists for the DIE and its children. */
8790
8791 static void
8792 output_location_lists (dw_die_ref die)
8793 {
8794 dw_die_ref c;
8795 dw_attr_node *a;
8796 unsigned ix;
8797
8798 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8799 if (AT_class (a) == dw_val_class_loc_list)
8800 output_loc_list (AT_loc_list (a));
8801
8802 FOR_EACH_CHILD (die, c, output_location_lists (c));
8803 }
8804
8805 /* During assign_location_list_indexes and output_loclists_offsets this is
8806 the current index; afterwards it is the number of assigned indexes (i.e. how
8807 large the .debug_loclists* offset table should be). */
8808 static unsigned int loc_list_idx;
8809
8810 /* Output all location list offsets for the DIE and its children. */
8811
8812 static void
8813 output_loclists_offsets (dw_die_ref die)
8814 {
8815 dw_die_ref c;
8816 dw_attr_node *a;
8817 unsigned ix;
8818
8819 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8820 if (AT_class (a) == dw_val_class_loc_list)
8821 {
8822 dw_loc_list_ref l = AT_loc_list (a);
8823 if (l->offset_emitted)
8824 continue;
8825 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8826 loc_section_label, NULL);
8827 gcc_assert (l->hash == loc_list_idx);
8828 loc_list_idx++;
8829 l->offset_emitted = true;
8830 }
8831
8832 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8833 }
8834
8835 /* Recursively set indexes of location lists. */
8836
8837 static void
8838 assign_location_list_indexes (dw_die_ref die)
8839 {
8840 dw_die_ref c;
8841 dw_attr_node *a;
8842 unsigned ix;
8843
8844 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8845 if (AT_class (a) == dw_val_class_loc_list)
8846 {
8847 dw_loc_list_ref list = AT_loc_list (a);
8848 if (!list->num_assigned)
8849 {
8850 list->num_assigned = true;
8851 list->hash = loc_list_idx++;
8852 }
8853 }
8854
8855 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8856 }
8857
8858 /* We want to limit the number of external references, because they are
8859 larger than local references: a relocation takes multiple words, and
8860 even a sig8 reference is always eight bytes, whereas a local reference
8861 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8862 So if we encounter multiple external references to the same type DIE, we
8863 make a local typedef stub for it and redirect all references there.
8864
8865 This is the element of the hash table for keeping track of these
8866 references. */
8867
8868 struct external_ref
8869 {
8870 dw_die_ref type;
8871 dw_die_ref stub;
8872 unsigned n_refs;
8873 };
8874
8875 /* Hashtable helpers. */
8876
8877 struct external_ref_hasher : free_ptr_hash <external_ref>
8878 {
8879 static inline hashval_t hash (const external_ref *);
8880 static inline bool equal (const external_ref *, const external_ref *);
8881 };
8882
8883 inline hashval_t
8884 external_ref_hasher::hash (const external_ref *r)
8885 {
8886 dw_die_ref die = r->type;
8887 hashval_t h = 0;
8888
8889 /* We can't use the address of the DIE for hashing, because
8890 that will make the order of the stub DIEs non-deterministic. */
8891 if (! die->comdat_type_p)
8892 /* We have a symbol; use it to compute a hash. */
8893 h = htab_hash_string (die->die_id.die_symbol);
8894 else
8895 {
8896 /* We have a type signature; use a subset of the bits as the hash.
8897 The 8-byte signature is at least as large as hashval_t. */
8898 comdat_type_node *type_node = die->die_id.die_type_node;
8899 memcpy (&h, type_node->signature, sizeof (h));
8900 }
8901 return h;
8902 }
8903
8904 inline bool
8905 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8906 {
8907 return r1->type == r2->type;
8908 }
8909
8910 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8911
8912 /* Return a pointer to the external_ref for references to DIE. */
8913
8914 static struct external_ref *
8915 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8916 {
8917 struct external_ref ref, *ref_p;
8918 external_ref **slot;
8919
8920 ref.type = die;
8921 slot = map->find_slot (&ref, INSERT);
8922 if (*slot != HTAB_EMPTY_ENTRY)
8923 return *slot;
8924
8925 ref_p = XCNEW (struct external_ref);
8926 ref_p->type = die;
8927 *slot = ref_p;
8928 return ref_p;
8929 }
8930
8931 /* Subroutine of optimize_external_refs, below.
8932
8933 If we see a type skeleton, record it as our stub. If we see external
8934 references, remember how many we've seen. */
8935
8936 static void
8937 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8938 {
8939 dw_die_ref c;
8940 dw_attr_node *a;
8941 unsigned ix;
8942 struct external_ref *ref_p;
8943
8944 if (is_type_die (die)
8945 && (c = get_AT_ref (die, DW_AT_signature)))
8946 {
8947 /* This is a local skeleton; use it for local references. */
8948 ref_p = lookup_external_ref (map, c);
8949 ref_p->stub = die;
8950 }
8951
8952 /* Scan the DIE references, and remember any that refer to DIEs from
8953 other CUs (i.e. those which are not marked). */
8954 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8955 if (AT_class (a) == dw_val_class_die_ref
8956 && (c = AT_ref (a))->die_mark == 0
8957 && is_type_die (c))
8958 {
8959 ref_p = lookup_external_ref (map, c);
8960 ref_p->n_refs++;
8961 }
8962
8963 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8964 }
8965
8966 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8967 points to an external_ref, DATA is the CU we're processing. If we don't
8968 already have a local stub, and we have multiple refs, build a stub. */
8969
8970 int
8971 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8972 {
8973 struct external_ref *ref_p = *slot;
8974
8975 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8976 {
8977 /* We have multiple references to this type, so build a small stub.
8978 Both of these forms are a bit dodgy from the perspective of the
8979 DWARF standard, since technically they should have names. */
8980 dw_die_ref cu = data;
8981 dw_die_ref type = ref_p->type;
8982 dw_die_ref stub = NULL;
8983
8984 if (type->comdat_type_p)
8985 {
8986 /* If we refer to this type via sig8, use AT_signature. */
8987 stub = new_die (type->die_tag, cu, NULL_TREE);
8988 add_AT_die_ref (stub, DW_AT_signature, type);
8989 }
8990 else
8991 {
8992 /* Otherwise, use a typedef with no name. */
8993 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8994 add_AT_die_ref (stub, DW_AT_type, type);
8995 }
8996
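 /* Mark the stub as belonging to the current CU so that references to it
    are treated as local (see mark_dies and build_abbrev_table); the stub was
    created after mark_dies already ran over this CU. */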
8997 stub->die_mark++;
8998 ref_p->stub = stub;
8999 }
9000 return 1;
9001 }
9002
9003 /* DIE is a unit; look through all the DIE references to see if there are
9004 any external references to types, and if so, create local stubs for
9005 them which will be applied in build_abbrev_table. This is useful because
9006 references to local DIEs are smaller. */
9007
9008 static external_ref_hash_type *
9009 optimize_external_refs (dw_die_ref die)
9010 {
9011 external_ref_hash_type *map = new external_ref_hash_type (10);
9012 optimize_external_refs_1 (die, map);
9013 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9014 return map;
9015 }
9016
9017 /* The following 4 variables are temporaries that are computed only during the
9018 build_abbrev_table call and used and released during the following
9019 optimize_abbrev_table call. */
9020
9021 /* First abbrev_id that can be optimized based on usage. */
9022 static unsigned int abbrev_opt_start;
9023
9024 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9025 abbrev_id smaller than this, because they must be already sized
9026 during build_abbrev_table). */
9027 static unsigned int abbrev_opt_base_type_end;
9028
9029 /* Vector of usage counts during build_abbrev_table. Indexed by
9030 abbrev_id - abbrev_opt_start. */
9031 static vec<unsigned int> abbrev_usage_count;
9032
9033 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9034 static vec<dw_die_ref> sorted_abbrev_dies;
9035
9036 /* The format of each DIE (and its attribute value pairs) is encoded in an
9037 abbreviation table. This routine builds the abbreviation table and assigns
9038 a unique abbreviation id for each abbreviation entry. The children of each
9039 die are visited recursively. */
9040
9041 static void
9042 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9043 {
9044 unsigned int abbrev_id = 0;
9045 dw_die_ref c;
9046 dw_attr_node *a;
9047 unsigned ix;
9048 dw_die_ref abbrev;
9049
9050 /* Scan the DIE references, and replace any that refer to
9051 DIEs from other CUs (i.e. those which are not marked) with
9052 the local stubs we built in optimize_external_refs. */
9053 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9054 if (AT_class (a) == dw_val_class_die_ref
9055 && (c = AT_ref (a))->die_mark == 0)
9056 {
9057 struct external_ref *ref_p;
9058 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9059
9060 if (is_type_die (c)
9061 && (ref_p = lookup_external_ref (extern_map, c))
9062 && ref_p->stub && ref_p->stub != die
9063 && a->dw_attr != DW_AT_signature)
9064 change_AT_die_ref (a, ref_p->stub);
9065 else
9066 /* We aren't changing this reference, so mark it external. */
9067 set_AT_ref_external (a, 1);
9068 }
9069
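 /* Scan the existing abbreviation table for an entry that matches this DIE:
    same tag, same has-children flag, and the same sequence of attribute
    names and forms. */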
9070 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9071 {
9072 dw_attr_node *die_a, *abbrev_a;
9073 unsigned ix;
9074 bool ok = true;
9075
9076 if (abbrev_id == 0)
9077 continue;
9078 if (abbrev->die_tag != die->die_tag)
9079 continue;
9080 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9081 continue;
9082
9083 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9084 continue;
9085
9086 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9087 {
9088 abbrev_a = &(*abbrev->die_attr)[ix];
9089 if ((abbrev_a->dw_attr != die_a->dw_attr)
9090 || (value_format (abbrev_a) != value_format (die_a)))
9091 {
9092 ok = false;
9093 break;
9094 }
9095 }
9096 if (ok)
9097 break;
9098 }
9099
9100 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9101 {
9102 vec_safe_push (abbrev_die_table, die);
9103 if (abbrev_opt_start)
9104 abbrev_usage_count.safe_push (0);
9105 }
9106 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9107 {
9108 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9109 sorted_abbrev_dies.safe_push (die);
9110 }
9111
9112 die->die_abbrev = abbrev_id;
9113 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9114 }
9115
9116 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9117 by die_abbrev's usage count, from the most commonly used
9118 abbreviation to the least. */
9119
9120 static int
9121 die_abbrev_cmp (const void *p1, const void *p2)
9122 {
9123 dw_die_ref die1 = *(const dw_die_ref *) p1;
9124 dw_die_ref die2 = *(const dw_die_ref *) p2;
9125
9126 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9127 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9128
9129 if (die1->die_abbrev >= abbrev_opt_base_type_end
9130 && die2->die_abbrev >= abbrev_opt_base_type_end)
9131 {
9132 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9133 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9134 return -1;
9135 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9136 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9137 return 1;
9138 }
9139
9140 /* Stabilize the sort. */
9141 if (die1->die_abbrev < die2->die_abbrev)
9142 return -1;
9143 if (die1->die_abbrev > die2->die_abbrev)
9144 return 1;
9145
9146 return 0;
9147 }
9148
9149 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9150 of the DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9151 into dw_val_class_const_implicit or
9152 dw_val_class_unsigned_const_implicit. */
9153
9154 static void
9155 optimize_implicit_const (unsigned int first_id, unsigned int end,
9156 vec<bool> &implicit_consts)
9157 {
9158 /* It never makes sense if there is just one DIE using the abbreviation. */
9159 if (end < first_id + 2)
9160 return;
9161
9162 dw_attr_node *a;
9163 unsigned ix, i;
9164 dw_die_ref die = sorted_abbrev_dies[first_id];
9165 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9166 if (implicit_consts[ix])
9167 {
9168 enum dw_val_class new_class = dw_val_class_none;
9169 switch (AT_class (a))
9170 {
9171 case dw_val_class_unsigned_const:
9172 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9173 continue;
9174
9175 /* The .debug_abbrev section will grow by
9176 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9177 in all the DIEs using that abbreviation. */
9178 if (constant_size (AT_unsigned (a)) * (end - first_id)
9179 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9180 continue;
9181
9182 new_class = dw_val_class_unsigned_const_implicit;
9183 break;
9184
9185 case dw_val_class_const:
9186 new_class = dw_val_class_const_implicit;
9187 break;
9188
9189 case dw_val_class_file:
9190 new_class = dw_val_class_file_implicit;
9191 break;
9192
9193 default:
9194 continue;
9195 }
9196 for (i = first_id; i < end; i++)
9197 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9198 = new_class;
9199 }
9200 }
9201
9202 /* Attempt to optimize the abbreviation table for abbreviations with id
9203 abbrev_opt_start and above. */
9204
9205 static void
9206 optimize_abbrev_table (void)
9207 {
9208 if (abbrev_opt_start
9209 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9210 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9211 {
9212 auto_vec<bool, 32> implicit_consts;
9213 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9214
9215 unsigned int abbrev_id = abbrev_opt_start - 1;
9216 unsigned int first_id = ~0U;
9217 unsigned int last_abbrev_id = 0;
9218 unsigned int i;
9219 dw_die_ref die;
9220 if (abbrev_opt_base_type_end > abbrev_opt_start)
9221 abbrev_id = abbrev_opt_base_type_end - 1;
9222 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9223 most commonly used abbreviations come first. */
9224 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9225 {
9226 dw_attr_node *a;
9227 unsigned ix;
9228
9229 /* If calc_base_type_die_sizes has been called, the CU and
9230 base types after it can't be optimized, because we've already
9231 calculated their DIE offsets. We've sorted them first. */
9232 if (die->die_abbrev < abbrev_opt_base_type_end)
9233 continue;
9234 if (die->die_abbrev != last_abbrev_id)
9235 {
9236 last_abbrev_id = die->die_abbrev;
9237 if (dwarf_version >= 5 && first_id != ~0U)
9238 optimize_implicit_const (first_id, i, implicit_consts);
9239 abbrev_id++;
9240 (*abbrev_die_table)[abbrev_id] = die;
9241 if (dwarf_version >= 5)
9242 {
9243 first_id = i;
9244 implicit_consts.truncate (0);
9245
9246 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9247 switch (AT_class (a))
9248 {
9249 case dw_val_class_const:
9250 case dw_val_class_unsigned_const:
9251 case dw_val_class_file:
9252 implicit_consts.safe_push (true);
9253 break;
9254 default:
9255 implicit_consts.safe_push (false);
9256 break;
9257 }
9258 }
9259 }
9260 else if (dwarf_version >= 5)
9261 {
9262 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9263 if (!implicit_consts[ix])
9264 continue;
9265 else
9266 {
9267 dw_attr_node *other_a
9268 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9269 if (!dw_val_equal_p (&a->dw_attr_val,
9270 &other_a->dw_attr_val))
9271 implicit_consts[ix] = false;
9272 }
9273 }
9274 die->die_abbrev = abbrev_id;
9275 }
9276 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9277 if (dwarf_version >= 5 && first_id != ~0U)
9278 optimize_implicit_const (first_id, i, implicit_consts);
9279 }
9280
9281 abbrev_opt_start = 0;
9282 abbrev_opt_base_type_end = 0;
9283 abbrev_usage_count.release ();
9284 sorted_abbrev_dies.release ();
9285 }
9286
9287 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9288
9289 static int
9290 constant_size (unsigned HOST_WIDE_INT value)
9291 {
9292 int log;
9293
9294 if (value == 0)
9295 log = 0;
9296 else
9297 log = floor_log2 (value);
9298
9299 log = log / 8;
9300 log = 1 << (floor_log2 (log) + 1);
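 /* The steps above round the byte count up to a power of two; e.g. a value
    of 0x1234 has floor_log2 == 12, giving 12/8 == 1 and then
    1 << (0 + 1) == 2 bytes. The result is always 1, 2, 4 or 8. */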
9301
9302 return log;
9303 }
9304
9305 /* Return the size of a DIE as it is represented in the
9306 .debug_info section. */
9307
9308 static unsigned long
9309 size_of_die (dw_die_ref die)
9310 {
9311 unsigned long size = 0;
9312 dw_attr_node *a;
9313 unsigned ix;
9314 enum dwarf_form form;
9315
9316 size += size_of_uleb128 (die->die_abbrev);
9317 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9318 {
9319 switch (AT_class (a))
9320 {
9321 case dw_val_class_addr:
9322 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9323 {
9324 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9325 size += size_of_uleb128 (AT_index (a));
9326 }
9327 else
9328 size += DWARF2_ADDR_SIZE;
9329 break;
9330 case dw_val_class_offset:
9331 size += DWARF_OFFSET_SIZE;
9332 break;
9333 case dw_val_class_loc:
9334 {
9335 unsigned long lsize = size_of_locs (AT_loc (a));
9336
9337 /* Block length. */
9338 if (dwarf_version >= 4)
9339 size += size_of_uleb128 (lsize);
9340 else
9341 size += constant_size (lsize);
9342 size += lsize;
9343 }
9344 break;
9345 case dw_val_class_loc_list:
9346 case dw_val_class_view_list:
9347 if (dwarf_split_debug_info && dwarf_version >= 5)
9348 {
9349 gcc_assert (AT_loc_list (a)->num_assigned);
9350 size += size_of_uleb128 (AT_loc_list (a)->hash);
9351 }
9352 else
9353 size += DWARF_OFFSET_SIZE;
9354 break;
9355 case dw_val_class_range_list:
9356 if (value_format (a) == DW_FORM_rnglistx)
9357 {
9358 gcc_assert (rnglist_idx);
9359 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9360 size += size_of_uleb128 (r->idx);
9361 }
9362 else
9363 size += DWARF_OFFSET_SIZE;
9364 break;
9365 case dw_val_class_const:
9366 size += size_of_sleb128 (AT_int (a));
9367 break;
9368 case dw_val_class_unsigned_const:
9369 {
9370 int csize = constant_size (AT_unsigned (a));
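 /* In DWARF 3, DW_AT_data_member_location with DW_FORM_data4/data8
    would be read as a loclistptr, so large constants must use
    DW_FORM_udata instead (see value_format). */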
9371 if (dwarf_version == 3
9372 && a->dw_attr == DW_AT_data_member_location
9373 && csize >= 4)
9374 size += size_of_uleb128 (AT_unsigned (a));
9375 else
9376 size += csize;
9377 }
9378 break;
9379 case dw_val_class_symview:
9380 if (symview_upper_bound <= 0xff)
9381 size += 1;
9382 else if (symview_upper_bound <= 0xffff)
9383 size += 2;
9384 else if (symview_upper_bound <= 0xffffffff)
9385 size += 4;
9386 else
9387 size += 8;
9388 break;
9389 case dw_val_class_const_implicit:
9390 case dw_val_class_unsigned_const_implicit:
9391 case dw_val_class_file_implicit:
9392 /* These occupy no size in the DIE, just an extra sleb128 in
9393 .debug_abbrev. */
9394 break;
9395 case dw_val_class_const_double:
9396 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9397 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9398 size++; /* block */
9399 break;
9400 case dw_val_class_wide_int:
9401 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9402 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9403 if (get_full_len (*a->dw_attr_val.v.val_wide)
9404 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9405 size++; /* block */
9406 break;
9407 case dw_val_class_vec:
9408 size += constant_size (a->dw_attr_val.v.val_vec.length
9409 * a->dw_attr_val.v.val_vec.elt_size)
9410 + a->dw_attr_val.v.val_vec.length
9411 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9412 break;
9413 case dw_val_class_flag:
9414 if (dwarf_version >= 4)
9415 /* Currently all add_AT_flag calls pass in 1 as last argument,
9416 so DW_FORM_flag_present can be used. If that ever changes,
9417 we'll need to use DW_FORM_flag and have some optimization
9418 in build_abbrev_table that will change those to
9419 DW_FORM_flag_present if it is set to 1 in all DIEs using
9420 the same abbrev entry. */
9421 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9422 else
9423 size += 1;
9424 break;
9425 case dw_val_class_die_ref:
9426 if (AT_ref_external (a))
9427 {
9428 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9429 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9430 is sized by target address length, whereas in DWARF3
9431 it's always sized as an offset. */
9432 if (AT_ref (a)->comdat_type_p)
9433 size += DWARF_TYPE_SIGNATURE_SIZE;
9434 else if (dwarf_version == 2)
9435 size += DWARF2_ADDR_SIZE;
9436 else
9437 size += DWARF_OFFSET_SIZE;
9438 }
9439 else
9440 size += DWARF_OFFSET_SIZE;
9441 break;
9442 case dw_val_class_fde_ref:
9443 size += DWARF_OFFSET_SIZE;
9444 break;
9445 case dw_val_class_lbl_id:
9446 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9447 {
9448 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9449 size += size_of_uleb128 (AT_index (a));
9450 }
9451 else
9452 size += DWARF2_ADDR_SIZE;
9453 break;
9454 case dw_val_class_lineptr:
9455 case dw_val_class_macptr:
9456 case dw_val_class_loclistsptr:
9457 size += DWARF_OFFSET_SIZE;
9458 break;
9459 case dw_val_class_str:
9460 form = AT_string_form (a);
9461 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9462 size += DWARF_OFFSET_SIZE;
9463 else if (form == DW_FORM_GNU_str_index)
9464 size += size_of_uleb128 (AT_index (a));
9465 else
9466 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9467 break;
9468 case dw_val_class_file:
9469 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9470 break;
9471 case dw_val_class_data8:
9472 size += 8;
9473 break;
9474 case dw_val_class_vms_delta:
9475 size += DWARF_OFFSET_SIZE;
9476 break;
9477 case dw_val_class_high_pc:
9478 size += DWARF2_ADDR_SIZE;
9479 break;
9480 case dw_val_class_discr_value:
9481 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9482 break;
9483 case dw_val_class_discr_list:
9484 {
9485 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9486
9487 /* This is a block, so we have the block length and then its
9488 data. */
9489 size += constant_size (block_size) + block_size;
9490 }
9491 break;
9492 default:
9493 gcc_unreachable ();
9494 }
9495 }
9496
9497 return size;
9498 }
9499
9500 /* Size the debugging information associated with a given DIE. Visits the
9501 DIE's children recursively. Updates the global variable next_die_offset on
9502 each time through. Uses the current value of next_die_offset to update the
9503 die_offset field in each DIE. */
9504
9505 static void
9506 calc_die_sizes (dw_die_ref die)
9507 {
9508 dw_die_ref c;
9509
9510 gcc_assert (die->die_offset == 0
9511 || (unsigned long int) die->die_offset == next_die_offset);
9512 die->die_offset = next_die_offset;
9513 next_die_offset += size_of_die (die);
9514
9515 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9516
9517 if (die->die_child != NULL)
9518 /* Count the null byte used to terminate sibling lists. */
9519 next_die_offset += 1;
9520 }
9521
9522 /* Size just the base type children at the start of the CU.
9523 This is needed because build_abbrev_table needs to size location
9524 expressions, and sizing of type-based stack ops needs to know the
9525 die_offset values for the base types. */
9526
9527 static void
9528 calc_base_type_die_sizes (void)
9529 {
9530 unsigned long die_offset = (dwarf_split_debug_info
9531 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9532 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9533 unsigned int i;
9534 dw_die_ref base_type;
9535 #if ENABLE_ASSERT_CHECKING
9536 dw_die_ref prev = comp_unit_die ()->die_child;
9537 #endif
9538
9539 die_offset += size_of_die (comp_unit_die ());
9540 for (i = 0; base_types.iterate (i, &base_type); i++)
9541 {
9542 #if ENABLE_ASSERT_CHECKING
9543 gcc_assert (base_type->die_offset == 0
9544 && prev->die_sib == base_type
9545 && base_type->die_child == NULL
9546 && base_type->die_abbrev);
9547 prev = base_type;
9548 #endif
9549 if (abbrev_opt_start
9550 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9551 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9552 base_type->die_offset = die_offset;
9553 die_offset += size_of_die (base_type);
9554 }
9555 }
9556
9557 /* Set the marks for a die and its children. We do this so
9558 that we know whether or not a reference needs to use FORM_ref_addr; only
9559 DIEs in the same CU will be marked. We used to clear out the offset
9560 and use that as the flag, but ran into ordering problems. */
9561
9562 static void
9563 mark_dies (dw_die_ref die)
9564 {
9565 dw_die_ref c;
9566
9567 gcc_assert (!die->die_mark);
9568
9569 die->die_mark = 1;
9570 FOR_EACH_CHILD (die, c, mark_dies (c));
9571 }
9572
9573 /* Clear the marks for a die and its children. */
9574
9575 static void
9576 unmark_dies (dw_die_ref die)
9577 {
9578 dw_die_ref c;
9579
9580 if (! use_debug_types)
9581 gcc_assert (die->die_mark);
9582
9583 die->die_mark = 0;
9584 FOR_EACH_CHILD (die, c, unmark_dies (c));
9585 }
9586
9587 /* Clear the marks for a die, its children and referred dies. */
9588
9589 static void
9590 unmark_all_dies (dw_die_ref die)
9591 {
9592 dw_die_ref c;
9593 dw_attr_node *a;
9594 unsigned ix;
9595
9596 if (!die->die_mark)
9597 return;
9598 die->die_mark = 0;
9599
9600 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9601
9602 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9603 if (AT_class (a) == dw_val_class_die_ref)
9604 unmark_all_dies (AT_ref (a));
9605 }
9606
9607 /* Calculate if the entry should appear in the final output file. It may be
9608 from a pruned type. */
9609
9610 static bool
9611 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9612 {
9613 /* By limiting gnu pubnames to definitions only, gold can generate a
9614 gdb index without entries for declarations, which don't include
9615 enough information to be useful. */
9616 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9617 return false;
9618
9619 if (table == pubname_table)
9620 {
9621 /* Enumerator names are part of the pubname table, but the
9622 parent DW_TAG_enumeration_type die may have been pruned.
9623 Don't output them if that is the case. */
9624 if (p->die->die_tag == DW_TAG_enumerator &&
9625 (p->die->die_parent == NULL
9626 || !p->die->die_parent->die_perennial_p))
9627 return false;
9628
9629 /* Everything else in the pubname table is included. */
9630 return true;
9631 }
9632
9633 /* The pubtypes table shouldn't include types that have been
9634 pruned. */
9635 return (p->die->die_offset != 0
9636 || !flag_eliminate_unused_debug_types);
9637 }
9638
9639 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9640 generated for the compilation unit. */
9641
9642 static unsigned long
9643 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9644 {
9645 unsigned long size;
9646 unsigned i;
9647 pubname_entry *p;
9648 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9649
9650 size = DWARF_PUBNAMES_HEADER_SIZE;
9651 FOR_EACH_VEC_ELT (*names, i, p)
9652 if (include_pubname_in_output (names, p))
9653 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9654
9655 size += DWARF_OFFSET_SIZE;
9656 return size;
9657 }
9658
9659 /* Return the size of the information in the .debug_aranges section. */
9660
9661 static unsigned long
9662 size_of_aranges (void)
9663 {
9664 unsigned long size;
9665
9666 size = DWARF_ARANGES_HEADER_SIZE;
9667
9668 /* Count the address/length pair for this compilation unit. */
9669 if (text_section_used)
9670 size += 2 * DWARF2_ADDR_SIZE;
9671 if (cold_text_section_used)
9672 size += 2 * DWARF2_ADDR_SIZE;
9673 if (have_multiple_function_sections)
9674 {
9675 unsigned fde_idx;
9676 dw_fde_ref fde;
9677
9678 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9679 {
9680 if (DECL_IGNORED_P (fde->decl))
9681 continue;
9682 if (!fde->in_std_section)
9683 size += 2 * DWARF2_ADDR_SIZE;
9684 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9685 size += 2 * DWARF2_ADDR_SIZE;
9686 }
9687 }
9688
9689 /* Count the two zero words used to terminate the address range table. */
9690 size += 2 * DWARF2_ADDR_SIZE;
9691 return size;
9692 }
9693
9694 /* Select the encoding of an attribute value. */
9695
9696 static enum dwarf_form
9697 value_format (dw_attr_node *a)
9698 {
9699 switch (AT_class (a))
9700 {
9701 case dw_val_class_addr:
9702 /* Only very few attributes allow DW_FORM_addr. */
9703 switch (a->dw_attr)
9704 {
9705 case DW_AT_low_pc:
9706 case DW_AT_high_pc:
9707 case DW_AT_entry_pc:
9708 case DW_AT_trampoline:
9709 return (AT_index (a) == NOT_INDEXED
9710 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9711 default:
9712 break;
9713 }
9714 switch (DWARF2_ADDR_SIZE)
9715 {
9716 case 1:
9717 return DW_FORM_data1;
9718 case 2:
9719 return DW_FORM_data2;
9720 case 4:
9721 return DW_FORM_data4;
9722 case 8:
9723 return DW_FORM_data8;
9724 default:
9725 gcc_unreachable ();
9726 }
9727 case dw_val_class_loc_list:
9728 case dw_val_class_view_list:
9729 if (dwarf_split_debug_info
9730 && dwarf_version >= 5
9731 && AT_loc_list (a)->num_assigned)
9732 return DW_FORM_loclistx;
9733 /* FALLTHRU */
9734 case dw_val_class_range_list:
9735 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo, but
9736 use DW_FORM_sec_offset in .debug_info, which is shorter if we care about the
9737 sizes of .debug* sections in shared libraries and executables and don't
9738 take into account relocations that affect just relocatable objects - for
9739 DW_FORM_rnglistx we'd have to emit an offset table in the .debug_rnglists
9740 section. */
9741 if (dwarf_split_debug_info
9742 && dwarf_version >= 5
9743 && AT_class (a) == dw_val_class_range_list
9744 && rnglist_idx
9745 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9746 return DW_FORM_rnglistx;
9747 if (dwarf_version >= 4)
9748 return DW_FORM_sec_offset;
9749 /* FALLTHRU */
9750 case dw_val_class_vms_delta:
9751 case dw_val_class_offset:
9752 switch (DWARF_OFFSET_SIZE)
9753 {
9754 case 4:
9755 return DW_FORM_data4;
9756 case 8:
9757 return DW_FORM_data8;
9758 default:
9759 gcc_unreachable ();
9760 }
9761 case dw_val_class_loc:
9762 if (dwarf_version >= 4)
9763 return DW_FORM_exprloc;
9764 switch (constant_size (size_of_locs (AT_loc (a))))
9765 {
9766 case 1:
9767 return DW_FORM_block1;
9768 case 2:
9769 return DW_FORM_block2;
9770 case 4:
9771 return DW_FORM_block4;
9772 default:
9773 gcc_unreachable ();
9774 }
9775 case dw_val_class_const:
9776 return DW_FORM_sdata;
9777 case dw_val_class_unsigned_const:
9778 switch (constant_size (AT_unsigned (a)))
9779 {
9780 case 1:
9781 return DW_FORM_data1;
9782 case 2:
9783 return DW_FORM_data2;
9784 case 4:
9785 /* In DWARF3 DW_AT_data_member_location with
9786 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9787 constant, so we need to use DW_FORM_udata if we need
9788 a large constant. */
9789 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9790 return DW_FORM_udata;
9791 return DW_FORM_data4;
9792 case 8:
9793 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9794 return DW_FORM_udata;
9795 return DW_FORM_data8;
9796 default:
9797 gcc_unreachable ();
9798 }
9799 case dw_val_class_const_implicit:
9800 case dw_val_class_unsigned_const_implicit:
9801 case dw_val_class_file_implicit:
9802 return DW_FORM_implicit_const;
9803 case dw_val_class_const_double:
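 /* A const_double holds two HOST_WIDE_INTs, so the chosen data form must
    hold 2 * HOST_BITS_PER_WIDE_INT bits; when no fixed-size data form is
    large enough, fall back to a block form. */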
9804 switch (HOST_BITS_PER_WIDE_INT)
9805 {
9806 case 8:
9807 return DW_FORM_data2;
9808 case 16:
9809 return DW_FORM_data4;
9810 case 32:
9811 return DW_FORM_data8;
9812 case 64:
9813 if (dwarf_version >= 5)
9814 return DW_FORM_data16;
9815 /* FALLTHRU */
9816 default:
9817 return DW_FORM_block1;
9818 }
9819 case dw_val_class_wide_int:
9820 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9821 {
9822 case 8:
9823 return DW_FORM_data1;
9824 case 16:
9825 return DW_FORM_data2;
9826 case 32:
9827 return DW_FORM_data4;
9828 case 64:
9829 return DW_FORM_data8;
9830 case 128:
9831 if (dwarf_version >= 5)
9832 return DW_FORM_data16;
9833 /* FALLTHRU */
9834 default:
9835 return DW_FORM_block1;
9836 }
9837 case dw_val_class_symview:
9838 /* ??? We might use uleb128, but then we'd have to compute
9839 .debug_info offsets in the assembler. */
9840 if (symview_upper_bound <= 0xff)
9841 return DW_FORM_data1;
9842 else if (symview_upper_bound <= 0xffff)
9843 return DW_FORM_data2;
9844 else if (symview_upper_bound <= 0xffffffff)
9845 return DW_FORM_data4;
9846 else
9847 return DW_FORM_data8;
9848 case dw_val_class_vec:
9849 switch (constant_size (a->dw_attr_val.v.val_vec.length
9850 * a->dw_attr_val.v.val_vec.elt_size))
9851 {
9852 case 1:
9853 return DW_FORM_block1;
9854 case 2:
9855 return DW_FORM_block2;
9856 case 4:
9857 return DW_FORM_block4;
9858 default:
9859 gcc_unreachable ();
9860 }
9861 case dw_val_class_flag:
9862 if (dwarf_version >= 4)
9863 {
9864 /* Currently all add_AT_flag calls pass in 1 as last argument,
9865 so DW_FORM_flag_present can be used. If that ever changes,
9866 we'll need to use DW_FORM_flag and have some optimization
9867 in build_abbrev_table that will change those to
9868 DW_FORM_flag_present if it is set to 1 in all DIEs using
9869 the same abbrev entry. */
9870 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9871 return DW_FORM_flag_present;
9872 }
9873 return DW_FORM_flag;
9874 case dw_val_class_die_ref:
9875 if (AT_ref_external (a))
9876 {
9877 if (AT_ref (a)->comdat_type_p)
9878 return DW_FORM_ref_sig8;
9879 else
9880 return DW_FORM_ref_addr;
9881 }
9882 else
9883 return DW_FORM_ref;
9884 case dw_val_class_fde_ref:
9885 return DW_FORM_data;
9886 case dw_val_class_lbl_id:
9887 return (AT_index (a) == NOT_INDEXED
9888 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9889 case dw_val_class_lineptr:
9890 case dw_val_class_macptr:
9891 case dw_val_class_loclistsptr:
9892 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9893 case dw_val_class_str:
9894 return AT_string_form (a);
9895 case dw_val_class_file:
9896 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9897 {
9898 case 1:
9899 return DW_FORM_data1;
9900 case 2:
9901 return DW_FORM_data2;
9902 case 4:
9903 return DW_FORM_data4;
9904 default:
9905 gcc_unreachable ();
9906 }
9907
9908 case dw_val_class_data8:
9909 return DW_FORM_data8;
9910
9911 case dw_val_class_high_pc:
9912 switch (DWARF2_ADDR_SIZE)
9913 {
9914 case 1:
9915 return DW_FORM_data1;
9916 case 2:
9917 return DW_FORM_data2;
9918 case 4:
9919 return DW_FORM_data4;
9920 case 8:
9921 return DW_FORM_data8;
9922 default:
9923 gcc_unreachable ();
9924 }
9925
9926 case dw_val_class_discr_value:
9927 return (a->dw_attr_val.v.val_discr_value.pos
9928 ? DW_FORM_udata
9929 : DW_FORM_sdata);
9930 case dw_val_class_discr_list:
9931 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9932 {
9933 case 1:
9934 return DW_FORM_block1;
9935 case 2:
9936 return DW_FORM_block2;
9937 case 4:
9938 return DW_FORM_block4;
9939 default:
9940 gcc_unreachable ();
9941 }
9942
9943 default:
9944 gcc_unreachable ();
9945 }
9946 }
9947
9948 /* Output the encoding of an attribute value. */
9949
9950 static void
9951 output_value_format (dw_attr_node *a)
9952 {
9953 enum dwarf_form form = value_format (a);
9954
9955 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9956 }
9957
9958 /* Given a die and id, produce the appropriate abbreviations. */
9959
9960 static void
9961 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9962 {
9963 unsigned ix;
9964 dw_attr_node *a_attr;
9965
9966 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9967 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9968 dwarf_tag_name (abbrev->die_tag));
9969
9970 if (abbrev->die_child != NULL)
9971 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9972 else
9973 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9974
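 /* Emit each attribute name/form pair. For DW_FORM_implicit_const the
    constant value itself is stored here in .debug_abbrev rather than in
    the DIE. */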
9975 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9976 {
9977 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9978 dwarf_attr_name (a_attr->dw_attr));
9979 output_value_format (a_attr);
9980 if (value_format (a_attr) == DW_FORM_implicit_const)
9981 {
9982 if (AT_class (a_attr) == dw_val_class_file_implicit)
9983 {
9984 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9985 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9986 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9987 }
9988 else
9989 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9990 }
9991 }
9992
9993 dw2_asm_output_data (1, 0, NULL);
9994 dw2_asm_output_data (1, 0, NULL);
9995 }
9996
9997
9998 /* Output the .debug_abbrev section which defines the DIE abbreviation
9999 table. */
10000
10001 static void
10002 output_abbrev_section (void)
10003 {
10004 unsigned int abbrev_id;
10005 dw_die_ref abbrev;
10006
10007 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10008 if (abbrev_id != 0)
10009 output_die_abbrevs (abbrev_id, abbrev);
10010
10011 /* Terminate the table. */
10012 dw2_asm_output_data (1, 0, NULL);
10013 }
10014
10015 /* Return a new location list, given the begin and end range, and the
10016 expression. */
10017
10018 static inline dw_loc_list_ref
10019 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10020 const char *end, var_loc_view vend,
10021 const char *section)
10022 {
10023 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10024
10025 retlist->begin = begin;
10026 retlist->begin_entry = NULL;
10027 retlist->end = end;
10028 retlist->expr = expr;
10029 retlist->section = section;
10030 retlist->vbegin = vbegin;
10031 retlist->vend = vend;
10032
10033 return retlist;
10034 }
10035
10036 /* Return true iff there's any nonzero view number in the loc list.
10037
10038 ??? When views are not enabled, we'll often extend a single range
10039 to the entire function, so that we emit a single location
10040 expression rather than a location list. With views, even with a
10041 single range, we'll output a list if start or end have a nonzero
10042 view. If we change this, we may want to stop splitting a single
10043 range in dw_loc_list just because of a nonzero view, even if it
10044 straddles across hot/cold partitions. */
10045
10046 static bool
10047 loc_list_has_views (dw_loc_list_ref list)
10048 {
10049 if (!debug_variable_location_views)
10050 return false;
10051
10052 for (dw_loc_list_ref loc = list;
10053 loc != NULL; loc = loc->dw_loc_next)
10054 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10055 return true;
10056
10057 return false;
10058 }
10059
10060 /* Generate a new internal symbol for this location list node, if it
10061 hasn't got one yet. */
10062
10063 static inline void
10064 gen_llsym (dw_loc_list_ref list)
10065 {
10066 gcc_assert (!list->ll_symbol);
10067 list->ll_symbol = gen_internal_sym ("LLST");
10068
10069 if (!loc_list_has_views (list))
10070 return;
10071
10072 if (dwarf2out_locviews_in_attribute ())
10073 {
10074 /* Use the same label_num for the view list. */
10075 label_num--;
10076 list->vl_symbol = gen_internal_sym ("LVUS");
10077 }
10078 else
10079 list->vl_symbol = list->ll_symbol;
10080 }
10081
10082 /* Generate a symbol for the list, but only if we really want to emit
10083 it as a list. */
10084
10085 static inline void
10086 maybe_gen_llsym (dw_loc_list_ref list)
10087 {
10088 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10089 return;
10090
10091 gen_llsym (list);
10092 }
10093
10094 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10095 NULL, don't consider size of the location expression. If we're not
10096 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10097 representation in *SIZEP. */
10098
10099 static bool
10100 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10101 {
10102 /* Don't output an entry that starts and ends at the same address. */
10103 if (strcmp (curr->begin, curr->end) == 0
10104 && curr->vbegin == curr->vend && !curr->force)
10105 return true;
10106
10107 if (!sizep)
10108 return false;
10109
10110 unsigned long size = size_of_locs (curr->expr);
10111
10112 /* If the expression is too large, drop it on the floor. We could
10113 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10114 in the expression, but >= 64KB expressions for a single value
10115 in a single range are unlikely to be very useful. */
10116 if (dwarf_version < 5 && size > 0xffff)
10117 return true;
10118
10119 *sizep = size;
10120
10121 return false;
10122 }
10123
10124 /* Output a view pair loclist entry for CURR, if it requires one. */
10125
10126 static void
10127 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10128 {
10129 if (!dwarf2out_locviews_in_loclist ())
10130 return;
10131
10132 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10133 return;
10134
10135 #ifdef DW_LLE_view_pair
10136 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10137
10138 if (dwarf2out_as_locview_support)
10139 {
10140 if (ZERO_VIEW_P (curr->vbegin))
10141 dw2_asm_output_data_uleb128 (0, "Location view begin");
10142 else
10143 {
10144 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10145 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10146 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10147 }
10148
10149 if (ZERO_VIEW_P (curr->vend))
10150 dw2_asm_output_data_uleb128 (0, "Location view end");
10151 else
10152 {
10153 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10154 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10155 dw2_asm_output_symname_uleb128 (label, "Location view end");
10156 }
10157 }
10158 else
10159 {
10160 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10161 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10162 }
10163 #endif /* DW_LLE_view_pair */
10164
10165 return;
10166 }
10167
10168 /* Output the location list given to us. */
10169
10170 static void
10171 output_loc_list (dw_loc_list_ref list_head)
10172 {
10173 int vcount = 0, lcount = 0;
10174
10175 if (list_head->emitted)
10176 return;
10177 list_head->emitted = true;
10178
10179 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10180 {
10181 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10182
10183 for (dw_loc_list_ref curr = list_head; curr != NULL;
10184 curr = curr->dw_loc_next)
10185 {
10186 unsigned long size;
10187
10188 if (skip_loc_list_entry (curr, &size))
10189 continue;
10190
10191 vcount++;
10192
10193 /* ?? dwarf_split_debug_info? */
10194 if (dwarf2out_as_locview_support)
10195 {
10196 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10197
10198 if (!ZERO_VIEW_P (curr->vbegin))
10199 {
10200 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10201 dw2_asm_output_symname_uleb128 (label,
10202 "View list begin (%s)",
10203 list_head->vl_symbol);
10204 }
10205 else
10206 dw2_asm_output_data_uleb128 (0,
10207 "View list begin (%s)",
10208 list_head->vl_symbol);
10209
10210 if (!ZERO_VIEW_P (curr->vend))
10211 {
10212 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10213 dw2_asm_output_symname_uleb128 (label,
10214 "View list end (%s)",
10215 list_head->vl_symbol);
10216 }
10217 else
10218 dw2_asm_output_data_uleb128 (0,
10219 "View list end (%s)",
10220 list_head->vl_symbol);
10221 }
10222 else
10223 {
10224 dw2_asm_output_data_uleb128 (curr->vbegin,
10225 "View list begin (%s)",
10226 list_head->vl_symbol);
10227 dw2_asm_output_data_uleb128 (curr->vend,
10228 "View list end (%s)",
10229 list_head->vl_symbol);
10230 }
10231 }
10232 }
10233
10234 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10235
10236 const char *last_section = NULL;
10237 const char *base_label = NULL;
10238
10239 /* Walk the location list, and output each range + expression. */
10240 for (dw_loc_list_ref curr = list_head; curr != NULL;
10241 curr = curr->dw_loc_next)
10242 {
10243 unsigned long size;
10244
10245 /* Skip this entry? If we skip it here, we must skip it in the
10246 view list above as well. */
10247 if (skip_loc_list_entry (curr, &size))
10248 continue;
10249
10250 lcount++;
10251
10252 if (dwarf_version >= 5)
10253 {
10254 if (dwarf_split_debug_info)
10255 {
10256 dwarf2out_maybe_output_loclist_view_pair (curr);
10257 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10258 uleb128 index into .debug_addr and uleb128 length. */
10259 dw2_asm_output_data (1, DW_LLE_startx_length,
10260 "DW_LLE_startx_length (%s)",
10261 list_head->ll_symbol);
10262 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10263 "Location list range start index "
10264 "(%s)", curr->begin);
10265 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10266 For that case we probably need to emit DW_LLE_startx_endx,
10267 but we'd need 2 .debug_addr entries rather than just one. */
10268 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10269 "Location list length (%s)",
10270 list_head->ll_symbol);
10271 }
10272 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10273 {
10274 dwarf2out_maybe_output_loclist_view_pair (curr);
10275 /* If all code is in the .text section, the base address is
10276 already provided by the CU attributes. Use
10277 DW_LLE_offset_pair where both addresses are uleb128 encoded
10278 offsets against that base. */
10279 dw2_asm_output_data (1, DW_LLE_offset_pair,
10280 "DW_LLE_offset_pair (%s)",
10281 list_head->ll_symbol);
10282 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10283 "Location list begin address (%s)",
10284 list_head->ll_symbol);
10285 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10286 "Location list end address (%s)",
10287 list_head->ll_symbol);
10288 }
10289 else if (HAVE_AS_LEB128)
10290 {
10291 /* Otherwise, find out how many consecutive entries could share
10292 the same base entry. If just one, emit DW_LLE_start_length,
10293 otherwise emit DW_LLE_base_address for the base address
10294 followed by a series of DW_LLE_offset_pair. */
10295 if (last_section == NULL || curr->section != last_section)
10296 {
10297 dw_loc_list_ref curr2;
10298 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10299 curr2 = curr2->dw_loc_next)
10300 {
10301 if (strcmp (curr2->begin, curr2->end) == 0
10302 && !curr2->force)
10303 continue;
10304 break;
10305 }
10306 if (curr2 == NULL || curr->section != curr2->section)
10307 last_section = NULL;
10308 else
10309 {
10310 last_section = curr->section;
10311 base_label = curr->begin;
10312 dw2_asm_output_data (1, DW_LLE_base_address,
10313 "DW_LLE_base_address (%s)",
10314 list_head->ll_symbol);
10315 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10316 "Base address (%s)",
10317 list_head->ll_symbol);
10318 }
10319 }
10320 /* Only one entry with the same base address. Use
10321 DW_LLE_start_length with absolute address and uleb128
10322 length. */
10323 if (last_section == NULL)
10324 {
10325 dwarf2out_maybe_output_loclist_view_pair (curr);
10326 dw2_asm_output_data (1, DW_LLE_start_length,
10327 "DW_LLE_start_length (%s)",
10328 list_head->ll_symbol);
10329 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10330 "Location list begin address (%s)",
10331 list_head->ll_symbol);
10332 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10333 "Location list length "
10334 "(%s)", list_head->ll_symbol);
10335 }
10336 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10337 DW_LLE_base_address. */
10338 else
10339 {
10340 dwarf2out_maybe_output_loclist_view_pair (curr);
10341 dw2_asm_output_data (1, DW_LLE_offset_pair,
10342 "DW_LLE_offset_pair (%s)",
10343 list_head->ll_symbol);
10344 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10345 "Location list begin address "
10346 "(%s)", list_head->ll_symbol);
10347 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10348 "Location list end address "
10349 "(%s)", list_head->ll_symbol);
10350 }
10351 }
10352 /* The assembler does not support .uleb128 directive. Emit
10353 DW_LLE_start_end with a pair of absolute addresses. */
10354 else
10355 {
10356 dwarf2out_maybe_output_loclist_view_pair (curr);
10357 dw2_asm_output_data (1, DW_LLE_start_end,
10358 "DW_LLE_start_end (%s)",
10359 list_head->ll_symbol);
10360 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10361 "Location list begin address (%s)",
10362 list_head->ll_symbol);
10363 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10364 "Location list end address (%s)",
10365 list_head->ll_symbol);
10366 }
10367 }
10368 else if (dwarf_split_debug_info)
10369 {
10370 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10371 and 4 byte length. */
10372 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10373 "Location list start/length entry (%s)",
10374 list_head->ll_symbol);
10375 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10376 "Location list range start index (%s)",
10377 curr->begin);
10378 /* The length field is 4 bytes. If we ever need to support
10379 an 8-byte length, we can add a new DW_LLE code or fall back
10380 to DW_LLE_GNU_start_end_entry. */
10381 dw2_asm_output_delta (4, curr->end, curr->begin,
10382 "Location list range length (%s)",
10383 list_head->ll_symbol);
10384 }
10385 else if (!have_multiple_function_sections)
10386 {
10387 /* Pair of relative addresses against start of text section. */
10388 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10389 "Location list begin address (%s)",
10390 list_head->ll_symbol);
10391 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10392 "Location list end address (%s)",
10393 list_head->ll_symbol);
10394 }
10395 else
10396 {
10397 /* Pair of absolute addresses. */
10398 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10399 "Location list begin address (%s)",
10400 list_head->ll_symbol);
10401 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10402 "Location list end address (%s)",
10403 list_head->ll_symbol);
10404 }
10405
10406 /* Output the block length for this list of location operations. */
10407 if (dwarf_version >= 5)
10408 dw2_asm_output_data_uleb128 (size, "Location expression size");
10409 else
10410 {
10411 gcc_assert (size <= 0xffff);
10412 dw2_asm_output_data (2, size, "Location expression size");
10413 }
10414
10415 output_loc_sequence (curr->expr, -1);
10416 }
10417
10418 /* And finally list termination. */
10419 if (dwarf_version >= 5)
10420 dw2_asm_output_data (1, DW_LLE_end_of_list,
10421 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10422 else if (dwarf_split_debug_info)
10423 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10424 "Location list terminator (%s)",
10425 list_head->ll_symbol);
10426 else
10427 {
10428 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10429 "Location list terminator begin (%s)",
10430 list_head->ll_symbol);
10431 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10432 "Location list terminator end (%s)",
10433 list_head->ll_symbol);
10434 }
10435
10436 gcc_assert (!list_head->vl_symbol
10437 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10438 }
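
/* Illustrative sketch only (labels and the expression are hypothetical,
   not something this file emits verbatim): for a variable whose location
   list has a single range in a one-section CU, the DWARF 5 path above
   produces a .debug_loclists stream roughly like

       .byte    0x4              # DW_LLE_offset_pair
       .uleb128 .LVL1-.Ltext0    # begin offset from the CU base
       .uleb128 .LVL2-.Ltext0    # end offset from the CU base
       .uleb128 1                # location expression size
       .byte    0x50             # DW_OP_reg0
       .byte    0x0              # DW_LLE_end_of_list

   where 0x04 and 0x00 are the DWARF 5 DW_LLE_offset_pair and
   DW_LLE_end_of_list codes.  */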
10439
10440 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10441 section. Emit a relocated reference if val_entry is NULL, otherwise,
10442 emit an indirect reference. */
10443
10444 static void
10445 output_range_list_offset (dw_attr_node *a)
10446 {
10447 const char *name = dwarf_attr_name (a->dw_attr);
10448
10449 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10450 {
10451 if (dwarf_version >= 5)
10452 {
10453 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10454 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10455 debug_ranges_section, "%s", name);
10456 }
10457 else
10458 {
10459 char *p = strchr (ranges_section_label, '\0');
10460 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10461 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10462 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10463 debug_ranges_section, "%s", name);
10464 *p = '\0';
10465 }
10466 }
10467 else if (dwarf_version >= 5)
10468 {
10469 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10470 gcc_assert (rnglist_idx);
10471 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10472 }
10473 else
10474 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10475 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10476 "%s (offset from %s)", name, ranges_section_label);
10477 }
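
/* Example with assumed sizes: for 8-byte addresses and a DW_AT_ranges
   attribute whose val_offset is 3, the pre-DWARF-5 branches above refer
   to ranges_section_label + 0x30, i.e. val_offset * 2 * DWARF2_ADDR_SIZE
   bytes into .debug_ranges, because every entry there is a begin/end
   address pair.  */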
10478
10479 /* Output the offset into the debug_loc section. */
10480
10481 static void
10482 output_loc_list_offset (dw_attr_node *a)
10483 {
10484 char *sym = AT_loc_list (a)->ll_symbol;
10485
10486 gcc_assert (sym);
10487 if (!dwarf_split_debug_info)
10488 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10489 "%s", dwarf_attr_name (a->dw_attr));
10490 else if (dwarf_version >= 5)
10491 {
10492 gcc_assert (AT_loc_list (a)->num_assigned);
10493 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10494 dwarf_attr_name (a->dw_attr),
10495 sym);
10496 }
10497 else
10498 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10499 "%s", dwarf_attr_name (a->dw_attr));
10500 }
10501
10502 /* Output the offset into the debug_loc section. */
10503
10504 static void
10505 output_view_list_offset (dw_attr_node *a)
10506 {
10507 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10508
10509 gcc_assert (sym);
10510 if (dwarf_split_debug_info)
10511 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10512 "%s", dwarf_attr_name (a->dw_attr));
10513 else
10514 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10515 "%s", dwarf_attr_name (a->dw_attr));
10516 }
10517
10518 /* Output an attribute's index or value appropriately. */
10519
10520 static void
10521 output_attr_index_or_value (dw_attr_node *a)
10522 {
10523 const char *name = dwarf_attr_name (a->dw_attr);
10524
10525 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10526 {
10527 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10528 return;
10529 }
10530 switch (AT_class (a))
10531 {
10532 case dw_val_class_addr:
10533 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10534 break;
10535 case dw_val_class_high_pc:
10536 case dw_val_class_lbl_id:
10537 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10538 break;
10539 default:
10540 gcc_unreachable ();
10541 }
10542 }
10543
10544 /* Output a type signature. */
10545
10546 static inline void
10547 output_signature (const char *sig, const char *name)
10548 {
10549 int i;
10550
10551 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10552 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10553 }
10554
10555 /* Output a discriminant value. */
10556
10557 static inline void
10558 output_discr_value (dw_discr_value *discr_value, const char *name)
10559 {
10560 if (discr_value->pos)
10561 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10562 else
10563 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10564 }
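
/* LEB128 primer for the *_uleb128 / *_sleb128 calls used throughout this
   file, using the standard DWARF examples: unsigned 624485 encodes as the
   bytes 0xe5 0x8e 0x26 (7 data bits per byte, least significant group
   first, high bit set on every byte but the last); signed -2 encodes as
   the single byte 0x7e, and signed -624485 as 0x9b 0xf1 0x59.  */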
10565
10566 /* Output the DIE and its attributes. Called recursively to generate
10567 the definitions of each child DIE. */
10568
10569 static void
10570 output_die (dw_die_ref die)
10571 {
10572 dw_attr_node *a;
10573 dw_die_ref c;
10574 unsigned long size;
10575 unsigned ix;
10576
10577 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10578 (unsigned long)die->die_offset,
10579 dwarf_tag_name (die->die_tag));
10580
10581 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10582 {
10583 const char *name = dwarf_attr_name (a->dw_attr);
10584
10585 switch (AT_class (a))
10586 {
10587 case dw_val_class_addr:
10588 output_attr_index_or_value (a);
10589 break;
10590
10591 case dw_val_class_offset:
10592 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10593 "%s", name);
10594 break;
10595
10596 case dw_val_class_range_list:
10597 output_range_list_offset (a);
10598 break;
10599
10600 case dw_val_class_loc:
10601 size = size_of_locs (AT_loc (a));
10602
10603 /* Output the block length for this list of location operations. */
10604 if (dwarf_version >= 4)
10605 dw2_asm_output_data_uleb128 (size, "%s", name);
10606 else
10607 dw2_asm_output_data (constant_size (size), size, "%s", name);
10608
10609 output_loc_sequence (AT_loc (a), -1);
10610 break;
10611
10612 case dw_val_class_const:
10613 /* ??? It would be slightly more efficient to use a scheme like the
10614 one used for unsigned constants below, but gdb 4.x does not sign
10615 extend. Gdb 5.x does sign extend. */
10616 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10617 break;
10618
10619 case dw_val_class_unsigned_const:
10620 {
10621 int csize = constant_size (AT_unsigned (a));
10622 if (dwarf_version == 3
10623 && a->dw_attr == DW_AT_data_member_location
10624 && csize >= 4)
10625 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10626 else
10627 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10628 }
10629 break;
10630
10631 case dw_val_class_symview:
10632 {
10633 int vsize;
10634 if (symview_upper_bound <= 0xff)
10635 vsize = 1;
10636 else if (symview_upper_bound <= 0xffff)
10637 vsize = 2;
10638 else if (symview_upper_bound <= 0xffffffff)
10639 vsize = 4;
10640 else
10641 vsize = 8;
10642 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10643 "%s", name);
10644 }
10645 break;
10646
10647 case dw_val_class_const_implicit:
10648 if (flag_debug_asm)
10649 fprintf (asm_out_file, "\t\t\t%s %s ("
10650 HOST_WIDE_INT_PRINT_DEC ")\n",
10651 ASM_COMMENT_START, name, AT_int (a));
10652 break;
10653
10654 case dw_val_class_unsigned_const_implicit:
10655 if (flag_debug_asm)
10656 fprintf (asm_out_file, "\t\t\t%s %s ("
10657 HOST_WIDE_INT_PRINT_HEX ")\n",
10658 ASM_COMMENT_START, name, AT_unsigned (a));
10659 break;
10660
10661 case dw_val_class_const_double:
10662 {
10663 unsigned HOST_WIDE_INT first, second;
10664
10665 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10666 dw2_asm_output_data (1,
10667 HOST_BITS_PER_DOUBLE_INT
10668 / HOST_BITS_PER_CHAR,
10669 NULL);
10670
10671 if (WORDS_BIG_ENDIAN)
10672 {
10673 first = a->dw_attr_val.v.val_double.high;
10674 second = a->dw_attr_val.v.val_double.low;
10675 }
10676 else
10677 {
10678 first = a->dw_attr_val.v.val_double.low;
10679 second = a->dw_attr_val.v.val_double.high;
10680 }
10681
10682 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10683 first, "%s", name);
10684 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10685 second, NULL);
10686 }
10687 break;
10688
10689 case dw_val_class_wide_int:
10690 {
10691 int i;
10692 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10693 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10694 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10695 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10696 * l, NULL);
10697
10698 if (WORDS_BIG_ENDIAN)
10699 for (i = len - 1; i >= 0; --i)
10700 {
10701 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10702 "%s", name);
10703 name = "";
10704 }
10705 else
10706 for (i = 0; i < len; ++i)
10707 {
10708 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10709 "%s", name);
10710 name = "";
10711 }
10712 }
10713 break;
10714
10715 case dw_val_class_vec:
10716 {
10717 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10718 unsigned int len = a->dw_attr_val.v.val_vec.length;
10719 unsigned int i;
10720 unsigned char *p;
10721
10722 dw2_asm_output_data (constant_size (len * elt_size),
10723 len * elt_size, "%s", name);
10724 if (elt_size > sizeof (HOST_WIDE_INT))
10725 {
10726 elt_size /= 2;
10727 len *= 2;
10728 }
10729 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10730 i < len;
10731 i++, p += elt_size)
10732 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10733 "fp or vector constant word %u", i);
10734 break;
10735 }
10736
10737 case dw_val_class_flag:
10738 if (dwarf_version >= 4)
10739 {
10740 /* Currently all add_AT_flag calls pass in 1 as last argument,
10741 so DW_FORM_flag_present can be used. If that ever changes,
10742 we'll need to use DW_FORM_flag and have some optimization
10743 in build_abbrev_table that will change those to
10744 DW_FORM_flag_present if it is set to 1 in all DIEs using
10745 the same abbrev entry. */
10746 gcc_assert (AT_flag (a) == 1);
10747 if (flag_debug_asm)
10748 fprintf (asm_out_file, "\t\t\t%s %s\n",
10749 ASM_COMMENT_START, name);
10750 break;
10751 }
10752 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10753 break;
10754
10755 case dw_val_class_loc_list:
10756 output_loc_list_offset (a);
10757 break;
10758
10759 case dw_val_class_view_list:
10760 output_view_list_offset (a);
10761 break;
10762
10763 case dw_val_class_die_ref:
10764 if (AT_ref_external (a))
10765 {
10766 if (AT_ref (a)->comdat_type_p)
10767 {
10768 comdat_type_node *type_node
10769 = AT_ref (a)->die_id.die_type_node;
10770
10771 gcc_assert (type_node);
10772 output_signature (type_node->signature, name);
10773 }
10774 else
10775 {
10776 const char *sym = AT_ref (a)->die_id.die_symbol;
10777 int size;
10778
10779 gcc_assert (sym);
10780 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10781 length, whereas in DWARF3 it's always sized as an
10782 offset. */
10783 if (dwarf_version == 2)
10784 size = DWARF2_ADDR_SIZE;
10785 else
10786 size = DWARF_OFFSET_SIZE;
10787 /* ??? We cannot unconditionally output die_offset if
10788 non-zero - others might create references to those
10789 DIEs via symbols.
10790 And we do not clear its DIE offset after outputting it
10791 (and the label refers to the actual DIEs, not to the
10792 DWARF CU unit header, which is what using label + offset
10793 would otherwise require).
10794 ??? This is the reason for the with_offset flag. */
10795 if (AT_ref (a)->with_offset)
10796 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10797 debug_info_section, "%s", name);
10798 else
10799 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10800 name);
10801 }
10802 }
10803 else
10804 {
10805 gcc_assert (AT_ref (a)->die_offset);
10806 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10807 "%s", name);
10808 }
10809 break;
10810
10811 case dw_val_class_fde_ref:
10812 {
10813 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10814
10815 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10816 a->dw_attr_val.v.val_fde_index * 2);
10817 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10818 "%s", name);
10819 }
10820 break;
10821
10822 case dw_val_class_vms_delta:
10823 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10824 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10825 AT_vms_delta2 (a), AT_vms_delta1 (a),
10826 "%s", name);
10827 #else
10828 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10829 AT_vms_delta2 (a), AT_vms_delta1 (a),
10830 "%s", name);
10831 #endif
10832 break;
10833
10834 case dw_val_class_lbl_id:
10835 output_attr_index_or_value (a);
10836 break;
10837
10838 case dw_val_class_lineptr:
10839 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10840 debug_line_section, "%s", name);
10841 break;
10842
10843 case dw_val_class_macptr:
10844 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10845 debug_macinfo_section, "%s", name);
10846 break;
10847
10848 case dw_val_class_loclistsptr:
10849 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10850 debug_loc_section, "%s", name);
10851 break;
10852
10853 case dw_val_class_str:
10854 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10855 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10856 a->dw_attr_val.v.val_str->label,
10857 debug_str_section,
10858 "%s: \"%s\"", name, AT_string (a));
10859 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10860 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10861 a->dw_attr_val.v.val_str->label,
10862 debug_line_str_section,
10863 "%s: \"%s\"", name, AT_string (a));
10864 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10865 dw2_asm_output_data_uleb128 (AT_index (a),
10866 "%s: \"%s\"", name, AT_string (a));
10867 else
10868 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10869 break;
10870
10871 case dw_val_class_file:
10872 {
10873 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10874
10875 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10876 a->dw_attr_val.v.val_file->filename);
10877 break;
10878 }
10879
10880 case dw_val_class_file_implicit:
10881 if (flag_debug_asm)
10882 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10883 ASM_COMMENT_START, name,
10884 maybe_emit_file (a->dw_attr_val.v.val_file),
10885 a->dw_attr_val.v.val_file->filename);
10886 break;
10887
10888 case dw_val_class_data8:
10889 {
10890 int i;
10891
10892 for (i = 0; i < 8; i++)
10893 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10894 i == 0 ? "%s" : NULL, name);
10895 break;
10896 }
10897
10898 case dw_val_class_high_pc:
10899 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10900 get_AT_low_pc (die), "DW_AT_high_pc");
10901 break;
10902
10903 case dw_val_class_discr_value:
10904 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10905 break;
10906
10907 case dw_val_class_discr_list:
10908 {
10909 dw_discr_list_ref list = AT_discr_list (a);
10910 const int size = size_of_discr_list (list);
10911
10912 /* This is a block, so output its length first. */
10913 dw2_asm_output_data (constant_size (size), size,
10914 "%s: block size", name);
10915
10916 for (; list != NULL; list = list->dw_discr_next)
10917 {
10918 /* One byte for the discriminant value descriptor, and then as
10919 many LEB128 numbers as required. */
10920 if (list->dw_discr_range)
10921 dw2_asm_output_data (1, DW_DSC_range,
10922 "%s: DW_DSC_range", name);
10923 else
10924 dw2_asm_output_data (1, DW_DSC_label,
10925 "%s: DW_DSC_label", name);
10926
10927 output_discr_value (&list->dw_discr_lower_bound, name);
10928 if (list->dw_discr_range)
10929 output_discr_value (&list->dw_discr_upper_bound, name);
10930 }
10931 break;
10932 }
10933
10934 default:
10935 gcc_unreachable ();
10936 }
10937 }
10938
10939 FOR_EACH_CHILD (die, c, output_die (c));
10940
10941 /* Add null byte to terminate sibling list. */
10942 if (die->die_child != NULL)
10943 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10944 (unsigned long) die->die_offset);
10945 }
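
/* Rough picture of what output_die produces, with a hypothetical abbrev
   number and attribute values (32-bit DWARF assumed): a DW_TAG_variable
   DIE using abbrev 4 with DW_AT_name in DW_FORM_string and DW_AT_type in
   DW_FORM_ref4 comes out roughly as

       .uleb128 0x4        # (DIE (0x2a) DW_TAG_variable)
       .asciz   "x"        # DW_AT_name
       .4byte   0x38       # DW_AT_type (CU-relative DIE offset)

   followed, for DIEs that have children, by the children's bytes and a
   terminating zero byte.  */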
10946
10947 /* Output the dwarf version number. */
10948
10949 static void
10950 output_dwarf_version ()
10951 {
10952 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10953 views in loclist. That will change eventually. */
10954 if (dwarf_version == 6)
10955 {
10956 static bool once;
10957 if (!once)
10958 {
10959 warning (0,
10960 "-gdwarf-6 is output as version 5 with incompatibilities");
10961 once = true;
10962 }
10963 dw2_asm_output_data (2, 5, "DWARF version number");
10964 }
10965 else
10966 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10967 }
10968
10969 /* Output the compilation unit that appears at the beginning of the
10970 .debug_info section, and precedes the DIE descriptions. */
10971
10972 static void
10973 output_compilation_unit_header (enum dwarf_unit_type ut)
10974 {
10975 if (!XCOFF_DEBUGGING_INFO)
10976 {
10977 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10978 dw2_asm_output_data (4, 0xffffffff,
10979 "Initial length escape value indicating 64-bit DWARF extension");
10980 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10981 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10982 "Length of Compilation Unit Info");
10983 }
10984
10985 output_dwarf_version ();
10986 if (dwarf_version >= 5)
10987 {
10988 const char *name;
10989 switch (ut)
10990 {
10991 case DW_UT_compile: name = "DW_UT_compile"; break;
10992 case DW_UT_type: name = "DW_UT_type"; break;
10993 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10994 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10995 default: gcc_unreachable ();
10996 }
10997 dw2_asm_output_data (1, ut, "%s", name);
10998 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10999 }
11000 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11001 debug_abbrev_section,
11002 "Offset Into Abbrev. Section");
11003 if (dwarf_version < 5)
11004 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11005 }
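
/* Concretely, with 32-bit DWARF the header emitted above is, in order:

     DWARF 2-4:  4-byte unit length, 2-byte version,
                 4-byte .debug_abbrev offset, 1-byte address size
     DWARF 5:    4-byte unit length, 2-byte version, 1-byte unit type
                 (DW_UT_compile etc.), 1-byte address size,
                 4-byte .debug_abbrev offset

   For 64-bit DWARF the length is preceded by the 0xffffffff escape and
   becomes 8 bytes wide.  */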
11006
11007 /* Output the compilation unit DIE and its children. */
11008
11009 static void
11010 output_comp_unit (dw_die_ref die, int output_if_empty,
11011 const unsigned char *dwo_id)
11012 {
11013 const char *secname, *oldsym;
11014 char *tmp;
11015
11016 /* Unless we are outputting the main CU, we may throw away empty ones. */
11017 if (!output_if_empty && die->die_child == NULL)
11018 return;
11019
11020 /* Even if there are no children of this DIE, we must output the information
11021 about the compilation unit. Otherwise, on an empty translation unit, we
11022 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11023 will then complain when examining the file. First mark all the DIEs in
11024 this CU so we know which get local refs. */
11025 mark_dies (die);
11026
11027 external_ref_hash_type *extern_map = optimize_external_refs (die);
11028
11029 /* For now, optimize only the main CU; in order to optimize the rest
11030 we'd need to see all of them earlier. Leave the rest for post-linking
11031 tools like DWZ. */
11032 if (die == comp_unit_die ())
11033 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11034
11035 build_abbrev_table (die, extern_map);
11036
11037 optimize_abbrev_table ();
11038
11039 delete extern_map;
11040
11041 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11042 next_die_offset = (dwo_id
11043 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11044 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11045 calc_die_sizes (die);
11046
11047 oldsym = die->die_id.die_symbol;
11048 if (oldsym && die->comdat_type_p)
11049 {
11050 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11051
11052 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11053 secname = tmp;
11054 die->die_id.die_symbol = NULL;
11055 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11056 }
11057 else
11058 {
11059 switch_to_section (debug_info_section);
11060 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11061 info_section_emitted = true;
11062 }
11063
11064 /* For LTO cross unit DIE refs we want a symbol on the start of the
11065 debuginfo section, not on the CU DIE. */
11066 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11067 {
11068 /* ??? No way to get visibility assembled without a decl. */
11069 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11070 get_identifier (oldsym), char_type_node);
11071 TREE_PUBLIC (decl) = true;
11072 TREE_STATIC (decl) = true;
11073 DECL_ARTIFICIAL (decl) = true;
11074 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11075 DECL_VISIBILITY_SPECIFIED (decl) = true;
11076 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11077 #ifdef ASM_WEAKEN_LABEL
11078 /* We prefer a .weak because that handles duplicates from duplicate
11079 archive members in a graceful way. */
11080 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11081 #else
11082 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11083 #endif
11084 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11085 }
11086
11087 /* Output debugging information. */
11088 output_compilation_unit_header (dwo_id
11089 ? DW_UT_split_compile : DW_UT_compile);
11090 if (dwarf_version >= 5)
11091 {
11092 if (dwo_id != NULL)
11093 for (int i = 0; i < 8; i++)
11094 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11095 }
11096 output_die (die);
11097
11098 /* Leave the marks on the main CU, so we can check them in
11099 output_pubnames. */
11100 if (oldsym)
11101 {
11102 unmark_dies (die);
11103 die->die_id.die_symbol = oldsym;
11104 }
11105 }
11106
11107 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11108 and .debug_pubtypes. This is configured per-target, but can be
11109 overridden by the -gpubnames or -gno-pubnames options. */
11110
11111 static inline bool
11112 want_pubnames (void)
11113 {
11114 if (debug_info_level <= DINFO_LEVEL_TERSE
11115 /* Names and types go to the early debug part only. */
11116 || in_lto_p)
11117 return false;
11118 if (debug_generate_pub_sections != -1)
11119 return debug_generate_pub_sections;
11120 return targetm.want_debug_pub_sections;
11121 }
11122
11123 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11124
11125 static void
11126 add_AT_pubnames (dw_die_ref die)
11127 {
11128 if (want_pubnames ())
11129 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11130 }
11131
11132 /* Add a string attribute value to a skeleton DIE. */
11133
11134 static inline void
11135 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11136 const char *str)
11137 {
11138 dw_attr_node attr;
11139 struct indirect_string_node *node;
11140
11141 if (! skeleton_debug_str_hash)
11142 skeleton_debug_str_hash
11143 = hash_table<indirect_string_hasher>::create_ggc (10);
11144
11145 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11146 find_string_form (node);
11147 if (node->form == DW_FORM_GNU_str_index)
11148 node->form = DW_FORM_strp;
11149
11150 attr.dw_attr = attr_kind;
11151 attr.dw_attr_val.val_class = dw_val_class_str;
11152 attr.dw_attr_val.val_entry = NULL;
11153 attr.dw_attr_val.v.val_str = node;
11154 add_dwarf_attr (die, &attr);
11155 }
11156
11157 /* Helper function to generate top-level dies for skeleton debug_info and
11158 debug_types. */
11159
11160 static void
11161 add_top_level_skeleton_die_attrs (dw_die_ref die)
11162 {
11163 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11164 const char *comp_dir = comp_dir_string ();
11165
11166 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11167 if (comp_dir != NULL)
11168 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11169 add_AT_pubnames (die);
11170 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
11171 }
11172
11173 /* Output skeleton debug sections that point to the dwo file. */
11174
11175 static void
11176 output_skeleton_debug_sections (dw_die_ref comp_unit,
11177 const unsigned char *dwo_id)
11178 {
11179 /* These attributes will be found in the full debug_info section. */
11180 remove_AT (comp_unit, DW_AT_producer);
11181 remove_AT (comp_unit, DW_AT_language);
11182
11183 switch_to_section (debug_skeleton_info_section);
11184 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11185
11186 /* Produce the skeleton compilation-unit header.  This one differs enough
11187 from a normal CU header that it's better not to call
11188 output_compilation_unit_header.  */
11189 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11190 dw2_asm_output_data (4, 0xffffffff,
11191 "Initial length escape value indicating 64-bit "
11192 "DWARF extension");
11193
11194 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11195 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11196 - DWARF_INITIAL_LENGTH_SIZE
11197 + size_of_die (comp_unit),
11198 "Length of Compilation Unit Info");
11199 output_dwarf_version ();
11200 if (dwarf_version >= 5)
11201 {
11202 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11203 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11204 }
11205 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11206 debug_skeleton_abbrev_section,
11207 "Offset Into Abbrev. Section");
11208 if (dwarf_version < 5)
11209 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11210 else
11211 for (int i = 0; i < 8; i++)
11212 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11213
11214 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11215 output_die (comp_unit);
11216
11217 /* Build the skeleton debug_abbrev section. */
11218 switch_to_section (debug_skeleton_abbrev_section);
11219 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11220
11221 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11222
11223 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11224 }
11225
11226 /* Output a comdat type unit DIE and its children. */
11227
11228 static void
11229 output_comdat_type_unit (comdat_type_node *node,
11230 bool early_lto_debug ATTRIBUTE_UNUSED)
11231 {
11232 const char *secname;
11233 char *tmp;
11234 int i;
11235 #if defined (OBJECT_FORMAT_ELF)
11236 tree comdat_key;
11237 #endif
11238
11239 /* First mark all the DIEs in this CU so we know which get local refs. */
11240 mark_dies (node->root_die);
11241
11242 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11243
11244 build_abbrev_table (node->root_die, extern_map);
11245
11246 delete extern_map;
11247 extern_map = NULL;
11248
11249 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11250 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11251 calc_die_sizes (node->root_die);
11252
11253 #if defined (OBJECT_FORMAT_ELF)
11254 if (dwarf_version >= 5)
11255 {
11256 if (!dwarf_split_debug_info)
11257 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11258 else
11259 secname = (early_lto_debug
11260 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11261 }
11262 else if (!dwarf_split_debug_info)
11263 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11264 else
11265 secname = (early_lto_debug
11266 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11267
11268 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11269 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11270 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11271 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11272 comdat_key = get_identifier (tmp);
11273 targetm.asm_out.named_section (secname,
11274 SECTION_DEBUG | SECTION_LINKONCE,
11275 comdat_key);
11276 #else
11277 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11278 sprintf (tmp, (dwarf_version >= 5
11279 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11280 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11281 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11282 secname = tmp;
11283 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11284 #endif
11285
11286 /* Output debugging information. */
11287 output_compilation_unit_header (dwarf_split_debug_info
11288 ? DW_UT_split_type : DW_UT_type);
11289 output_signature (node->signature, "Type Signature");
11290 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11291 "Offset to Type DIE");
11292 output_die (node->root_die);
11293
11294 unmark_dies (node->root_die);
11295 }
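
/* The resulting comdat type unit therefore starts with the usual unit
   header (see output_compilation_unit_header; for DWARF 5 the unit type
   is DW_UT_type or DW_UT_split_type), immediately followed by the 8-byte
   type signature and a DWARF_OFFSET_SIZE offset from the unit header to
   the described type DIE, and then the DIE tree itself.  */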
11296
11297 /* Return the DWARF2/3 pubname associated with a decl. */
11298
11299 static const char *
11300 dwarf2_name (tree decl, int scope)
11301 {
11302 if (DECL_NAMELESS (decl))
11303 return NULL;
11304 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11305 }
11306
11307 /* Add a new entry to .debug_pubnames if appropriate. */
11308
11309 static void
11310 add_pubname_string (const char *str, dw_die_ref die)
11311 {
11312 pubname_entry e;
11313
11314 e.die = die;
11315 e.name = xstrdup (str);
11316 vec_safe_push (pubname_table, e);
11317 }
11318
11319 static void
11320 add_pubname (tree decl, dw_die_ref die)
11321 {
11322 if (!want_pubnames ())
11323 return;
11324
11325 /* Don't add items to the table when we expect that the consumer will have
11326 just read the enclosing die. For example, if the consumer is looking at a
11327 class_member, it will either be inside the class already, or will have just
11328 looked up the class to find the member. Either way, searching the class is
11329 faster than searching the index. */
11330 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11331 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11332 {
11333 const char *name = dwarf2_name (decl, 1);
11334
11335 if (name)
11336 add_pubname_string (name, die);
11337 }
11338 }
11339
11340 /* Add an enumerator to the pubnames section. */
11341
11342 static void
11343 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11344 {
11345 pubname_entry e;
11346
11347 gcc_assert (scope_name);
11348 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11349 e.die = die;
11350 vec_safe_push (pubname_table, e);
11351 }
11352
11353 /* Add a new entry to .debug_pubtypes if appropriate. */
11354
11355 static void
11356 add_pubtype (tree decl, dw_die_ref die)
11357 {
11358 pubname_entry e;
11359
11360 if (!want_pubnames ())
11361 return;
11362
11363 if ((TREE_PUBLIC (decl)
11364 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11365 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11366 {
11367 tree scope = NULL;
11368 const char *scope_name = "";
11369 const char *sep = is_cxx () ? "::" : ".";
11370 const char *name;
11371
11372 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11373 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11374 {
11375 scope_name = lang_hooks.dwarf_name (scope, 1);
11376 if (scope_name != NULL && scope_name[0] != '\0')
11377 scope_name = concat (scope_name, sep, NULL);
11378 else
11379 scope_name = "";
11380 }
11381
11382 if (TYPE_P (decl))
11383 name = type_tag (decl);
11384 else
11385 name = lang_hooks.dwarf_name (decl, 1);
11386
11387 /* If we don't have a name for the type, there's no point in adding
11388 it to the table. */
11389 if (name != NULL && name[0] != '\0')
11390 {
11391 e.die = die;
11392 e.name = concat (scope_name, name, NULL);
11393 vec_safe_push (pubtype_table, e);
11394 }
11395
11396 /* Although it might be more consistent to add the pubinfo for the
11397 enumerators as their dies are created, they should only be added if the
11398 enum type meets the criteria above. So rather than re-check the parent
11399 enum type whenever an enumerator die is created, just output them all
11400 here. This isn't protected by the name conditional because anonymous
11401 enums don't have names. */
11402 if (die->die_tag == DW_TAG_enumeration_type)
11403 {
11404 dw_die_ref c;
11405
11406 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11407 }
11408 }
11409 }
11410
11411 /* Output a single entry in the pubnames table. */
11412
11413 static void
11414 output_pubname (dw_offset die_offset, pubname_entry *entry)
11415 {
11416 dw_die_ref die = entry->die;
11417 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11418
11419 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11420
11421 if (debug_generate_pub_sections == 2)
11422 {
11423 /* This logic follows gdb's method for determining the value of the flag
11424 byte. */
11425 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11426 switch (die->die_tag)
11427 {
11428 case DW_TAG_typedef:
11429 case DW_TAG_base_type:
11430 case DW_TAG_subrange_type:
11431 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11432 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11433 break;
11434 case DW_TAG_enumerator:
11435 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11436 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11437 if (!is_cxx ())
11438 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11439 break;
11440 case DW_TAG_subprogram:
11441 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11442 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11443 if (!is_ada ())
11444 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11445 break;
11446 case DW_TAG_constant:
11447 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11448 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11449 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11450 break;
11451 case DW_TAG_variable:
11452 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11453 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11454 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11455 break;
11456 case DW_TAG_namespace:
11457 case DW_TAG_imported_declaration:
11458 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11459 break;
11460 case DW_TAG_class_type:
11461 case DW_TAG_interface_type:
11462 case DW_TAG_structure_type:
11463 case DW_TAG_union_type:
11464 case DW_TAG_enumeration_type:
11465 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11466 if (!is_cxx ())
11467 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11468 break;
11469 default:
11470 /* An unusual tag. Leave the flag-byte empty. */
11471 break;
11472 }
11473 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11474 "GDB-index flags");
11475 }
11476
11477 dw2_asm_output_nstring (entry->name, -1, "external name");
11478 }
11479
11480
11481 /* Output the public names table used to speed up access to externally
11482 visible names; or the public types table used to find type definitions. */
11483
11484 static void
11485 output_pubnames (vec<pubname_entry, va_gc> *names)
11486 {
11487 unsigned i;
11488 unsigned long pubnames_length = size_of_pubnames (names);
11489 pubname_entry *pub;
11490
11491 if (!XCOFF_DEBUGGING_INFO)
11492 {
11493 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11494 dw2_asm_output_data (4, 0xffffffff,
11495 "Initial length escape value indicating 64-bit DWARF extension");
11496 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11497 "Pub Info Length");
11498 }
11499
11500 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11501 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11502
11503 if (dwarf_split_debug_info)
11504 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11505 debug_skeleton_info_section,
11506 "Offset of Compilation Unit Info");
11507 else
11508 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11509 debug_info_section,
11510 "Offset of Compilation Unit Info");
11511 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11512 "Compilation Unit Length");
11513
11514 FOR_EACH_VEC_ELT (*names, i, pub)
11515 {
11516 if (include_pubname_in_output (names, pub))
11517 {
11518 dw_offset die_offset = pub->die->die_offset;
11519
11520 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11521 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11522 gcc_assert (pub->die->die_mark);
11523
11524 /* If we're putting types in their own .debug_types sections,
11525 the .debug_pubtypes table will still point to the compile
11526 unit (not the type unit), so we want to use the offset of
11527 the skeleton DIE (if there is one). */
11528 if (pub->die->comdat_type_p && names == pubtype_table)
11529 {
11530 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11531
11532 if (type_node != NULL)
11533 die_offset = (type_node->skeleton_die != NULL
11534 ? type_node->skeleton_die->die_offset
11535 : comp_unit_die ()->die_offset);
11536 }
11537
11538 output_pubname (die_offset, pub);
11539 }
11540 }
11541
11542 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11543 }
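
/* Sketch of the emitted table for 32-bit DWARF (names, offsets and
   labels are made up for illustration):

       .4byte  <table length>          # Pub Info Length
       .2byte  0x2                     # pubnames/pubtypes version
       .4byte  .Ldebug_info0           # Offset of Compilation Unit Info
       .4byte  <CU length>
       .4byte  0x2a                    # DIE offset
       .asciz  "my_function"           # external name
       ...
       .4byte  0                       # terminator

   With debug_generate_pub_sections == 2 an extra GDB-index flags byte is
   emitted between each DIE offset and its name, as done above.  */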
11544
11545 /* Output public names and types tables if necessary. */
11546
11547 static void
11548 output_pubtables (void)
11549 {
11550 if (!want_pubnames () || !info_section_emitted)
11551 return;
11552
11553 switch_to_section (debug_pubnames_section);
11554 output_pubnames (pubname_table);
11555 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11556 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11557 simply won't look for the section. */
11558 switch_to_section (debug_pubtypes_section);
11559 output_pubnames (pubtype_table);
11560 }
11561
11562
11563 /* Output the information that goes into the .debug_aranges table.
11564 Namely, define the beginning and ending address range of the
11565 text section generated for this compilation unit. */
11566
11567 static void
11568 output_aranges (void)
11569 {
11570 unsigned i;
11571 unsigned long aranges_length = size_of_aranges ();
11572
11573 if (!XCOFF_DEBUGGING_INFO)
11574 {
11575 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11576 dw2_asm_output_data (4, 0xffffffff,
11577 "Initial length escape value indicating 64-bit DWARF extension");
11578 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11579 "Length of Address Ranges Info");
11580 }
11581
11582 /* Version number for aranges is still 2, even up to DWARF5. */
11583 dw2_asm_output_data (2, 2, "DWARF aranges version");
11584 if (dwarf_split_debug_info)
11585 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11586 debug_skeleton_info_section,
11587 "Offset of Compilation Unit Info");
11588 else
11589 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11590 debug_info_section,
11591 "Offset of Compilation Unit Info");
11592 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11593 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11594
11595 /* We need to align to twice the pointer size here. */
11596 if (DWARF_ARANGES_PAD_SIZE)
11597 {
11598 /* Pad using 2-byte words so that padding is correct for any
11599 pointer size. */
11600 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11601 2 * DWARF2_ADDR_SIZE);
11602 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11603 dw2_asm_output_data (2, 0, NULL);
11604 }
11605
11606 /* It is necessary not to output these entries if the sections were
11607 not used; if the sections were not used, the length will be 0 and
11608 the address may end up as 0 if the section is discarded by ld
11609 --gc-sections, leaving an invalid (0, 0) entry that can be
11610 confused with the terminator. */
11611 if (text_section_used)
11612 {
11613 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11614 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11615 text_section_label, "Length");
11616 }
11617 if (cold_text_section_used)
11618 {
11619 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11620 "Address");
11621 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11622 cold_text_section_label, "Length");
11623 }
11624
11625 if (have_multiple_function_sections)
11626 {
11627 unsigned fde_idx;
11628 dw_fde_ref fde;
11629
11630 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11631 {
11632 if (DECL_IGNORED_P (fde->decl))
11633 continue;
11634 if (!fde->in_std_section)
11635 {
11636 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11637 "Address");
11638 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11639 fde->dw_fde_begin, "Length");
11640 }
11641 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11642 {
11643 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11644 "Address");
11645 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11646 fde->dw_fde_second_begin, "Length");
11647 }
11648 }
11649 }
11650
11651 /* Output the terminator words. */
11652 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11653 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11654 }
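
/* Byte layout of the table emitted above, assuming 32-bit DWARF and
   8-byte addresses: 4-byte length, 2-byte version (2), 4-byte CU offset,
   1-byte address size, 1-byte segment size (0), then 4 bytes of padding
   so that the tuples start on a 2 * address-size boundary, followed by
   one (address, length) pair per covered text range and a terminating
   all-zero pair.  */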
11655
11656 /* Add a new entry to .debug_ranges. Return its index into
11657 ranges_table vector. */
11658
11659 static unsigned int
11660 add_ranges_num (int num, bool maybe_new_sec)
11661 {
11662 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11663 vec_safe_push (ranges_table, r);
11664 return vec_safe_length (ranges_table) - 1;
11665 }
11666
11667 /* Add a new entry to .debug_ranges corresponding to a block, or a
11668 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11669 this entry might be in a different section from previous range. */
11670
11671 static unsigned int
11672 add_ranges (const_tree block, bool maybe_new_sec)
11673 {
11674 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11675 }
11676
11677 /* Note that (*rnglist_table)[offset] is either a head of a rnglist
11678 chain, or middle entry of a chain that will be directly referred to. */
11679
11680 static void
11681 note_rnglist_head (unsigned int offset)
11682 {
11683 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11684 return;
11685 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11686 }
11687
11688 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11689 When using dwarf_split_debug_info, address attributes in dies destined
11690 for the final executable should be direct references--setting the
11691 parameter force_direct ensures this behavior. */
11692
11693 static void
11694 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11695 bool *added, bool force_direct)
11696 {
11697 unsigned int in_use = vec_safe_length (ranges_by_label);
11698 unsigned int offset;
11699 dw_ranges_by_label rbl = { begin, end };
11700 vec_safe_push (ranges_by_label, rbl);
11701 offset = add_ranges_num (-(int)in_use - 1, true);
11702 if (!*added)
11703 {
11704 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11705 *added = true;
11706 note_rnglist_head (offset);
11707 }
11708 }
11709
11710 /* Emit .debug_ranges section. */
11711
11712 static void
11713 output_ranges (void)
11714 {
11715 unsigned i;
11716 static const char *const start_fmt = "Offset %#x";
11717 const char *fmt = start_fmt;
11718 dw_ranges *r;
11719
11720 switch_to_section (debug_ranges_section);
11721 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11722 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11723 {
11724 int block_num = r->num;
11725
11726 if (block_num > 0)
11727 {
11728 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11729 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11730
11731 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11732 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11733
11734 /* If all code is in the text section, then the compilation
11735 unit base address defaults to DW_AT_low_pc, which is the
11736 base of the text section. */
11737 if (!have_multiple_function_sections)
11738 {
11739 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11740 text_section_label,
11741 fmt, i * 2 * DWARF2_ADDR_SIZE);
11742 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11743 text_section_label, NULL);
11744 }
11745
11746 /* Otherwise, the compilation unit base address is zero,
11747 which allows us to use absolute addresses, and not worry
11748 about whether the target supports cross-section
11749 arithmetic. */
11750 else
11751 {
11752 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11753 fmt, i * 2 * DWARF2_ADDR_SIZE);
11754 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11755 }
11756
11757 fmt = NULL;
11758 }
11759
11760 /* Negative block_num stands for an index into ranges_by_label. */
11761 else if (block_num < 0)
11762 {
11763 int lab_idx = - block_num - 1;
11764
11765 if (!have_multiple_function_sections)
11766 {
11767 gcc_unreachable ();
11768 #if 0
11769 /* If we ever use add_ranges_by_labels () for a single
11770 function section, all we have to do is to take out
11771 the #if 0 above. */
11772 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11773 (*ranges_by_label)[lab_idx].begin,
11774 text_section_label,
11775 fmt, i * 2 * DWARF2_ADDR_SIZE);
11776 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11777 (*ranges_by_label)[lab_idx].end,
11778 text_section_label, NULL);
11779 #endif
11780 }
11781 else
11782 {
11783 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11784 (*ranges_by_label)[lab_idx].begin,
11785 fmt, i * 2 * DWARF2_ADDR_SIZE);
11786 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11787 (*ranges_by_label)[lab_idx].end,
11788 NULL);
11789 }
11790 }
11791 else
11792 {
11793 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11794 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11795 fmt = start_fmt;
11796 }
11797 }
11798 }
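
/* Example of the bytes this produces for one block when all code is in
   .text (8-byte addresses assumed, labels hypothetical):

       .8byte  .LBB3-.Ltext0    # Offset 0x10
       .8byte  .LBE3-.Ltext0
       .8byte  0                # end of range list
       .8byte  0

   Entries are offsets from the CU base address (DW_AT_low_pc) in the
   single-section case and absolute addresses otherwise.  */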
11799
11800 /* Non-zero if .debug_line_str should be used for .debug_line section
11801 strings or strings that are likely shareable with those. */
11802 #define DWARF5_USE_DEBUG_LINE_STR \
11803 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11804 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11805 /* FIXME: there is no .debug_line_str.dwo section, \
11806 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11807 && !dwarf_split_debug_info)
11808
11809 /* Assign .debug_rnglists indexes. */
11810
11811 static void
11812 index_rnglists (void)
11813 {
11814 unsigned i;
11815 dw_ranges *r;
11816
11817 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11818 if (r->label)
11819 r->idx = rnglist_idx++;
11820 }
11821
11822 /* Emit .debug_rnglists section. */
11823
11824 static void
11825 output_rnglists (unsigned generation)
11826 {
11827 unsigned i;
11828 dw_ranges *r;
11829 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11830 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11831 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11832
11833 switch_to_section (debug_ranges_section);
11834 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11835 /* There are up to 4 unique ranges labels per generation.
11836 See also init_sections_and_labels. */
11837 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11838 2 + generation * 4);
11839 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11840 3 + generation * 4);
11841 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11842 dw2_asm_output_data (4, 0xffffffff,
11843 "Initial length escape value indicating "
11844 "64-bit DWARF extension");
11845 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11846 "Length of Range Lists");
11847 ASM_OUTPUT_LABEL (asm_out_file, l1);
11848 output_dwarf_version ();
11849 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11850 dw2_asm_output_data (1, 0, "Segment Size");
11851 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11852 about relocation sizes and primarily care about the size of .debug*
11853 sections in linked shared libraries and executables, then
11854 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11855 into it are usually larger than just DW_FORM_sec_offset offsets
11856 into the .debug_rnglists section. */
11857 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11858 "Offset Entry Count");
11859 if (dwarf_split_debug_info)
11860 {
11861 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11862 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11863 if (r->label)
11864 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11865 ranges_base_label, NULL);
11866 }
11867
11868 const char *lab = "";
11869 unsigned int len = vec_safe_length (ranges_table);
11870 const char *base = NULL;
11871 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11872 {
11873 int block_num = r->num;
11874
11875 if (r->label)
11876 {
11877 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11878 lab = r->label;
11879 }
11880 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11881 base = NULL;
11882 if (block_num > 0)
11883 {
11884 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11885 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11886
11887 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11888 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11889
11890 if (HAVE_AS_LEB128)
11891 {
11892 /* If all code is in the text section, then the compilation
11893 unit base address defaults to DW_AT_low_pc, which is the
11894 base of the text section. */
11895 if (!have_multiple_function_sections)
11896 {
11897 dw2_asm_output_data (1, DW_RLE_offset_pair,
11898 "DW_RLE_offset_pair (%s)", lab);
11899 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11900 "Range begin address (%s)", lab);
11901 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11902 "Range end address (%s)", lab);
11903 continue;
11904 }
11905 if (base == NULL)
11906 {
11907 dw_ranges *r2 = NULL;
11908 if (i < len - 1)
11909 r2 = &(*ranges_table)[i + 1];
11910 if (r2
11911 && r2->num != 0
11912 && r2->label == NULL
11913 && !r2->maybe_new_sec)
11914 {
11915 dw2_asm_output_data (1, DW_RLE_base_address,
11916 "DW_RLE_base_address (%s)", lab);
11917 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11918 "Base address (%s)", lab);
11919 strcpy (basebuf, blabel);
11920 base = basebuf;
11921 }
11922 }
11923 if (base)
11924 {
11925 dw2_asm_output_data (1, DW_RLE_offset_pair,
11926 "DW_RLE_offset_pair (%s)", lab);
11927 dw2_asm_output_delta_uleb128 (blabel, base,
11928 "Range begin address (%s)", lab);
11929 dw2_asm_output_delta_uleb128 (elabel, base,
11930 "Range end address (%s)", lab);
11931 continue;
11932 }
11933 dw2_asm_output_data (1, DW_RLE_start_length,
11934 "DW_RLE_start_length (%s)", lab);
11935 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11936 "Range begin address (%s)", lab);
11937 dw2_asm_output_delta_uleb128 (elabel, blabel,
11938 "Range length (%s)", lab);
11939 }
11940 else
11941 {
11942 dw2_asm_output_data (1, DW_RLE_start_end,
11943 "DW_RLE_start_end (%s)", lab);
11944 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11945 "Range begin address (%s)", lab);
11946 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11947 "Range end address (%s)", lab);
11948 }
11949 }
11950
11951 /* Negative block_num stands for an index into ranges_by_label. */
11952 else if (block_num < 0)
11953 {
11954 int lab_idx = - block_num - 1;
11955 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11956 const char *elabel = (*ranges_by_label)[lab_idx].end;
11957
11958 if (!have_multiple_function_sections)
11959 gcc_unreachable ();
11960 if (HAVE_AS_LEB128)
11961 {
11962 dw2_asm_output_data (1, DW_RLE_start_length,
11963 "DW_RLE_start_length (%s)", lab);
11964 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11965 "Range begin address (%s)", lab);
11966 dw2_asm_output_delta_uleb128 (elabel, blabel,
11967 "Range length (%s)", lab);
11968 }
11969 else
11970 {
11971 dw2_asm_output_data (1, DW_RLE_start_end,
11972 "DW_RLE_start_end (%s)", lab);
11973 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11974 "Range begin address (%s)", lab);
11975 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11976 "Range end address (%s)", lab);
11977 }
11978 }
11979 else
11980 dw2_asm_output_data (1, DW_RLE_end_of_list,
11981 "DW_RLE_end_of_list (%s)", lab);
11982 }
11983 ASM_OUTPUT_LABEL (asm_out_file, l2);
11984 }
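
/* The loop above picks among DW_RLE_offset_pair, DW_RLE_base_address,
   DW_RLE_start_length and DW_RLE_start_end depending on whether a base
   address is in effect and whether label deltas are usable.  Below is a
   minimal standalone sketch of how one such entry is byte-encoded,
   assuming the standard unsigned LEB128 encoding and the DWARF 5 code
   0x04 for DW_RLE_offset_pair; the buffer handling and the emit_uleb128
   helper are illustrative only, not GCC interfaces.  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Append an unsigned LEB128 value to BUF, returning the new length.  */
static size_t
emit_uleb128 (unsigned char *buf, size_t len, uint64_t value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
        byte |= 0x80;   /* More bytes follow.  */
      buf[len++] = byte;
    }
  while (value != 0);
  return len;
}

int
main (void)
{
  /* One DW_RLE_offset_pair entry: a 1-byte kind code followed by the
     begin and end offsets relative to the current base address.  */
  unsigned char buf[32];
  size_t len = 0;
  buf[len++] = 0x04;                     /* DW_RLE_offset_pair */
  len = emit_uleb128 (buf, len, 0x10);   /* range begin offset */
  len = emit_uleb128 (buf, len, 0x240);  /* range end offset   */
  for (size_t i = 0; i < len; i++)
    printf ("%02x ", (unsigned) buf[i]);
  printf ("\n");                         /* prints: 04 10 c0 04 */
  return 0;
}
#endif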
11985
11986 /* Data structure containing information about input files. */
11987 struct file_info
11988 {
11989 const char *path; /* Complete file name. */
11990 const char *fname; /* File name part. */
11991 int length; /* Length of entire string. */
11992 struct dwarf_file_data * file_idx; /* Index in input file table. */
11993 int dir_idx; /* Index in directory table. */
11994 };
11995
11996 /* Data structure containing information about directories with source
11997 files. */
11998 struct dir_info
11999 {
12000 const char *path; /* Path including directory name. */
12001 int length; /* Path length. */
12002 int prefix; /* Index of directory entry which is a prefix. */
12003 int count; /* Number of files in this directory. */
12004 int dir_idx; /* Index of directory used as base. */
12005 };
12006
12007 /* Callback function for file_info comparison. We sort by looking at
12008 the directories in the path. */
12009
12010 static int
12011 file_info_cmp (const void *p1, const void *p2)
12012 {
12013 const struct file_info *const s1 = (const struct file_info *) p1;
12014 const struct file_info *const s2 = (const struct file_info *) p2;
12015 const unsigned char *cp1;
12016 const unsigned char *cp2;
12017
12018 /* Take care of file names without directories. We need to make sure that
12019 we return consistent values to qsort since some implementations will get confused if
12020 we return the same value when identical operands are passed in opposite
12021 orders. So if neither has a directory, return 0 and otherwise return
12022 1 or -1 depending on which one has the directory. */
12023 if ((s1->path == s1->fname || s2->path == s2->fname))
12024 return (s2->path == s2->fname) - (s1->path == s1->fname);
12025
12026 cp1 = (const unsigned char *) s1->path;
12027 cp2 = (const unsigned char *) s2->path;
12028
12029 while (1)
12030 {
12031 ++cp1;
12032 ++cp2;
12033 /* Reached the end of the first path? If so, handle like above. */
12034 if ((cp1 == (const unsigned char *) s1->fname)
12035 || (cp2 == (const unsigned char *) s2->fname))
12036 return ((cp2 == (const unsigned char *) s2->fname)
12037 - (cp1 == (const unsigned char *) s1->fname));
12038
12039 /* Character of current path component the same? */
12040 else if (*cp1 != *cp2)
12041 return *cp1 - *cp2;
12042 }
12043 }
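
/* The comparator above orders entries by their directory prefix so that
   files sharing a directory become adjacent, which the directory-table
   construction in output_file_names relies on.  A hedged standalone
   sketch of the same idea over plain path strings; dir_prefix_cmp is a
   simplified stand-in, not the GCC comparator, and it uses a literal '/'
   instead of IS_DIR_SEPARATOR.  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Order paths by directory prefix; names without a directory sort first.  */
static int
dir_prefix_cmp (const void *p1, const void *p2)
{
  const char *a = *(const char *const *) p1;
  const char *b = *(const char *const *) p2;
  const char *fa = strrchr (a, '/');
  const char *fb = strrchr (b, '/');

  if (fa == NULL || fb == NULL)
    return (fb == NULL) - (fa == NULL);

  /* Compare the directory prefixes, including the final '/'.  */
  size_t la = (size_t) (fa - a) + 1;
  size_t lb = (size_t) (fb - b) + 1;
  int c = memcmp (a, b, la < lb ? la : lb);
  if (c != 0)
    return c;
  return (la > lb) - (la < lb);
}

int
main (void)
{
  const char *paths[] =
    { "src/util/bitmap.c", "main.c", "src/tree.c", "src/util/vec.c" };
  qsort (paths, 4, sizeof (paths[0]), dir_prefix_cmp);
  for (int i = 0; i < 4; i++)
    printf ("%s\n", paths[i]);
  /* main.c first, then src/tree.c, then the two src/util/ files together.  */
  return 0;
}
#endif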
12044
12045 struct file_name_acquire_data
12046 {
12047 struct file_info *files;
12048 int used_files;
12049 int max_files;
12050 };
12051
12052 /* Traversal function for the hash table. */
12053
12054 int
12055 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12056 {
12057 struct dwarf_file_data *d = *slot;
12058 struct file_info *fi;
12059 const char *f;
12060
12061 gcc_assert (fnad->max_files >= d->emitted_number);
12062
12063 if (! d->emitted_number)
12064 return 1;
12065
12066 gcc_assert (fnad->max_files != fnad->used_files);
12067
12068 fi = fnad->files + fnad->used_files++;
12069
12070 /* Skip all leading "./". */
12071 f = d->filename;
12072 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12073 f += 2;
12074
12075 /* Create a new array entry. */
12076 fi->path = f;
12077 fi->length = strlen (f);
12078 fi->file_idx = d;
12079
12080 /* Search for the file name part. */
12081 f = strrchr (f, DIR_SEPARATOR);
12082 #if defined (DIR_SEPARATOR_2)
12083 {
12084 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12085
12086 if (g != NULL)
12087 {
12088 if (f == NULL || f < g)
12089 f = g;
12090 }
12091 }
12092 #endif
12093
12094 fi->fname = f == NULL ? fi->path : f + 1;
12095 return 1;
12096 }
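
/* The "./" stripping above keeps the directory table free of redundant
   current-directory components.  A tiny standalone sketch of the same
   loop, using a literal '/' instead of IS_DIR_SEPARATOR:  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdio.h>

int
main (void)
{
  const char *f = "././src/foo.c";
  /* Skip every leading "./" pair.  */
  while (f[0] == '.' && f[1] == '/')
    f += 2;
  printf ("%s\n", f);   /* prints: src/foo.c */
  return 0;
}
#endif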
12097
12098 /* Helper function for output_file_names. Emit a FORM encoded
12099 string STR, with assembly comment start ENTRY_KIND and
12100 index IDX.  */
12101
12102 static void
12103 output_line_string (enum dwarf_form form, const char *str,
12104 const char *entry_kind, unsigned int idx)
12105 {
12106 switch (form)
12107 {
12108 case DW_FORM_string:
12109 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12110 break;
12111 case DW_FORM_line_strp:
12112 if (!debug_line_str_hash)
12113 debug_line_str_hash
12114 = hash_table<indirect_string_hasher>::create_ggc (10);
12115
12116 struct indirect_string_node *node;
12117 node = find_AT_string_in_table (str, debug_line_str_hash);
12118 set_indirect_string (node);
12119 node->form = form;
12120 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12121 debug_line_str_section, "%s: %#x: \"%s\"",
12122 entry_kind, 0, node->str);
12123 break;
12124 default:
12125 gcc_unreachable ();
12126 }
12127 }
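
/* The two forms above trade size differently: DW_FORM_string stores the
   NUL-terminated name inline at every use, while DW_FORM_line_strp stores
   the name once in .debug_line_str and references it by a section offset
   (DWARF_OFFSET_SIZE bytes, 4 for 32-bit DWARF).  A rough cost-comparison
   sketch; the numbers are illustrative, not measured from GCC output.  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  const char *str = "foo.c";
  size_t inline_cost = strlen (str) + 1;   /* DW_FORM_string, per use    */
  size_t strp_cost = 4;                    /* DW_FORM_line_strp, per use */
  printf ("inline: %zu bytes, line_strp reference: %zu bytes\n",
          inline_cost, strp_cost);
  return 0;
}
#endif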
12128
12129 /* Output the directory table and the file name table. We try to minimize
12130 the total amount of memory needed. A heuristic is used to avoid large
12131 slowdowns with many input files. */
12132
12133 static void
12134 output_file_names (void)
12135 {
12136 struct file_name_acquire_data fnad;
12137 int numfiles;
12138 struct file_info *files;
12139 struct dir_info *dirs;
12140 int *saved;
12141 int *savehere;
12142 int *backmap;
12143 int ndirs;
12144 int idx_offset;
12145 int i;
12146
12147 if (!last_emitted_file)
12148 {
12149 if (dwarf_version >= 5)
12150 {
12151 dw2_asm_output_data (1, 0, "Directory entry format count");
12152 dw2_asm_output_data_uleb128 (0, "Directories count");
12153 dw2_asm_output_data (1, 0, "File name entry format count");
12154 dw2_asm_output_data_uleb128 (0, "File names count");
12155 }
12156 else
12157 {
12158 dw2_asm_output_data (1, 0, "End directory table");
12159 dw2_asm_output_data (1, 0, "End file name table");
12160 }
12161 return;
12162 }
12163
12164 numfiles = last_emitted_file->emitted_number;
12165
12166 /* Allocate the various arrays we need. */
12167 files = XALLOCAVEC (struct file_info, numfiles);
12168 dirs = XALLOCAVEC (struct dir_info, numfiles);
12169
12170 fnad.files = files;
12171 fnad.used_files = 0;
12172 fnad.max_files = numfiles;
12173 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12174 gcc_assert (fnad.used_files == fnad.max_files);
12175
12176 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12177
12178 /* Find all the different directories used. */
12179 dirs[0].path = files[0].path;
12180 dirs[0].length = files[0].fname - files[0].path;
12181 dirs[0].prefix = -1;
12182 dirs[0].count = 1;
12183 dirs[0].dir_idx = 0;
12184 files[0].dir_idx = 0;
12185 ndirs = 1;
12186
12187 for (i = 1; i < numfiles; i++)
12188 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12189 && memcmp (dirs[ndirs - 1].path, files[i].path,
12190 dirs[ndirs - 1].length) == 0)
12191 {
12192 /* Same directory as last entry. */
12193 files[i].dir_idx = ndirs - 1;
12194 ++dirs[ndirs - 1].count;
12195 }
12196 else
12197 {
12198 int j;
12199
12200 /* This is a new directory. */
12201 dirs[ndirs].path = files[i].path;
12202 dirs[ndirs].length = files[i].fname - files[i].path;
12203 dirs[ndirs].count = 1;
12204 dirs[ndirs].dir_idx = ndirs;
12205 files[i].dir_idx = ndirs;
12206
12207 /* Search for a prefix. */
12208 dirs[ndirs].prefix = -1;
12209 for (j = 0; j < ndirs; j++)
12210 if (dirs[j].length < dirs[ndirs].length
12211 && dirs[j].length > 1
12212 && (dirs[ndirs].prefix == -1
12213 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12214 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12215 dirs[ndirs].prefix = j;
12216
12217 ++ndirs;
12218 }
12219
12220 /* Now to the actual work. We have to find a subset of the directories which
12221 allows expressing the file names using references to the directory table
12222 with the fewest characters.  We do not do an exhaustive search
12223 where we would have to check every combination of every single
12224 possible prefix.  Instead we use a heuristic which provides nearly optimal
12225 results in most cases and is never far off.  */
12226 saved = XALLOCAVEC (int, ndirs);
12227 savehere = XALLOCAVEC (int, ndirs);
12228
12229 memset (saved, '\0', ndirs * sizeof (saved[0]));
12230 for (i = 0; i < ndirs; i++)
12231 {
12232 int j;
12233 int total;
12234
12235 /* We can always save some space for the current directory. But this
12236 does not mean it will be enough to justify adding the directory. */
12237 savehere[i] = dirs[i].length;
12238 total = (savehere[i] - saved[i]) * dirs[i].count;
12239
12240 for (j = i + 1; j < ndirs; j++)
12241 {
12242 savehere[j] = 0;
12243 if (saved[j] < dirs[i].length)
12244 {
12245 /* Determine whether the dirs[i] path is a prefix of the
12246 dirs[j] path. */
12247 int k;
12248
12249 k = dirs[j].prefix;
12250 while (k != -1 && k != (int) i)
12251 k = dirs[k].prefix;
12252
12253 if (k == (int) i)
12254 {
12255 /* Yes it is. We can possibly save some memory by
12256 writing the filenames in dirs[j] relative to
12257 dirs[i]. */
12258 savehere[j] = dirs[i].length;
12259 total += (savehere[j] - saved[j]) * dirs[j].count;
12260 }
12261 }
12262 }
12263
12264 /* Check whether we can save enough to justify adding the dirs[i]
12265 directory. */
12266 if (total > dirs[i].length + 1)
12267 {
12268 /* It's worthwhile adding. */
12269 for (j = i; j < ndirs; j++)
12270 if (savehere[j] > 0)
12271 {
12272 /* Remember how much we saved for this directory so far. */
12273 saved[j] = savehere[j];
12274
12275 /* Remember the prefix directory. */
12276 dirs[j].dir_idx = i;
12277 }
12278 }
12279 }
12280
12281 /* Emit the directory name table. */
12282 idx_offset = dirs[0].length > 0 ? 1 : 0;
12283 enum dwarf_form str_form = DW_FORM_string;
12284 enum dwarf_form idx_form = DW_FORM_udata;
12285 if (dwarf_version >= 5)
12286 {
12287 const char *comp_dir = comp_dir_string ();
12288 if (comp_dir == NULL)
12289 comp_dir = "";
12290 dw2_asm_output_data (1, 1, "Directory entry format count");
12291 if (DWARF5_USE_DEBUG_LINE_STR)
12292 str_form = DW_FORM_line_strp;
12293 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12294 dw2_asm_output_data_uleb128 (str_form, "%s",
12295 get_DW_FORM_name (str_form));
12296 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12297 if (str_form == DW_FORM_string)
12298 {
12299 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12300 for (i = 1 - idx_offset; i < ndirs; i++)
12301 dw2_asm_output_nstring (dirs[i].path,
12302 dirs[i].length
12303 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12304 "Directory Entry: %#x", i + idx_offset);
12305 }
12306 else
12307 {
12308 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12309 for (i = 1 - idx_offset; i < ndirs; i++)
12310 {
12311 const char *str
12312 = ggc_alloc_string (dirs[i].path,
12313 dirs[i].length
12314 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12315 output_line_string (str_form, str, "Directory Entry",
12316 (unsigned) i + idx_offset);
12317 }
12318 }
12319 }
12320 else
12321 {
12322 for (i = 1 - idx_offset; i < ndirs; i++)
12323 dw2_asm_output_nstring (dirs[i].path,
12324 dirs[i].length
12325 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12326 "Directory Entry: %#x", i + idx_offset);
12327
12328 dw2_asm_output_data (1, 0, "End directory table");
12329 }
12330
12331 /* We have to emit them in the order of emitted_number since that's
12332 used in the debug info generation. To do this efficiently we
12333 generate a back-mapping of the indices first. */
12334 backmap = XALLOCAVEC (int, numfiles);
12335 for (i = 0; i < numfiles; i++)
12336 backmap[files[i].file_idx->emitted_number - 1] = i;
12337
12338 if (dwarf_version >= 5)
12339 {
12340 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12341 if (filename0 == NULL)
12342 filename0 = "";
12343 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12344 DW_FORM_data2. Choose one based on the number of directories
12345 and how much space they would occupy in each encoding.
12346 If we have at most 256 directories, all indexes fit into
12347 a single byte, so DW_FORM_data1 is most compact (if there
12348 are at most 128 directories, DW_FORM_udata would be just as
12349 compact, but no shorter and slower to decode).
12350 if (ndirs + idx_offset <= 256)
12351 idx_form = DW_FORM_data1;
12352 /* If there are more than 65536 directories, we have to use
12353 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12354 Otherwise, compute how much space the indexes would occupy if they
12355 all used DW_FORM_udata - sum - compare that to the size of the
12356 DW_FORM_data2 encoding, and pick the more efficient one.  */
12357 else if (ndirs + idx_offset <= 65536)
12358 {
12359 unsigned HOST_WIDE_INT sum = 1;
12360 for (i = 0; i < numfiles; i++)
12361 {
12362 int file_idx = backmap[i];
12363 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12364 sum += size_of_uleb128 (dir_idx);
12365 }
12366 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12367 idx_form = DW_FORM_data2;
12368 }
12369 #ifdef VMS_DEBUGGING_INFO
12370 dw2_asm_output_data (1, 4, "File name entry format count");
12371 #else
12372 dw2_asm_output_data (1, 2, "File name entry format count");
12373 #endif
12374 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12375 dw2_asm_output_data_uleb128 (str_form, "%s",
12376 get_DW_FORM_name (str_form));
12377 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12378 "DW_LNCT_directory_index");
12379 dw2_asm_output_data_uleb128 (idx_form, "%s",
12380 get_DW_FORM_name (idx_form));
12381 #ifdef VMS_DEBUGGING_INFO
12382 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12383 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12384 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12385 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12386 #endif
12387 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12388
12389 output_line_string (str_form, filename0, "File Entry", 0);
12390
12391 /* Include directory index. */
12392 if (idx_form != DW_FORM_udata)
12393 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12394 0, NULL);
12395 else
12396 dw2_asm_output_data_uleb128 (0, NULL);
12397
12398 #ifdef VMS_DEBUGGING_INFO
12399 dw2_asm_output_data_uleb128 (0, NULL);
12400 dw2_asm_output_data_uleb128 (0, NULL);
12401 #endif
12402 }
12403
12404 /* Now write all the file names. */
12405 for (i = 0; i < numfiles; i++)
12406 {
12407 int file_idx = backmap[i];
12408 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12409
12410 #ifdef VMS_DEBUGGING_INFO
12411 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12412
12413 /* Setting these fields can lead to debugger miscomparisons,
12414 but VMS Debug requires them to be set correctly. */
12415
12416 int ver;
12417 long long cdt;
12418 long siz;
12419 int maxfilelen = (strlen (files[file_idx].path)
12420 + dirs[dir_idx].length
12421 + MAX_VMS_VERSION_LEN + 1);
12422 char *filebuf = XALLOCAVEC (char, maxfilelen);
12423
12424 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12425 snprintf (filebuf, maxfilelen, "%s;%d",
12426 files[file_idx].path + dirs[dir_idx].length, ver);
12427
12428 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12429
12430 /* Include directory index. */
12431 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12432 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12433 dir_idx + idx_offset, NULL);
12434 else
12435 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12436
12437 /* Modification time. */
12438 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12439 &cdt, 0, 0, 0) == 0)
12440 ? cdt : 0, NULL);
12441
12442 /* File length in bytes. */
12443 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12444 0, &siz, 0, 0) == 0)
12445 ? siz : 0, NULL);
12446 #else
12447 output_line_string (str_form,
12448 files[file_idx].path + dirs[dir_idx].length,
12449 "File Entry", (unsigned) i + 1);
12450
12451 /* Include directory index. */
12452 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12453 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12454 dir_idx + idx_offset, NULL);
12455 else
12456 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12457
12458 if (dwarf_version >= 5)
12459 continue;
12460
12461 /* Modification time. */
12462 dw2_asm_output_data_uleb128 (0, NULL);
12463
12464 /* File length in bytes. */
12465 dw2_asm_output_data_uleb128 (0, NULL);
12466 #endif /* VMS_DEBUGGING_INFO */
12467 }
12468
12469 if (dwarf_version < 5)
12470 dw2_asm_output_data (1, 0, "End file name table");
12471 }
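
/* The DW_LNCT_directory_index form selection in the function above trades
   fixed-width encodings against ULEB128: DW_FORM_data1 when every index
   fits in a byte, DW_FORM_data2 only when the total ULEB128 size would be
   at least two bytes per file entry, DW_FORM_udata otherwise.  A hedged
   standalone sketch of the same arithmetic; uleb128_size and
   pick_dir_index_form are local stand-ins, not GCC helpers, and NDIRS here
   already includes the comp_dir entry.  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdio.h>

/* Number of bytes the unsigned LEB128 encoding of VALUE occupies.  */
static unsigned int
uleb128_size (unsigned long value)
{
  unsigned int size = 1;
  while (value >>= 7)
    size++;
  return size;
}

/* Returns 1 for DW_FORM_data1, 2 for DW_FORM_data2, 0 for DW_FORM_udata.  */
static int
pick_dir_index_form (unsigned int ndirs, unsigned int nfiles,
                     const unsigned int *dir_idx)
{
  if (ndirs <= 256)
    return 1;
  if (ndirs > 65536)
    return 0;
  unsigned long sum = 1;   /* index of file 0, one byte */
  for (unsigned int i = 0; i < nfiles; i++)
    sum += uleb128_size (dir_idx[i]);
  return sum >= 2ul * (nfiles + 1) ? 2 : 0;
}

int
main (void)
{
  unsigned int idx[3] = { 300, 300, 40000 };
  printf ("form: %d\n", pick_dir_index_form (400, 3, idx));   /* prints: 2 */
  return 0;
}
#endif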
12472
12473
12474 /* Output one line number table into the .debug_line section. */
12475
12476 static void
12477 output_one_line_info_table (dw_line_info_table *table)
12478 {
12479 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12480 unsigned int current_line = 1;
12481 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12482 dw_line_info_entry *ent, *prev_addr;
12483 size_t i;
12484 unsigned int view;
12485
12486 view = 0;
12487
12488 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12489 {
12490 switch (ent->opcode)
12491 {
12492 case LI_set_address:
12493 /* ??? Unfortunately, we have little choice here currently, and
12494 must always use the most general form. GCC does not know the
12495 address delta itself, so we can't use DW_LNS_advance_pc. Many
12496 ports do have length attributes which will give an upper bound
12497 on the address range. We could perhaps use length attributes
12498 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12499 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12500
12501 view = 0;
12502
12503 /* This can handle any delta. This takes
12504 4+DWARF2_ADDR_SIZE bytes. */
12505 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12506 debug_variable_location_views
12507 ? ", reset view to 0" : "");
12508 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12509 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12510 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12511
12512 prev_addr = ent;
12513 break;
12514
12515 case LI_adv_address:
12516 {
12517 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12518 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12519 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12520
12521 view++;
12522
12523 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12524 dw2_asm_output_delta (2, line_label, prev_label,
12525 "from %s to %s", prev_label, line_label);
12526
12527 prev_addr = ent;
12528 break;
12529 }
12530
12531 case LI_set_line:
12532 if (ent->val == current_line)
12533 {
12534 /* We still need to start a new row, so output a copy insn. */
12535 dw2_asm_output_data (1, DW_LNS_copy,
12536 "copy line %u", current_line);
12537 }
12538 else
12539 {
12540 int line_offset = ent->val - current_line;
12541 int line_delta = line_offset - DWARF_LINE_BASE;
12542
12543 current_line = ent->val;
12544 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12545 {
12546 /* This can handle deltas from -10 to 234, using the current
12547 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12548 This takes 1 byte. */
12549 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12550 "line %u", current_line);
12551 }
12552 else
12553 {
12554 /* This can handle any delta. This takes at least 4 bytes,
12555 depending on the value being encoded. */
12556 dw2_asm_output_data (1, DW_LNS_advance_line,
12557 "advance to line %u", current_line);
12558 dw2_asm_output_data_sleb128 (line_offset, NULL);
12559 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12560 }
12561 }
12562 break;
12563
12564 case LI_set_file:
12565 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12566 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12567 break;
12568
12569 case LI_set_column:
12570 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12571 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12572 break;
12573
12574 case LI_negate_stmt:
12575 current_is_stmt = !current_is_stmt;
12576 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12577 "is_stmt %d", current_is_stmt);
12578 break;
12579
12580 case LI_set_prologue_end:
12581 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12582 "set prologue end");
12583 break;
12584
12585 case LI_set_epilogue_begin:
12586 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12587 "set epilogue begin");
12588 break;
12589
12590 case LI_set_discriminator:
12591 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12592 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12593 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12594 dw2_asm_output_data_uleb128 (ent->val, NULL);
12595 break;
12596 }
12597 }
12598
12599 /* Emit debug info for the address of the end of the table. */
12600 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12601 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12602 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12603 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12604
12605 dw2_asm_output_data (1, 0, "end sequence");
12606 dw2_asm_output_data_uleb128 (1, NULL);
12607 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12608 }
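
/* The one-byte opcodes emitted for small line deltas above are DWARF
   "special opcodes": a single byte encodes both a line delta and an
   address advance, and in this table the address advance is always zero
   because addresses move via DW_LNE_set_address and
   DW_LNS_fixed_advance_pc instead.  A standalone sketch of the encoding,
   using the sample header parameters from the DWARF specification rather
   than GCC's DWARF_LINE_* macro values:  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdio.h>

#define LINE_BASE    (-3)
#define LINE_RANGE   12
#define OPCODE_BASE  13

int
main (void)
{
  /* opcode = (delta - LINE_BASE) + LINE_RANGE * advance + OPCODE_BASE  */
  int delta = 2, adv = 3;
  int opcode = (delta - LINE_BASE) + LINE_RANGE * adv + OPCODE_BASE;
  printf ("opcode 0x%02x\n", opcode);   /* prints: opcode 0x36 */

  /* Decoding inverts it.  */
  int adjusted = opcode - OPCODE_BASE;
  printf ("delta %d, advance %d\n",
          LINE_BASE + adjusted % LINE_RANGE, adjusted / LINE_RANGE);
  return 0;
}
#endif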
12609
12610 /* Output the source line number correspondence information. This
12611 information goes into the .debug_line section. */
12612
12613 static void
12614 output_line_info (bool prologue_only)
12615 {
12616 static unsigned int generation;
12617 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12618 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12619 bool saw_one = false;
12620 int opc;
12621
12622 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12623 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12624 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12625 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12626
12627 if (!XCOFF_DEBUGGING_INFO)
12628 {
12629 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12630 dw2_asm_output_data (4, 0xffffffff,
12631 "Initial length escape value indicating 64-bit DWARF extension");
12632 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12633 "Length of Source Line Info");
12634 }
12635
12636 ASM_OUTPUT_LABEL (asm_out_file, l1);
12637
12638 output_dwarf_version ();
12639 if (dwarf_version >= 5)
12640 {
12641 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12642 dw2_asm_output_data (1, 0, "Segment Size");
12643 }
12644 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12645 ASM_OUTPUT_LABEL (asm_out_file, p1);
12646
12647 /* Define the architecture-dependent minimum instruction length (in bytes).
12648 In this implementation of DWARF, this field is used for information
12649 purposes only. Since GCC generates assembly language, we have no
12650 a priori knowledge of how many instruction bytes are generated for each
12651 source line, and therefore can use only the DW_LNE_set_address and
12652 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12653 this as '1', which is "correct enough" for all architectures,
12654 and don't let the target override. */
12655 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12656
12657 if (dwarf_version >= 4)
12658 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12659 "Maximum Operations Per Instruction");
12660 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12661 "Default is_stmt_start flag");
12662 dw2_asm_output_data (1, DWARF_LINE_BASE,
12663 "Line Base Value (Special Opcodes)");
12664 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12665 "Line Range Value (Special Opcodes)");
12666 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12667 "Special Opcode Base");
12668
12669 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12670 {
12671 int n_op_args;
12672 switch (opc)
12673 {
12674 case DW_LNS_advance_pc:
12675 case DW_LNS_advance_line:
12676 case DW_LNS_set_file:
12677 case DW_LNS_set_column:
12678 case DW_LNS_fixed_advance_pc:
12679 case DW_LNS_set_isa:
12680 n_op_args = 1;
12681 break;
12682 default:
12683 n_op_args = 0;
12684 break;
12685 }
12686
12687 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12688 opc, n_op_args);
12689 }
12690
12691 /* Write out the information about the files we use. */
12692 output_file_names ();
12693 ASM_OUTPUT_LABEL (asm_out_file, p2);
12694 if (prologue_only)
12695 {
12696 /* Output the marker for the end of the line number info. */
12697 ASM_OUTPUT_LABEL (asm_out_file, l2);
12698 return;
12699 }
12700
12701 if (separate_line_info)
12702 {
12703 dw_line_info_table *table;
12704 size_t i;
12705
12706 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12707 if (table->in_use)
12708 {
12709 output_one_line_info_table (table);
12710 saw_one = true;
12711 }
12712 }
12713 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12714 {
12715 output_one_line_info_table (cold_text_section_line_info);
12716 saw_one = true;
12717 }
12718
12719 /* ??? Some Darwin linkers crash on a .debug_line section with no
12720 sequences. Further, merely a DW_LNE_end_sequence entry is not
12721 sufficient -- the address column must also be initialized.
12722 Make sure to output at least one set_address/end_sequence pair,
12723 choosing .text since that section is always present. */
12724 if (text_section_line_info->in_use || !saw_one)
12725 output_one_line_info_table (text_section_line_info);
12726
12727 /* Output the marker for the end of the line number info. */
12728 ASM_OUTPUT_LABEL (asm_out_file, l2);
12729 }
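
/* For reference, the fields emitted above line up with the DWARF 5
   line-number program header.  The struct below is only a field-by-field
   sketch of that layout for the 32-bit DWARF case; it is not a type GCC
   uses, and the trailing variable-length arrays are shown as comments.  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdint.h>

struct dwarf5_line_header_sketch
{
  uint32_t unit_length;                  /* delta l2 - l1             */
  uint16_t version;                      /* output_dwarf_version ()   */
  uint8_t  address_size;                 /* DWARF2_ADDR_SIZE          */
  uint8_t  segment_selector_size;        /* 0                         */
  uint32_t header_length;                /* delta p2 - p1             */
  uint8_t  minimum_instruction_length;   /* always 1 here             */
  uint8_t  maximum_operations_per_instruction;
  uint8_t  default_is_stmt;
  int8_t   line_base;
  uint8_t  line_range;
  uint8_t  opcode_base;
  /* uint8_t standard_opcode_lengths[opcode_base - 1]; then the directory
     and file name tables emitted by output_file_names.  */
};
#endif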
12730
12731 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12732
12733 static inline bool
12734 need_endianity_attribute_p (bool reverse)
12735 {
12736 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12737 }
12738
12739 /* Given a pointer to a tree node for some base type, return a pointer to
12740 a DIE that describes the given type. REVERSE is true if the type is
12741 to be interpreted in the reverse storage order wrt the target order.
12742
12743 This routine must only be called for GCC type nodes that correspond to
12744 Dwarf base (fundamental) types. */
12745
12746 static dw_die_ref
12747 base_type_die (tree type, bool reverse)
12748 {
12749 dw_die_ref base_type_result;
12750 enum dwarf_type encoding;
12751 bool fpt_used = false;
12752 struct fixed_point_type_info fpt_info;
12753 tree type_bias = NULL_TREE;
12754
12755 /* If this is a subtype that should not be emitted as a subrange type,
12756 use the base type. See subrange_type_for_debug_p. */
12757 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12758 type = TREE_TYPE (type);
12759
12760 switch (TREE_CODE (type))
12761 {
12762 case INTEGER_TYPE:
12763 if ((dwarf_version >= 4 || !dwarf_strict)
12764 && TYPE_NAME (type)
12765 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12766 && DECL_IS_BUILTIN (TYPE_NAME (type))
12767 && DECL_NAME (TYPE_NAME (type)))
12768 {
12769 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12770 if (strcmp (name, "char16_t") == 0
12771 || strcmp (name, "char32_t") == 0)
12772 {
12773 encoding = DW_ATE_UTF;
12774 break;
12775 }
12776 }
12777 if ((dwarf_version >= 3 || !dwarf_strict)
12778 && lang_hooks.types.get_fixed_point_type_info)
12779 {
12780 memset (&fpt_info, 0, sizeof (fpt_info));
12781 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12782 {
12783 fpt_used = true;
12784 encoding = ((TYPE_UNSIGNED (type))
12785 ? DW_ATE_unsigned_fixed
12786 : DW_ATE_signed_fixed);
12787 break;
12788 }
12789 }
12790 if (TYPE_STRING_FLAG (type))
12791 {
12792 if (TYPE_UNSIGNED (type))
12793 encoding = DW_ATE_unsigned_char;
12794 else
12795 encoding = DW_ATE_signed_char;
12796 }
12797 else if (TYPE_UNSIGNED (type))
12798 encoding = DW_ATE_unsigned;
12799 else
12800 encoding = DW_ATE_signed;
12801
12802 if (!dwarf_strict
12803 && lang_hooks.types.get_type_bias)
12804 type_bias = lang_hooks.types.get_type_bias (type);
12805 break;
12806
12807 case REAL_TYPE:
12808 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12809 {
12810 if (dwarf_version >= 3 || !dwarf_strict)
12811 encoding = DW_ATE_decimal_float;
12812 else
12813 encoding = DW_ATE_lo_user;
12814 }
12815 else
12816 encoding = DW_ATE_float;
12817 break;
12818
12819 case FIXED_POINT_TYPE:
12820 if (!(dwarf_version >= 3 || !dwarf_strict))
12821 encoding = DW_ATE_lo_user;
12822 else if (TYPE_UNSIGNED (type))
12823 encoding = DW_ATE_unsigned_fixed;
12824 else
12825 encoding = DW_ATE_signed_fixed;
12826 break;
12827
12828 /* Dwarf2 doesn't know anything about complex ints, so use
12829 a user defined type for it. */
12830 case COMPLEX_TYPE:
12831 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12832 encoding = DW_ATE_complex_float;
12833 else
12834 encoding = DW_ATE_lo_user;
12835 break;
12836
12837 case BOOLEAN_TYPE:
12838 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12839 encoding = DW_ATE_boolean;
12840 break;
12841
12842 default:
12843 /* No other TREE_CODEs are Dwarf fundamental types. */
12844 gcc_unreachable ();
12845 }
12846
12847 base_type_result = new_die_raw (DW_TAG_base_type);
12848
12849 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12850 int_size_in_bytes (type));
12851 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12852
12853 if (need_endianity_attribute_p (reverse))
12854 add_AT_unsigned (base_type_result, DW_AT_endianity,
12855 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12856
12857 add_alignment_attribute (base_type_result, type);
12858
12859 if (fpt_used)
12860 {
12861 switch (fpt_info.scale_factor_kind)
12862 {
12863 case fixed_point_scale_factor_binary:
12864 add_AT_int (base_type_result, DW_AT_binary_scale,
12865 fpt_info.scale_factor.binary);
12866 break;
12867
12868 case fixed_point_scale_factor_decimal:
12869 add_AT_int (base_type_result, DW_AT_decimal_scale,
12870 fpt_info.scale_factor.decimal);
12871 break;
12872
12873 case fixed_point_scale_factor_arbitrary:
12874 /* Arbitrary scale factors cannot be described in standard DWARF,
12875 yet. */
12876 if (!dwarf_strict)
12877 {
12878 /* Describe the scale factor as a rational constant. */
12879 const dw_die_ref scale_factor
12880 = new_die (DW_TAG_constant, comp_unit_die (), type);
12881
12882 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12883 fpt_info.scale_factor.arbitrary.numerator);
12884 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12885 fpt_info.scale_factor.arbitrary.denominator);
12886
12887 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12888 }
12889 break;
12890
12891 default:
12892 gcc_unreachable ();
12893 }
12894 }
12895
12896 if (type_bias)
12897 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12898 dw_scalar_form_constant
12899 | dw_scalar_form_exprloc
12900 | dw_scalar_form_reference,
12901 NULL);
12902
12903 return base_type_result;
12904 }
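
/* The encoding selection above boils down to a few properties of the
   type: signedness, "char-ness", fixed/float/decimal, and so on.  A
   rough standalone analogue for plain C scalar types; pick_encoding is
   a hypothetical helper and only the DW_ATE numeric values below are
   taken from DWARF.  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdio.h>

enum { ATE_boolean = 0x02, ATE_float = 0x04, ATE_signed = 0x05,
       ATE_signed_char = 0x06, ATE_unsigned = 0x07,
       ATE_unsigned_char = 0x08 };

static int
pick_encoding (int is_char, int is_unsigned, int is_float, int is_bool)
{
  if (is_bool)
    return ATE_boolean;
  if (is_float)
    return ATE_float;
  if (is_char)
    return is_unsigned ? ATE_unsigned_char : ATE_signed_char;
  return is_unsigned ? ATE_unsigned : ATE_signed;
}

int
main (void)
{
  printf ("unsigned int -> DW_ATE 0x%02x\n", pick_encoding (0, 1, 0, 0));
  printf ("signed char  -> DW_ATE 0x%02x\n", pick_encoding (1, 0, 0, 0));
  return 0;
}
#endif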
12905
12906 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12907 named 'auto' in its type: return true for it, false otherwise. */
12908
12909 static inline bool
12910 is_cxx_auto (tree type)
12911 {
12912 if (is_cxx ())
12913 {
12914 tree name = TYPE_IDENTIFIER (type);
12915 if (name == get_identifier ("auto")
12916 || name == get_identifier ("decltype(auto)"))
12917 return true;
12918 }
12919 return false;
12920 }
12921
12922 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12923 given input type is a Dwarf "fundamental" type. Otherwise return null. */
12924
12925 static inline int
12926 is_base_type (tree type)
12927 {
12928 switch (TREE_CODE (type))
12929 {
12930 case INTEGER_TYPE:
12931 case REAL_TYPE:
12932 case FIXED_POINT_TYPE:
12933 case COMPLEX_TYPE:
12934 case BOOLEAN_TYPE:
12935 case POINTER_BOUNDS_TYPE:
12936 return 1;
12937
12938 case VOID_TYPE:
12939 case ARRAY_TYPE:
12940 case RECORD_TYPE:
12941 case UNION_TYPE:
12942 case QUAL_UNION_TYPE:
12943 case ENUMERAL_TYPE:
12944 case FUNCTION_TYPE:
12945 case METHOD_TYPE:
12946 case POINTER_TYPE:
12947 case REFERENCE_TYPE:
12948 case NULLPTR_TYPE:
12949 case OFFSET_TYPE:
12950 case LANG_TYPE:
12951 case VECTOR_TYPE:
12952 return 0;
12953
12954 default:
12955 if (is_cxx_auto (type))
12956 return 0;
12957 gcc_unreachable ();
12958 }
12959
12960 return 0;
12961 }
12962
12963 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12964 node, return the size in bits for the type if it is a constant, or else
12965 return the alignment for the type if the type's size is not constant, or
12966 else return BITS_PER_WORD if the type actually turns out to be an
12967 ERROR_MARK node. */
12968
12969 static inline unsigned HOST_WIDE_INT
12970 simple_type_size_in_bits (const_tree type)
12971 {
12972 if (TREE_CODE (type) == ERROR_MARK)
12973 return BITS_PER_WORD;
12974 else if (TYPE_SIZE (type) == NULL_TREE)
12975 return 0;
12976 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12977 return tree_to_uhwi (TYPE_SIZE (type));
12978 else
12979 return TYPE_ALIGN (type);
12980 }
12981
12982 /* Similarly, but return an offset_int instead of UHWI. */
12983
12984 static inline offset_int
12985 offset_int_type_size_in_bits (const_tree type)
12986 {
12987 if (TREE_CODE (type) == ERROR_MARK)
12988 return BITS_PER_WORD;
12989 else if (TYPE_SIZE (type) == NULL_TREE)
12990 return 0;
12991 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12992 return wi::to_offset (TYPE_SIZE (type));
12993 else
12994 return TYPE_ALIGN (type);
12995 }
12996
12997 /* Given a pointer to a tree node for a subrange type, return a pointer
12998 to a DIE that describes the given type. */
12999
13000 static dw_die_ref
13001 subrange_type_die (tree type, tree low, tree high, tree bias,
13002 dw_die_ref context_die)
13003 {
13004 dw_die_ref subrange_die;
13005 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13006
13007 if (context_die == NULL)
13008 context_die = comp_unit_die ();
13009
13010 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13011
13012 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13013 {
13014 /* The size of the subrange type and its base type do not match,
13015 so we need to generate a size attribute for the subrange type. */
13016 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13017 }
13018
13019 add_alignment_attribute (subrange_die, type);
13020
13021 if (low)
13022 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13023 if (high)
13024 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13025 if (bias && !dwarf_strict)
13026 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13027 dw_scalar_form_constant
13028 | dw_scalar_form_exprloc
13029 | dw_scalar_form_reference,
13030 NULL);
13031
13032 return subrange_die;
13033 }
13034
13035 /* Returns the (const and/or volatile) cv_qualifiers associated with
13036 the decl node. This will normally be augmented with the
13037 cv_qualifiers of the underlying type in add_type_attribute. */
13038
13039 static int
13040 decl_quals (const_tree decl)
13041 {
13042 return ((TREE_READONLY (decl)
13043 /* The C++ front-end correctly marks reference-typed
13044 variables as readonly, but from a language (and debug
13045 info) standpoint they are not const-qualified. */
13046 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13047 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13048 | (TREE_THIS_VOLATILE (decl)
13049 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13050 }
13051
13052 /* Determine the TYPE whose qualifiers match the largest strict subset
13053 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13054 qualifiers outside QUAL_MASK. */
13055
13056 static int
13057 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13058 {
13059 tree t;
13060 int best_rank = 0, best_qual = 0, max_rank;
13061
13062 type_quals &= qual_mask;
13063 max_rank = popcount_hwi (type_quals) - 1;
13064
13065 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13066 t = TYPE_NEXT_VARIANT (t))
13067 {
13068 int q = TYPE_QUALS (t) & qual_mask;
13069
13070 if ((q & type_quals) == q && q != type_quals
13071 && check_base_type (t, type))
13072 {
13073 int rank = popcount_hwi (q);
13074
13075 if (rank > best_rank)
13076 {
13077 best_rank = rank;
13078 best_qual = q;
13079 }
13080 }
13081 }
13082
13083 return best_qual;
13084 }
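
/* The search above ranks candidate variants by how many of the requested
   qualifier bits they already carry, using popcount, and keeps the best
   strict subset.  A small standalone sketch of that selection over
   hypothetical qualifier bits; the Q_* values and the variant list are
   made up for illustration, not GCC's TYPE_QUAL_* constants.  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdio.h>

#define Q_CONST    1
#define Q_VOLATILE 2
#define Q_RESTRICT 4

static int
popcount (int x)
{
  int n = 0;
  for (; x; x &= x - 1)
    n++;
  return n;
}

int
main (void)
{
  int want = Q_CONST | Q_VOLATILE;
  int variants[] = { 0, Q_CONST, Q_VOLATILE | Q_RESTRICT, Q_VOLATILE };
  int best = 0, best_rank = 0;

  /* Keep the variant whose qualifiers are a strict subset of WANT and
     which carries the most of them.  */
  for (int i = 0; i < 4; i++)
    {
      int q = variants[i];
      if ((q & want) == q && q != want && popcount (q) > best_rank)
        {
          best_rank = popcount (q);
          best = q;
        }
    }
  printf ("nearest subset: %d\n", best);   /* prints: 1 (Q_CONST) */
  return 0;
}
#endif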
13085
13086 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13087 static const dwarf_qual_info_t dwarf_qual_info[] =
13088 {
13089 { TYPE_QUAL_CONST, DW_TAG_const_type },
13090 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13091 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13092 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13093 };
13094 static const unsigned int dwarf_qual_info_size
13095 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13096
13097 /* If DIE is a qualified DIE of some base DIE with the same parent,
13098 return the base DIE, otherwise return NULL. Set MASK to the
13099 qualifiers added compared to the returned DIE. */
13100
13101 static dw_die_ref
13102 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13103 {
13104 unsigned int i;
13105 for (i = 0; i < dwarf_qual_info_size; i++)
13106 if (die->die_tag == dwarf_qual_info[i].t)
13107 break;
13108 if (i == dwarf_qual_info_size)
13109 return NULL;
13110 if (vec_safe_length (die->die_attr) != 1)
13111 return NULL;
13112 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13113 if (type == NULL || type->die_parent != die->die_parent)
13114 return NULL;
13115 *mask |= dwarf_qual_info[i].q;
13116 if (depth)
13117 {
13118 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13119 if (ret)
13120 return ret;
13121 }
13122 return type;
13123 }
13124
13125 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13126 entry that chains the modifiers specified by CV_QUALS in front of the
13127 given type. REVERSE is true if the type is to be interpreted in the
13128 reverse storage order wrt the target order. */
13129
13130 static dw_die_ref
13131 modified_type_die (tree type, int cv_quals, bool reverse,
13132 dw_die_ref context_die)
13133 {
13134 enum tree_code code = TREE_CODE (type);
13135 dw_die_ref mod_type_die;
13136 dw_die_ref sub_die = NULL;
13137 tree item_type = NULL;
13138 tree qualified_type;
13139 tree name, low, high;
13140 dw_die_ref mod_scope;
13141 /* Only these cv-qualifiers are currently handled. */
13142 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13143 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13144 ENCODE_QUAL_ADDR_SPACE(~0U));
13145 const bool reverse_base_type
13146 = need_endianity_attribute_p (reverse) && is_base_type (type);
13147
13148 if (code == ERROR_MARK)
13149 return NULL;
13150
13151 if (lang_hooks.types.get_debug_type)
13152 {
13153 tree debug_type = lang_hooks.types.get_debug_type (type);
13154
13155 if (debug_type != NULL_TREE && debug_type != type)
13156 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13157 }
13158
13159 cv_quals &= cv_qual_mask;
13160
13161 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13162 tag modifier (and not an attribute) that old consumers won't be
13163 able to handle.  */
13164 if (dwarf_version < 3)
13165 cv_quals &= ~TYPE_QUAL_RESTRICT;
13166
13167 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13168 if (dwarf_version < 5)
13169 cv_quals &= ~TYPE_QUAL_ATOMIC;
13170
13171 /* See if we already have the appropriately qualified variant of
13172 this type. */
13173 qualified_type = get_qualified_type (type, cv_quals);
13174
13175 if (qualified_type == sizetype)
13176 {
13177 /* Try not to expose the internal sizetype type's name. */
13178 if (TYPE_NAME (qualified_type)
13179 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13180 {
13181 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13182
13183 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13184 && (TYPE_PRECISION (t)
13185 == TYPE_PRECISION (qualified_type))
13186 && (TYPE_UNSIGNED (t)
13187 == TYPE_UNSIGNED (qualified_type)));
13188 qualified_type = t;
13189 }
13190 else if (qualified_type == sizetype
13191 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13192 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13193 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13194 qualified_type = size_type_node;
13195 if (type == sizetype)
13196 type = qualified_type;
13197 }
13198
13199 /* If we do, then we can just use its DIE, if it exists. */
13200 if (qualified_type)
13201 {
13202 mod_type_die = lookup_type_die (qualified_type);
13203
13204 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13205 dealt with specially: the DIE with the attribute, if it exists, is
13206 placed immediately after the regular DIE for the same base type. */
13207 if (mod_type_die
13208 && (!reverse_base_type
13209 || ((mod_type_die = mod_type_die->die_sib) != NULL
13210 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13211 return mod_type_die;
13212 }
13213
13214 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13215
13216 /* Handle C typedef types. */
13217 if (name
13218 && TREE_CODE (name) == TYPE_DECL
13219 && DECL_ORIGINAL_TYPE (name)
13220 && !DECL_ARTIFICIAL (name))
13221 {
13222 tree dtype = TREE_TYPE (name);
13223
13224 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13225 if (qualified_type == dtype && !reverse_base_type)
13226 {
13227 tree origin = decl_ultimate_origin (name);
13228
13229 /* Typedef variants that have an abstract origin don't get their own
13230 type DIE (see gen_typedef_die), so fall back on the ultimate
13231 abstract origin instead. */
13232 if (origin != NULL && origin != name)
13233 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13234 context_die);
13235
13236 /* For a named type, use the typedef. */
13237 gen_type_die (qualified_type, context_die);
13238 return lookup_type_die (qualified_type);
13239 }
13240 else
13241 {
13242 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13243 dquals &= cv_qual_mask;
13244 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13245 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13246 /* cv-unqualified version of named type. Just use
13247 the unnamed type to which it refers. */
13248 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13249 reverse, context_die);
13250 /* Else cv-qualified version of named type; fall through. */
13251 }
13252 }
13253
13254 mod_scope = scope_die_for (type, context_die);
13255
13256 if (cv_quals)
13257 {
13258 int sub_quals = 0, first_quals = 0;
13259 unsigned i;
13260 dw_die_ref first = NULL, last = NULL;
13261
13262 /* Determine a lesser qualified type that most closely matches
13263 this one. Then generate DW_TAG_* entries for the remaining
13264 qualifiers. */
13265 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13266 cv_qual_mask);
13267 if (sub_quals && use_debug_types)
13268 {
13269 bool needed = false;
13270 /* If emitting type units, make sure the order of qualifiers
13271 is canonical. Thus, start from unqualified type if
13272 an earlier qualifier is missing in sub_quals, but some later
13273 one is present there. */
13274 for (i = 0; i < dwarf_qual_info_size; i++)
13275 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13276 needed = true;
13277 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13278 {
13279 sub_quals = 0;
13280 break;
13281 }
13282 }
13283 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13284 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13285 {
13286 /* As not all intermediate qualified DIEs have corresponding
13287 tree types, ensure that qualified DIEs in the same scope
13288 as their DW_AT_type are emitted after their DW_AT_type,
13289 only with other qualified DIEs for the same type possibly
13290 in between them. Determine the range of such qualified
13291 DIEs now (first being the base type, last being corresponding
13292 last qualified DIE for it). */
13293 unsigned int count = 0;
13294 first = qualified_die_p (mod_type_die, &first_quals,
13295 dwarf_qual_info_size);
13296 if (first == NULL)
13297 first = mod_type_die;
13298 gcc_assert ((first_quals & ~sub_quals) == 0);
13299 for (count = 0, last = first;
13300 count < (1U << dwarf_qual_info_size);
13301 count++, last = last->die_sib)
13302 {
13303 int quals = 0;
13304 if (last == mod_scope->die_child)
13305 break;
13306 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13307 != first)
13308 break;
13309 }
13310 }
13311
13312 for (i = 0; i < dwarf_qual_info_size; i++)
13313 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13314 {
13315 dw_die_ref d;
13316 if (first && first != last)
13317 {
13318 for (d = first->die_sib; ; d = d->die_sib)
13319 {
13320 int quals = 0;
13321 qualified_die_p (d, &quals, dwarf_qual_info_size);
13322 if (quals == (first_quals | dwarf_qual_info[i].q))
13323 break;
13324 if (d == last)
13325 {
13326 d = NULL;
13327 break;
13328 }
13329 }
13330 if (d)
13331 {
13332 mod_type_die = d;
13333 continue;
13334 }
13335 }
13336 if (first)
13337 {
13338 d = new_die_raw (dwarf_qual_info[i].t);
13339 add_child_die_after (mod_scope, d, last);
13340 last = d;
13341 }
13342 else
13343 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13344 if (mod_type_die)
13345 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13346 mod_type_die = d;
13347 first_quals |= dwarf_qual_info[i].q;
13348 }
13349 }
13350 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13351 {
13352 dwarf_tag tag = DW_TAG_pointer_type;
13353 if (code == REFERENCE_TYPE)
13354 {
13355 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13356 tag = DW_TAG_rvalue_reference_type;
13357 else
13358 tag = DW_TAG_reference_type;
13359 }
13360 mod_type_die = new_die (tag, mod_scope, type);
13361
13362 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13363 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13364 add_alignment_attribute (mod_type_die, type);
13365 item_type = TREE_TYPE (type);
13366
13367 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13368 if (!ADDR_SPACE_GENERIC_P (as))
13369 {
13370 int action = targetm.addr_space.debug (as);
13371 if (action >= 0)
13372 {
13373 /* Positive values indicate an address_class. */
13374 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13375 }
13376 else
13377 {
13378 /* Negative values indicate an (inverted) segment base reg. */
13379 dw_loc_descr_ref d
13380 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13381 add_AT_loc (mod_type_die, DW_AT_segment, d);
13382 }
13383 }
13384 }
13385 else if (code == INTEGER_TYPE
13386 && TREE_TYPE (type) != NULL_TREE
13387 && subrange_type_for_debug_p (type, &low, &high))
13388 {
13389 tree bias = NULL_TREE;
13390 if (lang_hooks.types.get_type_bias)
13391 bias = lang_hooks.types.get_type_bias (type);
13392 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13393 item_type = TREE_TYPE (type);
13394 }
13395 else if (is_base_type (type))
13396 {
13397 mod_type_die = base_type_die (type, reverse);
13398
13399 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13400 if (reverse_base_type)
13401 {
13402 dw_die_ref after_die
13403 = modified_type_die (type, cv_quals, false, context_die);
13404 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13405 }
13406 else
13407 add_child_die (comp_unit_die (), mod_type_die);
13408
13409 add_pubtype (type, mod_type_die);
13410 }
13411 else
13412 {
13413 gen_type_die (type, context_die);
13414
13415 /* We have to get the type_main_variant here (and pass that to the
13416 `lookup_type_die' routine) because the ..._TYPE node we have
13417 might simply be a *copy* of some original type node (where the
13418 copy was created to help us keep track of typedef names) and
13419 that copy might have a different TYPE_UID from the original
13420 ..._TYPE node. */
13421 if (TREE_CODE (type) == FUNCTION_TYPE
13422 || TREE_CODE (type) == METHOD_TYPE)
13423 {
13424 /* For function/method types, can't just use type_main_variant here,
13425 because that can have different ref-qualifiers for C++,
13426 but try to canonicalize. */
13427 tree main = TYPE_MAIN_VARIANT (type);
13428 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13429 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13430 && check_base_type (t, main)
13431 && check_lang_type (t, type))
13432 return lookup_type_die (t);
13433 return lookup_type_die (type);
13434 }
13435 else if (TREE_CODE (type) != VECTOR_TYPE
13436 && TREE_CODE (type) != ARRAY_TYPE)
13437 return lookup_type_die (type_main_variant (type));
13438 else
13439 /* Vectors have the debugging information in the type,
13440 not the main variant. */
13441 return lookup_type_die (type);
13442 }
13443
13444 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13445 don't output a DW_TAG_typedef, since there isn't one in the
13446 user's program; just attach a DW_AT_name to the type.
13447 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13448 if the base type already has the same name. */
13449 if (name
13450 && ((TREE_CODE (name) != TYPE_DECL
13451 && (qualified_type == TYPE_MAIN_VARIANT (type)
13452 || (cv_quals == TYPE_UNQUALIFIED)))
13453 || (TREE_CODE (name) == TYPE_DECL
13454 && TREE_TYPE (name) == qualified_type
13455 && DECL_NAME (name))))
13456 {
13457 if (TREE_CODE (name) == TYPE_DECL)
13458 /* Could just call add_name_and_src_coords_attributes here,
13459 but since this is a builtin type it doesn't have any
13460 useful source coordinates anyway. */
13461 name = DECL_NAME (name);
13462 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13463 }
13464 /* This probably indicates a bug. */
13465 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13466 {
13467 name = TYPE_IDENTIFIER (type);
13468 add_name_attribute (mod_type_die,
13469 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13470 }
13471
13472 if (qualified_type && !reverse_base_type)
13473 equate_type_number_to_die (qualified_type, mod_type_die);
13474
13475 if (item_type)
13476 /* We must do this after the equate_type_number_to_die call, in case
13477 this is a recursive type. This ensures that the modified_type_die
13478 recursion will terminate even if the type is recursive. Recursive
13479 types are possible in Ada. */
13480 sub_die = modified_type_die (item_type,
13481 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13482 reverse,
13483 context_die);
13484
13485 if (sub_die != NULL)
13486 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13487
13488 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13489 if (TYPE_ARTIFICIAL (type))
13490 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13491
13492 return mod_type_die;
13493 }
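
/* When several qualifiers are added from scratch, the loop over
   dwarf_qual_info above wraps them around the base DIE in that fixed
   order, each new qualifier DIE pointing at the previous one via
   DW_AT_type.  A hedged standalone sketch of the resulting chain for
   "const volatile int"; the table below mirrors dwarf_qual_info for
   illustration only, and real output may instead reuse DIEs that
   already exist in the scope.  */
#if 0 /* Illustrative example -- not part of the GCC build.  */
#include <stdio.h>

struct qual { int bit; const char *tag; };
static const struct qual quals[] = {
  { 1, "DW_TAG_const_type" },
  { 2, "DW_TAG_volatile_type" },
  { 4, "DW_TAG_restrict_type" },
  { 8, "DW_TAG_atomic_type" },
};

int
main (void)
{
  int cv = 1 | 2;   /* const volatile */
  printf ("int");
  for (unsigned int i = 0; i < sizeof quals / sizeof quals[0]; i++)
    if (cv & quals[i].bit)
      printf (" <- %s", quals[i].tag);
  /* prints: int <- DW_TAG_const_type <- DW_TAG_volatile_type */
  printf ("\n");
  return 0;
}
#endif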
13494
13495 /* Generate DIEs for the generic parameters of T.
13496 T must be either a generic type or a generic function.
13497 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13498
13499 static void
13500 gen_generic_params_dies (tree t)
13501 {
13502 tree parms, args;
13503 int parms_num, i;
13504 dw_die_ref die = NULL;
13505 int non_default;
13506
13507 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13508 return;
13509
13510 if (TYPE_P (t))
13511 die = lookup_type_die (t);
13512 else if (DECL_P (t))
13513 die = lookup_decl_die (t);
13514
13515 gcc_assert (die);
13516
13517 parms = lang_hooks.get_innermost_generic_parms (t);
13518 if (!parms)
13519 /* T has no generic parameter. It means T is neither a generic type
13520 nor a generic function.  End of story.  */
13521 return;
13522
13523 parms_num = TREE_VEC_LENGTH (parms);
13524 args = lang_hooks.get_innermost_generic_args (t);
13525 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13526 non_default = int_cst_value (TREE_CHAIN (args));
13527 else
13528 non_default = TREE_VEC_LENGTH (args);
13529 for (i = 0; i < parms_num; i++)
13530 {
13531 tree parm, arg, arg_pack_elems;
13532 dw_die_ref parm_die;
13533
13534 parm = TREE_VEC_ELT (parms, i);
13535 arg = TREE_VEC_ELT (args, i);
13536 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13537 gcc_assert (parm && TREE_VALUE (parm) && arg);
13538
13539 if (parm && TREE_VALUE (parm) && arg)
13540 {
13541 /* If PARM represents a template parameter pack,
13542 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13543 by DW_TAG_template_*_parameter DIEs for the argument
13544 pack elements of ARG. Note that ARG would then be
13545 an argument pack. */
13546 if (arg_pack_elems)
13547 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13548 arg_pack_elems,
13549 die);
13550 else
13551 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13552 true /* emit name */, die);
13553 if (i >= non_default)
13554 add_AT_flag (parm_die, DW_AT_default_value, 1);
13555 }
13556 }
13557 }
13558
13559 /* Create and return a DIE for PARM which should be
13560 the representation of a generic type parameter.
13561 For instance, in the C++ front end, PARM would be a template parameter.
13562 ARG is the argument to PARM.
13563 EMIT_NAME_P, if true, the DIE will have a DW_AT_name attribute set to the
13564 name of PARM.
13565 PARENT_DIE is the parent DIE to which the newly created DIE should be added
13566 as a child node. */
13567
13568 static dw_die_ref
13569 generic_parameter_die (tree parm, tree arg,
13570 bool emit_name_p,
13571 dw_die_ref parent_die)
13572 {
13573 dw_die_ref tmpl_die = NULL;
13574 const char *name = NULL;
13575
13576 if (!parm || !DECL_NAME (parm) || !arg)
13577 return NULL;
13578
13579 /* We support non-type generic parameters and arguments,
13580 type generic parameters and arguments, as well as
13581 generic generic parameters (a.k.a. template template parameters in C++)
13582 and arguments. */
13583 if (TREE_CODE (parm) == PARM_DECL)
13584 /* PARM is a nontype generic parameter */
13585 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13586 else if (TREE_CODE (parm) == TYPE_DECL)
13587 /* PARM is a type generic parameter. */
13588 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13589 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13590 /* PARM is a generic generic parameter.
13591 Its DIE is a GNU extension. It shall have a
13592 DW_AT_name attribute to represent the name of the template template
13593 parameter, and a DW_AT_GNU_template_name attribute to represent the
13594 name of the template template argument. */
13595 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13596 parent_die, parm);
13597 else
13598 gcc_unreachable ();
13599
13600 if (tmpl_die)
13601 {
13602 tree tmpl_type;
13603
13604 /* If PARM is a generic parameter pack, it means we are
13605 emitting debug info for a template argument pack element.
13606 In other terms, ARG is a template argument pack element.
13607 In that case, we don't emit any DW_AT_name attribute for
13608 the die. */
13609 if (emit_name_p)
13610 {
13611 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13612 gcc_assert (name);
13613 add_AT_string (tmpl_die, DW_AT_name, name);
13614 }
13615
13616 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13617 {
13618 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13619 TMPL_DIE should have a child DW_AT_type attribute that is set
13620 to the type of the argument to PARM, which is ARG.
13621 If PARM is a type generic parameter, TMPL_DIE should have a
13622 child DW_AT_type that is set to ARG. */
13623 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13624 add_type_attribute (tmpl_die, tmpl_type,
13625 (TREE_THIS_VOLATILE (tmpl_type)
13626 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13627 false, parent_die);
13628 }
13629 else
13630 {
13631 /* So TMPL_DIE is a DIE representing a generic generic template
13632 parameter, a.k.a. a template template parameter in C++,
13633 and ARG is a template. */
13634
13635 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13636 to the name of the argument. */
13637 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13638 if (name)
13639 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13640 }
13641
13642 if (TREE_CODE (parm) == PARM_DECL)
13643 /* So PARM is a non-type generic parameter.
13644 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13645 attribute of TMPL_DIE which value represents the value
13646 of ARG.
13647 We must be careful here:
13648 The value of ARG might reference some function decls.
13649 We might currently be emitting debug info for a generic
13650 type and types are emitted before function decls, we don't
13651 know if the function decls referenced by ARG will actually be
13652 emitted after cgraph computations.
13653 So must defer the generation of the DW_AT_const_value to
13654 after cgraph is ready. */
13655 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13656 }
13657
13658 return tmpl_die;
13659 }
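/* As an illustrative sketch (the exact layout depends on the front end and
   target), a C++ instantiation such as S<int, 3> of
   template <typename T, int N> struct S would get one DIE per parameter
   from the routine above, conceptually:

       DW_TAG_template_type_param
         DW_AT_name "T"
         DW_AT_type <DIE for int>
       DW_TAG_template_value_param
         DW_AT_name "N"
         DW_AT_type <DIE for int>
         DW_AT_const_value 3    (filled in later, once cgraph is ready)  */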
13660
13661 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13662 PARM_PACK, which must be a template parameter pack. The returned DIE
13663 will be a child DIE of PARENT_DIE. */
13664
13665 static dw_die_ref
13666 template_parameter_pack_die (tree parm_pack,
13667 tree parm_pack_args,
13668 dw_die_ref parent_die)
13669 {
13670 dw_die_ref die;
13671 int j;
13672
13673 gcc_assert (parent_die && parm_pack);
13674
13675 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13676 add_name_and_src_coords_attributes (die, parm_pack);
13677 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13678 generic_parameter_die (parm_pack,
13679 TREE_VEC_ELT (parm_pack_args, j),
13680 false /* Don't emit DW_AT_name */,
13681 die);
13682 return die;
13683 }
13684
13685 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13686 an enumerated type. */
13687
13688 static inline int
13689 type_is_enum (const_tree type)
13690 {
13691 return TREE_CODE (type) == ENUMERAL_TYPE;
13692 }
13693
13694 /* Return the DBX register number described by a given RTL node. */
13695
13696 static unsigned int
13697 dbx_reg_number (const_rtx rtl)
13698 {
13699 unsigned regno = REGNO (rtl);
13700
13701 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13702
13703 #ifdef LEAF_REG_REMAP
13704 if (crtl->uses_only_leaf_regs)
13705 {
13706 int leaf_reg = LEAF_REG_REMAP (regno);
13707 if (leaf_reg != -1)
13708 regno = (unsigned) leaf_reg;
13709 }
13710 #endif
13711
13712 regno = DBX_REGISTER_NUMBER (regno);
13713 gcc_assert (regno != INVALID_REGNUM);
13714 return regno;
13715 }
13716
13717 /* Optionally add a DW_OP_piece term to a location description expression.
13718 DW_OP_piece is only added if the location description expression does
13719 not already end with DW_OP_piece. */
13720
13721 static void
13722 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13723 {
13724 dw_loc_descr_ref loc;
13725
13726 if (*list_head != NULL)
13727 {
13728 /* Find the end of the chain. */
13729 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13730 ;
13731
13732 if (loc->dw_loc_opc != DW_OP_piece)
13733 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13734 }
13735 }
13736
13737 /* Return a location descriptor that designates a machine register or
13738 zero if there is none. */
13739
13740 static dw_loc_descr_ref
13741 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13742 {
13743 rtx regs;
13744
13745 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13746 return 0;
13747
13748 /* We only use "frame base" when we're sure we're talking about the
13749 post-prologue local stack frame. We do this by *not* running
13750 register elimination until this point, and recognizing the special
13751 argument pointer and soft frame pointer rtx's.
13752 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13753 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13754 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13755 {
13756 dw_loc_descr_ref result = NULL;
13757
13758 if (dwarf_version >= 4 || !dwarf_strict)
13759 {
13760 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13761 initialized);
13762 if (result)
13763 add_loc_descr (&result,
13764 new_loc_descr (DW_OP_stack_value, 0, 0));
13765 }
13766 return result;
13767 }
13768
13769 regs = targetm.dwarf_register_span (rtl);
13770
13771 if (REG_NREGS (rtl) > 1 || regs)
13772 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13773 else
13774 {
13775 unsigned int dbx_regnum = dbx_reg_number (rtl);
13776 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13777 return 0;
13778 return one_reg_loc_descriptor (dbx_regnum, initialized);
13779 }
13780 }
13781
13782 /* Return a location descriptor that designates a machine register for
13783 a given hard register number. */
13784
13785 static dw_loc_descr_ref
13786 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13787 {
13788 dw_loc_descr_ref reg_loc_descr;
13789
13790 if (regno <= 31)
13791 reg_loc_descr
13792 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13793 else
13794 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13795
13796 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13797 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13798
13799 return reg_loc_descr;
13800 }
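/* For example, a value living in DWARF register 5 is described by the
   single-byte DW_OP_reg5, whereas register 40 needs the two-byte form
   DW_OP_regx 40 (opcode plus ULEB128 operand); an uninitialized variable
   additionally gets DW_OP_GNU_uninit appended.  */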
13801
13802 /* Given an RTL of a register, return a location descriptor that
13803 designates a value that spans more than one register. */
13804
13805 static dw_loc_descr_ref
13806 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13807 enum var_init_status initialized)
13808 {
13809 int size, i;
13810 dw_loc_descr_ref loc_result = NULL;
13811
13812 /* Simple, contiguous registers. */
13813 if (regs == NULL_RTX)
13814 {
13815 unsigned reg = REGNO (rtl);
13816 int nregs;
13817
13818 #ifdef LEAF_REG_REMAP
13819 if (crtl->uses_only_leaf_regs)
13820 {
13821 int leaf_reg = LEAF_REG_REMAP (reg);
13822 if (leaf_reg != -1)
13823 reg = (unsigned) leaf_reg;
13824 }
13825 #endif
13826
13827 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13828 nregs = REG_NREGS (rtl);
13829
13830 /* At present we only track constant-sized pieces. */
13831 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13832 return NULL;
13833 size /= nregs;
13834
13835 loc_result = NULL;
13836 while (nregs--)
13837 {
13838 dw_loc_descr_ref t;
13839
13840 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13841 VAR_INIT_STATUS_INITIALIZED);
13842 add_loc_descr (&loc_result, t);
13843 add_loc_descr_op_piece (&loc_result, size);
13844 ++reg;
13845 }
13846 return loc_result;
13847 }
13848
13849 /* Now onto stupid register sets in non-contiguous locations. */
13850
13851 gcc_assert (GET_CODE (regs) == PARALLEL);
13852
13853 /* At present we only track constant-sized pieces. */
13854 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13855 return NULL;
13856 loc_result = NULL;
13857
13858 for (i = 0; i < XVECLEN (regs, 0); ++i)
13859 {
13860 dw_loc_descr_ref t;
13861
13862 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13863 VAR_INIT_STATUS_INITIALIZED);
13864 add_loc_descr (&loc_result, t);
13865 add_loc_descr_op_piece (&loc_result, size);
13866 }
13867
13868 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13869 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13870 return loc_result;
13871 }
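/* For example, a 16-byte value held in two consecutive 8-byte hard
   registers mapping to DWARF registers 0 and 1 is described as
       DW_OP_reg0 DW_OP_piece 8 DW_OP_reg1 DW_OP_piece 8
   and a PARALLEL span from targetm.dwarf_register_span yields the same
   kind of reg/piece sequence, one pair per element.  */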
13872
13873 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13874
13875 /* Return a location descriptor that designates a constant i,
13876 as a compound operation from constant (i >> shift), constant shift
13877 and DW_OP_shl. */
13878
13879 static dw_loc_descr_ref
13880 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13881 {
13882 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13883 add_loc_descr (&ret, int_loc_descriptor (shift));
13884 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13885 return ret;
13886 }
13887
13888 /* Return a location descriptor that designates constant POLY_I. */
13889
13890 static dw_loc_descr_ref
13891 int_loc_descriptor (poly_int64 poly_i)
13892 {
13893 enum dwarf_location_atom op;
13894
13895 HOST_WIDE_INT i;
13896 if (!poly_i.is_constant (&i))
13897 {
13898 /* Create location descriptions for the non-constant part and
13899 add any constant offset at the end. */
13900 dw_loc_descr_ref ret = NULL;
13901 HOST_WIDE_INT constant = poly_i.coeffs[0];
13902 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13903 {
13904 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13905 if (coeff != 0)
13906 {
13907 dw_loc_descr_ref start = ret;
13908 unsigned int factor;
13909 int bias;
13910 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13911 (j, &factor, &bias);
13912
13913 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13914 add COEFF * (REGNO / FACTOR) now and subtract
13915 COEFF * BIAS from the final constant part. */
13916 constant -= coeff * bias;
13917 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13918 if (coeff % factor == 0)
13919 coeff /= factor;
13920 else
13921 {
13922 int amount = exact_log2 (factor);
13923 gcc_assert (amount >= 0);
13924 add_loc_descr (&ret, int_loc_descriptor (amount));
13925 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13926 }
13927 if (coeff != 1)
13928 {
13929 add_loc_descr (&ret, int_loc_descriptor (coeff));
13930 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13931 }
13932 if (start)
13933 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13934 }
13935 }
13936 loc_descr_plus_const (&ret, constant);
13937 return ret;
13938 }
13939
13940 /* Pick the smallest representation of a constant, rather than just
13941 defaulting to the LEB encoding. */
13942 if (i >= 0)
13943 {
13944 int clz = clz_hwi (i);
13945 int ctz = ctz_hwi (i);
13946 if (i <= 31)
13947 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13948 else if (i <= 0xff)
13949 op = DW_OP_const1u;
13950 else if (i <= 0xffff)
13951 op = DW_OP_const2u;
13952 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13953 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13954 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13955 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13956 while DW_OP_const4u is 5 bytes. */
13957 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13958 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13959 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13960 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13961 while DW_OP_const4u is 5 bytes. */
13962 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13963
13964 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13965 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13966 <= 4)
13967 {
13968 /* As i >= 2**31, the double cast above will yield a negative number.
13969 Since wrapping is defined in DWARF expressions we can output big
13970 positive integers as small negative ones, regardless of the size
13971 of host wide ints.
13972
13973 Here, since the evaluator will handle 32-bit values and since i >=
13974 2**31, we know it's going to be interpreted as a negative literal:
13975 store it this way if we can do better than 5 bytes this way. */
13976 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13977 }
13978 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13979 op = DW_OP_const4u;
13980
13981 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13982 least 6 bytes: see if we can do better before falling back to it. */
13983 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13984 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13985 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13986 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13987 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13988 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13989 >= HOST_BITS_PER_WIDE_INT)
13990 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13991 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13992 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13993 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13994 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13995 && size_of_uleb128 (i) > 6)
13996 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13997 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13998 else
13999 op = DW_OP_constu;
14000 }
14001 else
14002 {
14003 if (i >= -0x80)
14004 op = DW_OP_const1s;
14005 else if (i >= -0x8000)
14006 op = DW_OP_const2s;
14007 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14008 {
14009 if (size_of_int_loc_descriptor (i) < 5)
14010 {
14011 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14012 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14013 return ret;
14014 }
14015 op = DW_OP_const4s;
14016 }
14017 else
14018 {
14019 if (size_of_int_loc_descriptor (i)
14020 < (unsigned long) 1 + size_of_sleb128 (i))
14021 {
14022 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14023 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14024 return ret;
14025 }
14026 op = DW_OP_consts;
14027 }
14028 }
14029
14030 return new_loc_descr (op, i, 0);
14031 }
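/* A worked example, assuming a 64-bit HOST_WIDE_INT: for i = 0x80000000
   we get clz = 32 and ctz = 31, so the shift form above applies and the
   constant is emitted as DW_OP_lit16 DW_OP_lit27 DW_OP_shl (3 bytes)
   rather than DW_OP_const4u 0x80000000 (5 bytes).  */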
14032
14033 /* Likewise, for unsigned constants. */
14034
14035 static dw_loc_descr_ref
14036 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14037 {
14038 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14039 const unsigned HOST_WIDE_INT max_uint
14040 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14041
14042 /* If possible, use the clever signed constants handling. */
14043 if (i <= max_int)
14044 return int_loc_descriptor ((HOST_WIDE_INT) i);
14045
14046 /* Here, we are left with positive numbers that cannot be represented as
14047 HOST_WIDE_INT, i.e.:
14048 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14049
14050 Using DW_OP_const4/8/./u operation to encode them consumes a lot of bytes
14051 whereas it may be better to output a negative integer: thanks to integer
14052 wrapping, we know that:
14053 x = x - 2 ** DWARF2_ADDR_SIZE
14054 = x - 2 * (max (HOST_WIDE_INT) + 1)
14055 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14056 small negative integers. Let's try that in cases where it will clearly improve
14057 the encoding: there is no gain turning DW_OP_const4u into
14058 DW_OP_const4s. */
14059 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14060 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14061 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14062 {
14063 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14064
14065 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14066 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14067 const HOST_WIDE_INT second_shift
14068 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14069
14070 /* So we finally have:
14071 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14072 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14073 return int_loc_descriptor (second_shift);
14074 }
14075
14076 /* Last chance: fallback to a simple constant operation. */
14077 return new_loc_descr
14078 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14079 ? DW_OP_const4u
14080 : DW_OP_const8u,
14081 i, 0);
14082 }
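/* A worked example, assuming DWARF2_ADDR_SIZE == 8 and a 64-bit
   HOST_WIDE_INT: for i = 0xffffffffffffff00 the wrapping trick above
   yields second_shift = -0x100, so the constant is emitted as
   DW_OP_const2s -256 (3 bytes); the evaluator wraps that back to
   0xffffffffffffff00, whereas the DW_OP_const8u fallback would have
   cost 9 bytes.  */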
14083
14084 /* Generate and return a location description that computes the unsigned
14085 comparison of the two stack top entries (a OP b where b is the top-most
14086 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14087 LE_EXPR, GT_EXPR or GE_EXPR. */
14088
14089 static dw_loc_descr_ref
14090 uint_comparison_loc_list (enum tree_code kind)
14091 {
14092 enum dwarf_location_atom op, flip_op;
14093 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14094
14095 switch (kind)
14096 {
14097 case LT_EXPR:
14098 op = DW_OP_lt;
14099 break;
14100 case LE_EXPR:
14101 op = DW_OP_le;
14102 break;
14103 case GT_EXPR:
14104 op = DW_OP_gt;
14105 break;
14106 case GE_EXPR:
14107 op = DW_OP_ge;
14108 break;
14109 default:
14110 gcc_unreachable ();
14111 }
14112
14113 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14114 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14115
14116 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14117 possible to perform unsigned comparisons: we just have to distinguish
14118 two cases:
14119
14120 1. when a and b have the same sign (as signed integers); then we should
14121 return: a OP(signed) b;
14122
14123 2. when a is a negative signed integer while b is a positive one, then a
14124 is a greater unsigned integer than b; likewise when a and b's roles
14125 are flipped.
14126
14127 So first, compare the sign of the two operands. */
14128 ret = new_loc_descr (DW_OP_over, 0, 0);
14129 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14130 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14131 /* If they have different signs (i.e. they have different sign bits), then
14132 the stack top value has now the sign bit set and thus it's smaller than
14133 zero. */
14134 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14135 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14136 add_loc_descr (&ret, bra_node);
14137
14138 /* We are in case 1. At this point, we know both operands have the same
14139 sign, so it's safe to use the built-in signed comparison.
14140 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14141 add_loc_descr (&ret, jmp_node);
14142
14143 /* We are in case 2. Here, we know both operands do not have the same sign,
14144 so we have to flip the signed comparison. */
14145 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14146 tmp = new_loc_descr (flip_op, 0, 0);
14147 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14148 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14149 add_loc_descr (&ret, tmp);
14150
14151 /* This dummy operation is necessary to make the two branches join. */
14152 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14153 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14154 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14155 add_loc_descr (&ret, tmp);
14156
14157 return ret;
14158 }
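/* A short illustrative trace for KIND == LT_EXPR with a = -1 (that is,
   0xffffffff viewed as a 32-bit unsigned value) and b = 1 on the stack:
   the DW_OP_over DW_OP_over DW_OP_xor sequence leaves a value with the
   sign bit set, the DW_OP_lit0 DW_OP_lt test is therefore true, and
   DW_OP_bra takes the flipped branch, evaluating a >(signed) b,
   i.e. -1 > 1, which is 0 -- exactly a <(unsigned) b.  */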
14159
14160 /* Likewise, but takes the location description lists (might be destructive on
14161 them). Return NULL if either is NULL or if concatenation fails. */
14162
14163 static dw_loc_list_ref
14164 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14165 enum tree_code kind)
14166 {
14167 if (left == NULL || right == NULL)
14168 return NULL;
14169
14170 add_loc_list (&left, right);
14171 if (left == NULL)
14172 return NULL;
14173
14174 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14175 return left;
14176 }
14177
14178 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14179 without actually allocating it. */
14180
14181 static unsigned long
14182 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14183 {
14184 return size_of_int_loc_descriptor (i >> shift)
14185 + size_of_int_loc_descriptor (shift)
14186 + 1;
14187 }
14188
14189 /* Return size_of_locs (int_loc_descriptor (i)) without
14190 actually allocating it. */
14191
14192 static unsigned long
14193 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14194 {
14195 unsigned long s;
14196
14197 if (i >= 0)
14198 {
14199 int clz, ctz;
14200 if (i <= 31)
14201 return 1;
14202 else if (i <= 0xff)
14203 return 2;
14204 else if (i <= 0xffff)
14205 return 3;
14206 clz = clz_hwi (i);
14207 ctz = ctz_hwi (i);
14208 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14209 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14210 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14211 - clz - 5);
14212 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14213 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14214 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14215 - clz - 8);
14216 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14217 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14218 <= 4)
14219 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14220 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14221 return 5;
14222 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14223 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14224 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14225 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14226 - clz - 8);
14227 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14228 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14229 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14230 - clz - 16);
14231 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14232 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14233 && s > 6)
14234 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14235 - clz - 32);
14236 else
14237 return 1 + s;
14238 }
14239 else
14240 {
14241 if (i >= -0x80)
14242 return 2;
14243 else if (i >= -0x8000)
14244 return 3;
14245 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14246 {
14247 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14248 {
14249 s = size_of_int_loc_descriptor (-i) + 1;
14250 if (s < 5)
14251 return s;
14252 }
14253 return 5;
14254 }
14255 else
14256 {
14257 unsigned long r = 1 + size_of_sleb128 (i);
14258 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14259 {
14260 s = size_of_int_loc_descriptor (-i) + 1;
14261 if (s < r)
14262 return s;
14263 }
14264 return r;
14265 }
14266 }
14267 }
14268
14269 /* Return a location description representing the "address" of an integer
14270 value. This can appear only as a top-level expression. */
14271
14272 static dw_loc_descr_ref
14273 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14274 {
14275 int litsize;
14276 dw_loc_descr_ref loc_result = NULL;
14277
14278 if (!(dwarf_version >= 4 || !dwarf_strict))
14279 return NULL;
14280
14281 litsize = size_of_int_loc_descriptor (i);
14282 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14283 is more compact. For DW_OP_stack_value we need:
14284 litsize + 1 (DW_OP_stack_value)
14285 and for DW_OP_implicit_value:
14286 1 (DW_OP_implicit_value) + 1 (length) + size. */
14287 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14288 {
14289 loc_result = int_loc_descriptor (i);
14290 add_loc_descr (&loc_result,
14291 new_loc_descr (DW_OP_stack_value, 0, 0));
14292 return loc_result;
14293 }
14294
14295 loc_result = new_loc_descr (DW_OP_implicit_value,
14296 size, 0);
14297 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14298 loc_result->dw_loc_oprnd2.v.val_int = i;
14299 return loc_result;
14300 }
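/* For example, with SIZE == 4 (and DWARF2_ADDR_SIZE at least that), i = 5
   costs one byte as DW_OP_lit5, so DW_OP_lit5 DW_OP_stack_value (2 bytes)
   wins over DW_OP_implicit_value 4 <4-byte block> (6 bytes).  Conversely,
   for i = -200 with SIZE == 1 the literal needs DW_OP_const2s (3 bytes),
   so DW_OP_implicit_value 1 <1-byte block> (3 bytes) is chosen instead.  */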
14301
14302 /* Return a location descriptor that designates a base+offset location. */
14303
14304 static dw_loc_descr_ref
14305 based_loc_descr (rtx reg, poly_int64 offset,
14306 enum var_init_status initialized)
14307 {
14308 unsigned int regno;
14309 dw_loc_descr_ref result;
14310 dw_fde_ref fde = cfun->fde;
14311
14312 /* We only use "frame base" when we're sure we're talking about the
14313 post-prologue local stack frame. We do this by *not* running
14314 register elimination until this point, and recognizing the special
14315 argument pointer and soft frame pointer rtx's. */
14316 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14317 {
14318 rtx elim = (ira_use_lra_p
14319 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14320 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14321
14322 if (elim != reg)
14323 {
14324 elim = strip_offset_and_add (elim, &offset);
14325 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14326 && (elim == hard_frame_pointer_rtx
14327 || elim == stack_pointer_rtx))
14328 || elim == (frame_pointer_needed
14329 ? hard_frame_pointer_rtx
14330 : stack_pointer_rtx));
14331
14332 /* If drap register is used to align stack, use frame
14333 pointer + offset to access stack variables. If stack
14334 is aligned without drap, use stack pointer + offset to
14335 access stack variables. */
14336 if (crtl->stack_realign_tried
14337 && reg == frame_pointer_rtx)
14338 {
14339 int base_reg
14340 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14341 ? HARD_FRAME_POINTER_REGNUM
14342 : REGNO (elim));
14343 return new_reg_loc_descr (base_reg, offset);
14344 }
14345
14346 gcc_assert (frame_pointer_fb_offset_valid);
14347 offset += frame_pointer_fb_offset;
14348 HOST_WIDE_INT const_offset;
14349 if (offset.is_constant (&const_offset))
14350 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14351 else
14352 {
14353 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14354 loc_descr_plus_const (&ret, offset);
14355 return ret;
14356 }
14357 }
14358 }
14359
14360 regno = REGNO (reg);
14361 #ifdef LEAF_REG_REMAP
14362 if (crtl->uses_only_leaf_regs)
14363 {
14364 int leaf_reg = LEAF_REG_REMAP (regno);
14365 if (leaf_reg != -1)
14366 regno = (unsigned) leaf_reg;
14367 }
14368 #endif
14369 regno = DWARF_FRAME_REGNUM (regno);
14370
14371 HOST_WIDE_INT const_offset;
14372 if (!optimize && fde
14373 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14374 && offset.is_constant (&const_offset))
14375 {
14376 /* Use cfa+offset to represent the location of arguments passed
14377 on the stack when drap is used to align stack.
14378 Only do this when not optimizing, for optimized code var-tracking
14379 is supposed to track where the arguments live and the register
14380 used as vdrap or drap in some spot might be used for something
14381 else in other part of the routine. */
14382 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14383 }
14384
14385 result = new_reg_loc_descr (regno, offset);
14386
14387 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14388 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14389
14390 return result;
14391 }
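/* For instance, a local variable 8 bytes below the frame base is described
   simply as DW_OP_fbreg -8, while a location based on, say, DWARF
   register 7 at offset 16 comes out as DW_OP_breg7 16.  */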
14392
14393 /* Return true if this RTL expression describes a base+offset calculation. */
14394
14395 static inline int
14396 is_based_loc (const_rtx rtl)
14397 {
14398 return (GET_CODE (rtl) == PLUS
14399 && ((REG_P (XEXP (rtl, 0))
14400 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14401 && CONST_INT_P (XEXP (rtl, 1)))));
14402 }
14403
14404 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14405 failed. */
14406
14407 static dw_loc_descr_ref
14408 tls_mem_loc_descriptor (rtx mem)
14409 {
14410 tree base;
14411 dw_loc_descr_ref loc_result;
14412
14413 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14414 return NULL;
14415
14416 base = get_base_address (MEM_EXPR (mem));
14417 if (base == NULL
14418 || !VAR_P (base)
14419 || !DECL_THREAD_LOCAL_P (base))
14420 return NULL;
14421
14422 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14423 if (loc_result == NULL)
14424 return NULL;
14425
14426 if (maybe_ne (MEM_OFFSET (mem), 0))
14427 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14428
14429 return loc_result;
14430 }
14431
14432 /* Output debug info about the reason why we failed to expand an expression
14433 as a DWARF expression. */
14434
14435 static void
14436 expansion_failed (tree expr, rtx rtl, char const *reason)
14437 {
14438 if (dump_file && (dump_flags & TDF_DETAILS))
14439 {
14440 fprintf (dump_file, "Failed to expand as dwarf: ");
14441 if (expr)
14442 print_generic_expr (dump_file, expr, dump_flags);
14443 if (rtl)
14444 {
14445 fprintf (dump_file, "\n");
14446 print_rtl (dump_file, rtl);
14447 }
14448 fprintf (dump_file, "\nReason: %s\n", reason);
14449 }
14450 }
14451
14452 /* True if handling a former CONST by mem_loc_descriptor piecewise. */
14453
14454 static bool in_const_p;
14455
14456 /* Helper function for const_ok_for_output. */
14457
14458 static bool
14459 const_ok_for_output_1 (rtx rtl)
14460 {
14461 if (targetm.const_not_ok_for_debug_p (rtl))
14462 {
14463 if (GET_CODE (rtl) != UNSPEC)
14464 {
14465 expansion_failed (NULL_TREE, rtl,
14466 "Expression rejected for debug by the backend.\n");
14467 return false;
14468 }
14469
14470 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14471 the target hook doesn't explicitly allow it in debug info, assume
14472 we can't express it in the debug info. */
14473 /* Don't complain about TLS UNSPECs, those are just too hard to
14474 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14475 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14476 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14477 if (flag_checking
14478 && !in_const_p
14479 && (XVECLEN (rtl, 0) == 0
14480 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14481 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14482 inform (current_function_decl
14483 ? DECL_SOURCE_LOCATION (current_function_decl)
14484 : UNKNOWN_LOCATION,
14485 #if NUM_UNSPEC_VALUES > 0
14486 "non-delegitimized UNSPEC %s (%d) found in variable location",
14487 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14488 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14489 XINT (rtl, 1));
14490 #else
14491 "non-delegitimized UNSPEC %d found in variable location",
14492 XINT (rtl, 1));
14493 #endif
14494 expansion_failed (NULL_TREE, rtl,
14495 "UNSPEC hasn't been delegitimized.\n");
14496 return false;
14497 }
14498
14499 if (CONST_POLY_INT_P (rtl))
14500 return false;
14501
14502 /* FIXME: Refer to PR60655. It is possible for simplification
14503 of rtl expressions in var tracking to produce such expressions.
14504 We should really identify / validate expressions
14505 enclosed in CONST that can be handled by assemblers on various
14506 targets and only handle legitimate cases here. */
14507 switch (GET_CODE (rtl))
14508 {
14509 case SYMBOL_REF:
14510 break;
14511 case NOT:
14512 case NEG:
14513 return false;
14514 case PLUS:
14515 {
14516 /* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
14517 operands. */
14518 subrtx_var_iterator::array_type array;
14519 bool first = false;
14520 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14521 if (SYMBOL_REF_P (*iter)
14522 || LABEL_P (*iter)
14523 || GET_CODE (*iter) == UNSPEC)
14524 {
14525 first = true;
14526 break;
14527 }
14528 if (!first)
14529 return true;
14530 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14531 if (SYMBOL_REF_P (*iter)
14532 || LABEL_P (*iter)
14533 || GET_CODE (*iter) == UNSPEC)
14534 return false;
14535 return true;
14536 }
14537 case MINUS:
14538 {
14539 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14540 appear in the second operand of MINUS. */
14541 subrtx_var_iterator::array_type array;
14542 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14543 if (SYMBOL_REF_P (*iter)
14544 || LABEL_P (*iter)
14545 || GET_CODE (*iter) == UNSPEC)
14546 return false;
14547 return true;
14548 }
14549 default:
14550 return true;
14551 }
14552
14553 if (CONSTANT_POOL_ADDRESS_P (rtl))
14554 {
14555 bool marked;
14556 get_pool_constant_mark (rtl, &marked);
14557 /* If all references to this pool constant were optimized away,
14558 it was not output and thus we can't represent it. */
14559 if (!marked)
14560 {
14561 expansion_failed (NULL_TREE, rtl,
14562 "Constant was removed from constant pool.\n");
14563 return false;
14564 }
14565 }
14566
14567 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14568 return false;
14569
14570 /* Avoid references to external symbols in debug info, on several targets
14571 the linker might even refuse to link when linking a shared library,
14572 and in many other cases the relocations for .debug_info/.debug_loc are
14573 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14574 to be defined within the same shared library or executable, are fine. */
14575 if (SYMBOL_REF_EXTERNAL_P (rtl))
14576 {
14577 tree decl = SYMBOL_REF_DECL (rtl);
14578
14579 if (decl == NULL || !targetm.binds_local_p (decl))
14580 {
14581 expansion_failed (NULL_TREE, rtl,
14582 "Symbol not defined in current TU.\n");
14583 return false;
14584 }
14585 }
14586
14587 return true;
14588 }
14589
14590 /* Return true if constant RTL can be emitted in DW_OP_addr or
14591 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14592 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14593
14594 static bool
14595 const_ok_for_output (rtx rtl)
14596 {
14597 if (GET_CODE (rtl) == SYMBOL_REF)
14598 return const_ok_for_output_1 (rtl);
14599
14600 if (GET_CODE (rtl) == CONST)
14601 {
14602 subrtx_var_iterator::array_type array;
14603 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14604 if (!const_ok_for_output_1 (*iter))
14605 return false;
14606 return true;
14607 }
14608
14609 return true;
14610 }
14611
14612 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14613 if possible, NULL otherwise. */
14614
14615 static dw_die_ref
14616 base_type_for_mode (machine_mode mode, bool unsignedp)
14617 {
14618 dw_die_ref type_die;
14619 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14620
14621 if (type == NULL)
14622 return NULL;
14623 switch (TREE_CODE (type))
14624 {
14625 case INTEGER_TYPE:
14626 case REAL_TYPE:
14627 break;
14628 default:
14629 return NULL;
14630 }
14631 type_die = lookup_type_die (type);
14632 if (!type_die)
14633 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14634 comp_unit_die ());
14635 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14636 return NULL;
14637 return type_die;
14638 }
14639
14640 /* For OP descriptor assumed to be in unsigned MODE, convert it to an
14641 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14642 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14643 possible. */
14644
14645 static dw_loc_descr_ref
14646 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14647 {
14648 machine_mode outer_mode = mode;
14649 dw_die_ref type_die;
14650 dw_loc_descr_ref cvt;
14651
14652 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14653 {
14654 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14655 return op;
14656 }
14657 type_die = base_type_for_mode (outer_mode, 1);
14658 if (type_die == NULL)
14659 return NULL;
14660 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14661 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14662 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14663 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14664 add_loc_descr (&op, cvt);
14665 return op;
14666 }
14667
14668 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14669
14670 static dw_loc_descr_ref
14671 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14672 dw_loc_descr_ref op1)
14673 {
14674 dw_loc_descr_ref ret = op0;
14675 add_loc_descr (&ret, op1);
14676 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14677 if (STORE_FLAG_VALUE != 1)
14678 {
14679 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14680 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14681 }
14682 return ret;
14683 }
14684
14685 /* Subroutine of scompare_loc_descriptor for the case in which we're
14686 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14687 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14688
14689 static dw_loc_descr_ref
14690 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14691 scalar_int_mode op_mode,
14692 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14693 {
14694 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14695 dw_loc_descr_ref cvt;
14696
14697 if (type_die == NULL)
14698 return NULL;
14699 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14700 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14701 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14702 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14703 add_loc_descr (&op0, cvt);
14704 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14705 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14706 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14707 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14708 add_loc_descr (&op1, cvt);
14709 return compare_loc_descriptor (op, op0, op1);
14710 }
14711
14712 /* Subroutine of scompare_loc_descriptor for the case in which we're
14713 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14714 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14715
14716 static dw_loc_descr_ref
14717 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14718 scalar_int_mode op_mode,
14719 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14720 {
14721 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14722 /* For eq/ne, if the operands are known to be zero-extended,
14723 there is no need to do the fancy shifting up. */
14724 if (op == DW_OP_eq || op == DW_OP_ne)
14725 {
14726 dw_loc_descr_ref last0, last1;
14727 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14728 ;
14729 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14730 ;
14731 /* deref_size zero extends, and for constants we can check
14732 whether they are zero extended or not. */
14733 if (((last0->dw_loc_opc == DW_OP_deref_size
14734 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14735 || (CONST_INT_P (XEXP (rtl, 0))
14736 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14737 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14738 && ((last1->dw_loc_opc == DW_OP_deref_size
14739 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14740 || (CONST_INT_P (XEXP (rtl, 1))
14741 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14742 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14743 return compare_loc_descriptor (op, op0, op1);
14744
14745 /* EQ/NE comparison against constant in narrower type than
14746 DWARF2_ADDR_SIZE can be performed either as
14747 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14748 DW_OP_{eq,ne}
14749 or
14750 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14751 DW_OP_{eq,ne}. Pick whatever is shorter. */
14752 if (CONST_INT_P (XEXP (rtl, 1))
14753 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14754 && (size_of_int_loc_descriptor (shift) + 1
14755 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14756 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14757 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14758 & GET_MODE_MASK (op_mode))))
14759 {
14760 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14761 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14762 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14763 & GET_MODE_MASK (op_mode));
14764 return compare_loc_descriptor (op, op0, op1);
14765 }
14766 }
14767 add_loc_descr (&op0, int_loc_descriptor (shift));
14768 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14769 if (CONST_INT_P (XEXP (rtl, 1)))
14770 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14771 else
14772 {
14773 add_loc_descr (&op1, int_loc_descriptor (shift));
14774 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14775 }
14776 return compare_loc_descriptor (op, op0, op1);
14777 }
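/* A worked example, assuming DWARF2_ADDR_SIZE == 4 and a 2-byte OP_MODE:
   SHIFT is 16, so a 16-bit operand such as 0x8000 (-32768) becomes
   0x80000000 after DW_OP_lit16 DW_OP_shl, i.e. still negative as a 32-bit
   value, and the plain signed DW_OP_lt/DW_OP_gt on the shifted operands
   then matches a genuine 16-bit signed comparison.  */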
14778
14779 /* Return location descriptor for signed comparison OP RTL. */
14780
14781 static dw_loc_descr_ref
14782 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14783 machine_mode mem_mode)
14784 {
14785 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14786 dw_loc_descr_ref op0, op1;
14787
14788 if (op_mode == VOIDmode)
14789 op_mode = GET_MODE (XEXP (rtl, 1));
14790 if (op_mode == VOIDmode)
14791 return NULL;
14792
14793 scalar_int_mode int_op_mode;
14794 if (dwarf_strict
14795 && dwarf_version < 5
14796 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14797 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14798 return NULL;
14799
14800 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14801 VAR_INIT_STATUS_INITIALIZED);
14802 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14803 VAR_INIT_STATUS_INITIALIZED);
14804
14805 if (op0 == NULL || op1 == NULL)
14806 return NULL;
14807
14808 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14809 {
14810 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14811 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14812
14813 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14814 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14815 }
14816 return compare_loc_descriptor (op, op0, op1);
14817 }
14818
14819 /* Return location descriptor for unsigned comparison OP RTL. */
14820
14821 static dw_loc_descr_ref
14822 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14823 machine_mode mem_mode)
14824 {
14825 dw_loc_descr_ref op0, op1;
14826
14827 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14828 if (test_op_mode == VOIDmode)
14829 test_op_mode = GET_MODE (XEXP (rtl, 1));
14830
14831 scalar_int_mode op_mode;
14832 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14833 return NULL;
14834
14835 if (dwarf_strict
14836 && dwarf_version < 5
14837 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14838 return NULL;
14839
14840 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14841 VAR_INIT_STATUS_INITIALIZED);
14842 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14843 VAR_INIT_STATUS_INITIALIZED);
14844
14845 if (op0 == NULL || op1 == NULL)
14846 return NULL;
14847
14848 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14849 {
14850 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14851 dw_loc_descr_ref last0, last1;
14852 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14853 ;
14854 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14855 ;
14856 if (CONST_INT_P (XEXP (rtl, 0)))
14857 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14858 /* deref_size zero extends, so no need to mask it again. */
14859 else if (last0->dw_loc_opc != DW_OP_deref_size
14860 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14861 {
14862 add_loc_descr (&op0, int_loc_descriptor (mask));
14863 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14864 }
14865 if (CONST_INT_P (XEXP (rtl, 1)))
14866 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14867 /* deref_size zero extends, so no need to mask it again. */
14868 else if (last1->dw_loc_opc != DW_OP_deref_size
14869 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14870 {
14871 add_loc_descr (&op1, int_loc_descriptor (mask));
14872 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14873 }
14874 }
14875 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14876 {
14877 HOST_WIDE_INT bias = 1;
14878 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14879 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14880 if (CONST_INT_P (XEXP (rtl, 1)))
14881 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14882 + INTVAL (XEXP (rtl, 1)));
14883 else
14884 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14885 bias, 0));
14886 }
14887 return compare_loc_descriptor (op, op0, op1);
14888 }
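/* A worked example of the full-width case, with DWARF2_ADDR_SIZE == 4:
   BIAS is 0x80000000, so comparing a = 0xffffffff with b = 1 becomes a
   signed comparison of 0x7fffffff with 0x80000001; the biased values
   order exactly as the original unsigned ones, giving a > b as
   expected.  */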
14889
14890 /* Return location descriptor for {U,S}{MIN,MAX}. */
14891
14892 static dw_loc_descr_ref
14893 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14894 machine_mode mem_mode)
14895 {
14896 enum dwarf_location_atom op;
14897 dw_loc_descr_ref op0, op1, ret;
14898 dw_loc_descr_ref bra_node, drop_node;
14899
14900 scalar_int_mode int_mode;
14901 if (dwarf_strict
14902 && dwarf_version < 5
14903 && (!is_a <scalar_int_mode> (mode, &int_mode)
14904 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14905 return NULL;
14906
14907 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14908 VAR_INIT_STATUS_INITIALIZED);
14909 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14910 VAR_INIT_STATUS_INITIALIZED);
14911
14912 if (op0 == NULL || op1 == NULL)
14913 return NULL;
14914
14915 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14916 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14917 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14918 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14919 {
14920 /* Checked by the caller. */
14921 int_mode = as_a <scalar_int_mode> (mode);
14922 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14923 {
14924 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14925 add_loc_descr (&op0, int_loc_descriptor (mask));
14926 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14927 add_loc_descr (&op1, int_loc_descriptor (mask));
14928 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14929 }
14930 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14931 {
14932 HOST_WIDE_INT bias = 1;
14933 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14934 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14935 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14936 }
14937 }
14938 else if (is_a <scalar_int_mode> (mode, &int_mode)
14939 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14940 {
14941 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14942 add_loc_descr (&op0, int_loc_descriptor (shift));
14943 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14944 add_loc_descr (&op1, int_loc_descriptor (shift));
14945 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14946 }
14947 else if (is_a <scalar_int_mode> (mode, &int_mode)
14948 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14949 {
14950 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14951 dw_loc_descr_ref cvt;
14952 if (type_die == NULL)
14953 return NULL;
14954 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14955 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14956 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14957 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14958 add_loc_descr (&op0, cvt);
14959 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14960 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14961 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14962 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14963 add_loc_descr (&op1, cvt);
14964 }
14965
14966 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14967 op = DW_OP_lt;
14968 else
14969 op = DW_OP_gt;
14970 ret = op0;
14971 add_loc_descr (&ret, op1);
14972 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14973 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14974 add_loc_descr (&ret, bra_node);
14975 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14976 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14977 add_loc_descr (&ret, drop_node);
14978 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14979 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14980 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14981 && is_a <scalar_int_mode> (mode, &int_mode)
14982 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14983 ret = convert_descriptor_to_mode (int_mode, ret);
14984 return ret;
14985 }
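/* A short trace of the stack dance above for SMIN in an address-sized
   mode: after <op0> DW_OP_dup <op1> the stack is a, a, b (top last);
   DW_OP_swap DW_OP_over turn it into a, b, a, b; DW_OP_lt pops a < b and
   DW_OP_bra jumps straight to the DW_OP_drop (leaving a) when the test
   succeeds, otherwise DW_OP_swap DW_OP_drop leaves b.  */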
14986
14987 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14988 on the two operands, but only after converting them to TYPE_DIE, and
14989 afterwards convert the result back to an unsigned MODE value. */
14990
14991 static dw_loc_descr_ref
14992 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14993 scalar_int_mode mode, machine_mode mem_mode)
14994 {
14995 dw_loc_descr_ref cvt, op0, op1;
14996
14997 if (type_die == NULL)
14998 return NULL;
14999 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15000 VAR_INIT_STATUS_INITIALIZED);
15001 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15002 VAR_INIT_STATUS_INITIALIZED);
15003 if (op0 == NULL || op1 == NULL)
15004 return NULL;
15005 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15006 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15007 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15008 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15009 add_loc_descr (&op0, cvt);
15010 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15011 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15012 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15013 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15014 add_loc_descr (&op1, cvt);
15015 add_loc_descr (&op0, op1);
15016 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15017 return convert_descriptor_to_mode (mode, op0);
15018 }
15019
15020 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15021 const0 is DW_OP_lit0 or corresponding typed constant,
15022 const1 is DW_OP_lit1 or corresponding typed constant
15023 and constMSB is constant with just the MSB bit set
15024 for the mode):
15025 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15026 L1: const0 DW_OP_swap
15027 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15028 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15029 L3: DW_OP_drop
15030 L4: DW_OP_nop
15031
15032 CTZ is similar:
15033 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15034 L1: const0 DW_OP_swap
15035 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15036 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15037 L3: DW_OP_drop
15038 L4: DW_OP_nop
15039
15040 FFS is similar:
15041 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15042 L1: const1 DW_OP_swap
15043 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15044 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15045 L3: DW_OP_drop
15046 L4: DW_OP_nop */
15047
15048 static dw_loc_descr_ref
15049 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15050 machine_mode mem_mode)
15051 {
15052 dw_loc_descr_ref op0, ret, tmp;
15053 HOST_WIDE_INT valv;
15054 dw_loc_descr_ref l1jump, l1label;
15055 dw_loc_descr_ref l2jump, l2label;
15056 dw_loc_descr_ref l3jump, l3label;
15057 dw_loc_descr_ref l4jump, l4label;
15058 rtx msb;
15059
15060 if (GET_MODE (XEXP (rtl, 0)) != mode)
15061 return NULL;
15062
15063 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15064 VAR_INIT_STATUS_INITIALIZED);
15065 if (op0 == NULL)
15066 return NULL;
15067 ret = op0;
15068 if (GET_CODE (rtl) == CLZ)
15069 {
15070 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15071 valv = GET_MODE_BITSIZE (mode);
15072 }
15073 else if (GET_CODE (rtl) == FFS)
15074 valv = 0;
15075 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15076 valv = GET_MODE_BITSIZE (mode);
15077 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15078 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15079 add_loc_descr (&ret, l1jump);
15080 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15081 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15082 VAR_INIT_STATUS_INITIALIZED);
15083 if (tmp == NULL)
15084 return NULL;
15085 add_loc_descr (&ret, tmp);
15086 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15087 add_loc_descr (&ret, l4jump);
15088 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15089 ? const1_rtx : const0_rtx,
15090 mode, mem_mode,
15091 VAR_INIT_STATUS_INITIALIZED);
15092 if (l1label == NULL)
15093 return NULL;
15094 add_loc_descr (&ret, l1label);
15095 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15096 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15097 add_loc_descr (&ret, l2label);
15098 if (GET_CODE (rtl) != CLZ)
15099 msb = const1_rtx;
15100 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15101 msb = GEN_INT (HOST_WIDE_INT_1U
15102 << (GET_MODE_BITSIZE (mode) - 1));
15103 else
15104 msb = immed_wide_int_const
15105 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15106 GET_MODE_PRECISION (mode)), mode);
15107 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15108 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15109 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15110 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15111 else
15112 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15113 VAR_INIT_STATUS_INITIALIZED);
15114 if (tmp == NULL)
15115 return NULL;
15116 add_loc_descr (&ret, tmp);
15117 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15118 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15119 add_loc_descr (&ret, l3jump);
15120 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15121 VAR_INIT_STATUS_INITIALIZED);
15122 if (tmp == NULL)
15123 return NULL;
15124 add_loc_descr (&ret, tmp);
15125 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15126 ? DW_OP_shl : DW_OP_shr, 0, 0));
15127 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15128 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15129 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15130 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15131 add_loc_descr (&ret, l2jump);
15132 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15133 add_loc_descr (&ret, l3label);
15134 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15135 add_loc_descr (&ret, l4label);
15136 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15137 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15138 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15139 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15140 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15141 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15142 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15143 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15144 return ret;
15145 }
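/* A short trace of the CLZ loop above for an 8-bit mode and the input
   0x20: constMSB is 0x80, the counter starts at 0, and the value is
   shifted left while its top bit is clear -- 0x20 -> 0x40 -> 0x80 --
   incrementing the counter twice, so the expression leaves 2 on the
   stack, which is indeed the number of leading zeros of 0x20 in 8
   bits.  */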
15146
15147 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15148 const1 is DW_OP_lit1 or corresponding typed constant):
15149 const0 DW_OP_swap
15150 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15151 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15152 L2: DW_OP_drop
15153
15154 PARITY is similar:
15155 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15156 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15157 L2: DW_OP_drop */
15158
15159 static dw_loc_descr_ref
15160 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15161 machine_mode mem_mode)
15162 {
15163 dw_loc_descr_ref op0, ret, tmp;
15164 dw_loc_descr_ref l1jump, l1label;
15165 dw_loc_descr_ref l2jump, l2label;
15166
15167 if (GET_MODE (XEXP (rtl, 0)) != mode)
15168 return NULL;
15169
15170 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15171 VAR_INIT_STATUS_INITIALIZED);
15172 if (op0 == NULL)
15173 return NULL;
15174 ret = op0;
15175 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15176 VAR_INIT_STATUS_INITIALIZED);
15177 if (tmp == NULL)
15178 return NULL;
15179 add_loc_descr (&ret, tmp);
15180 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15181 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15182 add_loc_descr (&ret, l1label);
15183 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15184 add_loc_descr (&ret, l2jump);
15185 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15186 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15187 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15188 VAR_INIT_STATUS_INITIALIZED);
15189 if (tmp == NULL)
15190 return NULL;
15191 add_loc_descr (&ret, tmp);
15192 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15193 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15194 ? DW_OP_plus : DW_OP_xor, 0, 0));
15195 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15196 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15197 VAR_INIT_STATUS_INITIALIZED);
15198 add_loc_descr (&ret, tmp);
15199 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15200 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15201 add_loc_descr (&ret, l1jump);
15202 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15203 add_loc_descr (&ret, l2label);
15204 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15205 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15206 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15207 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15208 return ret;
15209 }
15210
15211 /* BSWAP (constS is initial shift count, either 56 or 24):
15212 constS const0
15213 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15214 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15215 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15216 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15217 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
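/* The initial shift count selects where the operand's least significant
   byte lands in the result.  For a 32-bit value the loop starts at
   shift 24: the first iteration extracts the low byte and ORs it into
   the top byte of the result, the next iteration moves byte 1 into
   byte 2, and so on until the shift count reaches zero; the final
   DW_OP_drop DW_OP_swap DW_OP_drop at L2 discards the shift count and
   the original operand, leaving the byte-swapped result.  */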
15218
15219 static dw_loc_descr_ref
15220 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15221 machine_mode mem_mode)
15222 {
15223 dw_loc_descr_ref op0, ret, tmp;
15224 dw_loc_descr_ref l1jump, l1label;
15225 dw_loc_descr_ref l2jump, l2label;
15226
15227 if (BITS_PER_UNIT != 8
15228 || (GET_MODE_BITSIZE (mode) != 32
15229 && GET_MODE_BITSIZE (mode) != 64))
15230 return NULL;
15231
15232 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15233 VAR_INIT_STATUS_INITIALIZED);
15234 if (op0 == NULL)
15235 return NULL;
15236
15237 ret = op0;
15238 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15239 mode, mem_mode,
15240 VAR_INIT_STATUS_INITIALIZED);
15241 if (tmp == NULL)
15242 return NULL;
15243 add_loc_descr (&ret, tmp);
15244 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15245 VAR_INIT_STATUS_INITIALIZED);
15246 if (tmp == NULL)
15247 return NULL;
15248 add_loc_descr (&ret, tmp);
15249 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15250 add_loc_descr (&ret, l1label);
15251 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15252 mode, mem_mode,
15253 VAR_INIT_STATUS_INITIALIZED);
15254 add_loc_descr (&ret, tmp);
15255 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15256 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15257 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15258 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15259 VAR_INIT_STATUS_INITIALIZED);
15260 if (tmp == NULL)
15261 return NULL;
15262 add_loc_descr (&ret, tmp);
15263 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15264 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15265 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15266 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15267 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15268 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15269 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15270 VAR_INIT_STATUS_INITIALIZED);
15271 add_loc_descr (&ret, tmp);
15272 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15273 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15274 add_loc_descr (&ret, l2jump);
15275 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15276 VAR_INIT_STATUS_INITIALIZED);
15277 add_loc_descr (&ret, tmp);
15278 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15279 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15280 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15281 add_loc_descr (&ret, l1jump);
15282 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15283 add_loc_descr (&ret, l2label);
15284 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15285 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15286 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15287 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15288 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15289 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15290 return ret;
15291 }
15292
15293 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15294 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15295 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15296 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15297
15298 ROTATERT is similar:
15299 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15300 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15301 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
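/* The constMASK operations are emitted only when the mode is narrower
   than the DWARF address size: they truncate the shifted value and the
   value about to be shifted right to the mode's width, so that bits
   above the mode (e.g. from sign extension on the expression stack)
   cannot leak into the DW_OP_or that combines the two halves of the
   rotate.  */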
15302
15303 static dw_loc_descr_ref
15304 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15305 machine_mode mem_mode)
15306 {
15307 rtx rtlop1 = XEXP (rtl, 1);
15308 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15309 int i;
15310
15311 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15312 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15313 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15314 VAR_INIT_STATUS_INITIALIZED);
15315 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15316 VAR_INIT_STATUS_INITIALIZED);
15317 if (op0 == NULL || op1 == NULL)
15318 return NULL;
15319 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15320 for (i = 0; i < 2; i++)
15321 {
15322 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15323 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15324 mode, mem_mode,
15325 VAR_INIT_STATUS_INITIALIZED);
15326 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15327 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15328 ? DW_OP_const4u
15329 : HOST_BITS_PER_WIDE_INT == 64
15330 ? DW_OP_const8u : DW_OP_constu,
15331 GET_MODE_MASK (mode), 0);
15332 else
15333 mask[i] = NULL;
15334 if (mask[i] == NULL)
15335 return NULL;
15336 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15337 }
15338 ret = op0;
15339 add_loc_descr (&ret, op1);
15340 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15341 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15342 if (GET_CODE (rtl) == ROTATERT)
15343 {
15344 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15345 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15346 GET_MODE_BITSIZE (mode), 0));
15347 }
15348 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15349 if (mask[0] != NULL)
15350 add_loc_descr (&ret, mask[0]);
15351 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15352 if (mask[1] != NULL)
15353 {
15354 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15355 add_loc_descr (&ret, mask[1]);
15356 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15357 }
15358 if (GET_CODE (rtl) == ROTATE)
15359 {
15360 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15361 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15362 GET_MODE_BITSIZE (mode), 0));
15363 }
15364 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15365 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15366 return ret;
15367 }
15368
15369 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15370 for DEBUG_PARAMETER_REF RTL. */
15371
15372 static dw_loc_descr_ref
15373 parameter_ref_descriptor (rtx rtl)
15374 {
15375 dw_loc_descr_ref ret;
15376 dw_die_ref ref;
15377
15378 if (dwarf_strict)
15379 return NULL;
15380 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15381 /* With LTO during LTRANS we get the late DIE that refers to the early
15382 DIE, thus we add another indirection here. This seems to confuse
15383 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15384 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15385 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15386 if (ref)
15387 {
15388 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15389 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15390 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15391 }
15392 else
15393 {
15394 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15395 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15396 }
15397 return ret;
15398 }
15399
15400 /* The following routine converts the RTL for a variable or parameter
15401 (resident in memory) into an equivalent Dwarf representation of a
15402 mechanism for getting the address of that same variable onto the top of a
15403 hypothetical "address evaluation" stack.
15404
15405 When creating memory location descriptors, we are effectively transforming
15406 the RTL for a memory-resident object into its Dwarf postfix expression
15407 equivalent. This routine recursively descends an RTL tree, turning
15408 it into Dwarf postfix code as it goes.
15409
15410 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15411
15412 MEM_MODE is the mode of the memory reference, needed to handle some
15413 autoincrement addressing modes.
15414
15415 Return 0 if we can't represent the location. */
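/* For example, RTL such as (plus (reg fp) (const_int -16)) typically
   becomes a single base register operation with offset -16 (see
   based_loc_descr), while a MEM whose address is such an expression
   becomes that descriptor followed by a DW_OP_deref of the appropriate
   size.  */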
15416
15417 dw_loc_descr_ref
15418 mem_loc_descriptor (rtx rtl, machine_mode mode,
15419 machine_mode mem_mode,
15420 enum var_init_status initialized)
15421 {
15422 dw_loc_descr_ref mem_loc_result = NULL;
15423 enum dwarf_location_atom op;
15424 dw_loc_descr_ref op0, op1;
15425 rtx inner = NULL_RTX;
15426 poly_int64 offset;
15427
15428 if (mode == VOIDmode)
15429 mode = GET_MODE (rtl);
15430
15431 /* Note that for a dynamically sized array, the location we will generate a
15432 description of here will be the lowest numbered location which is
15433 actually within the array. That's *not* necessarily the same as the
15434 zeroth element of the array. */
15435
15436 rtl = targetm.delegitimize_address (rtl);
15437
15438 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15439 return NULL;
15440
15441 scalar_int_mode int_mode, inner_mode, op1_mode;
15442 switch (GET_CODE (rtl))
15443 {
15444 case POST_INC:
15445 case POST_DEC:
15446 case POST_MODIFY:
15447 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15448
15449 case SUBREG:
15450 /* The case of a subreg may arise when we have a local (register)
15451 variable or a formal (register) parameter which doesn't quite fill
15452 up an entire register. For now, just assume that it is
15453 legitimate to make the Dwarf info refer to the whole register which
15454 contains the given subreg. */
15455 if (!subreg_lowpart_p (rtl))
15456 break;
15457 inner = SUBREG_REG (rtl);
15458 /* FALLTHRU */
15459 case TRUNCATE:
15460 if (inner == NULL_RTX)
15461 inner = XEXP (rtl, 0);
15462 if (is_a <scalar_int_mode> (mode, &int_mode)
15463 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15464 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15465 #ifdef POINTERS_EXTEND_UNSIGNED
15466 || (int_mode == Pmode && mem_mode != VOIDmode)
15467 #endif
15468 )
15469 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15470 {
15471 mem_loc_result = mem_loc_descriptor (inner,
15472 inner_mode,
15473 mem_mode, initialized);
15474 break;
15475 }
15476 if (dwarf_strict && dwarf_version < 5)
15477 break;
15478 if (is_a <scalar_int_mode> (mode, &int_mode)
15479 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15480 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15481 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15482 {
15483 dw_die_ref type_die;
15484 dw_loc_descr_ref cvt;
15485
15486 mem_loc_result = mem_loc_descriptor (inner,
15487 GET_MODE (inner),
15488 mem_mode, initialized);
15489 if (mem_loc_result == NULL)
15490 break;
15491 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15492 if (type_die == NULL)
15493 {
15494 mem_loc_result = NULL;
15495 break;
15496 }
15497 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15498 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15499 else
15500 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15501 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15502 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15503 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15504 add_loc_descr (&mem_loc_result, cvt);
15505 if (is_a <scalar_int_mode> (mode, &int_mode)
15506 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15507 {
15508 /* Convert it to untyped afterwards. */
15509 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15510 add_loc_descr (&mem_loc_result, cvt);
15511 }
15512 }
15513 break;
15514
15515 case REG:
15516 if (!is_a <scalar_int_mode> (mode, &int_mode)
15517 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15518 && rtl != arg_pointer_rtx
15519 && rtl != frame_pointer_rtx
15520 #ifdef POINTERS_EXTEND_UNSIGNED
15521 && (int_mode != Pmode || mem_mode == VOIDmode)
15522 #endif
15523 ))
15524 {
15525 dw_die_ref type_die;
15526 unsigned int dbx_regnum;
15527
15528 if (dwarf_strict && dwarf_version < 5)
15529 break;
15530 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15531 break;
15532 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15533 if (type_die == NULL)
15534 break;
15535
15536 dbx_regnum = dbx_reg_number (rtl);
15537 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15538 break;
15539 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15540 dbx_regnum, 0);
15541 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15542 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15543 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15544 break;
15545 }
15546 /* Whenever a register number forms a part of the description of the
15547 method for calculating the (dynamic) address of a memory resident
15548 object, DWARF rules require the register number be referred to as
15549 a "base register". This distinction is not based in any way upon
15550 what category of register the hardware believes the given register
15551 belongs to. This is strictly DWARF terminology we're dealing with
15552 here. Note that in cases where the location of a memory-resident
15553 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15554 OP_CONST (0)) the actual DWARF location descriptor that we generate
15555 may just be OP_BASEREG (basereg). This may look deceptively like
15556 the object in question was allocated to a register (rather than in
15557 memory) so DWARF consumers need to be aware of the subtle
15558 distinction between OP_REG and OP_BASEREG. */
15559 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15560 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15561 else if (stack_realign_drap
15562 && crtl->drap_reg
15563 && crtl->args.internal_arg_pointer == rtl
15564 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15565 {
15566 /* If RTL is internal_arg_pointer, which has been optimized
15567 out, use DRAP instead. */
15568 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15569 VAR_INIT_STATUS_INITIALIZED);
15570 }
15571 break;
15572
15573 case SIGN_EXTEND:
15574 case ZERO_EXTEND:
15575 if (!is_a <scalar_int_mode> (mode, &int_mode)
15576 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15577 break;
15578 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15579 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15580 if (op0 == 0)
15581 break;
15582 else if (GET_CODE (rtl) == ZERO_EXTEND
15583 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15584 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15585 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15586 to expand zero extend as two shifts instead of
15587 masking. */
15588 && GET_MODE_SIZE (inner_mode) <= 4)
15589 {
15590 mem_loc_result = op0;
15591 add_loc_descr (&mem_loc_result,
15592 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15593 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15594 }
15595 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15596 {
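/* Extend by shifting the value up so that the top bit of the inner
   mode reaches the top of the DWARF word, then shift back down
   arithmetically for SIGN_EXTEND or logically for ZERO_EXTEND, filling
   the upper bits with copies of the sign bit or with zeros.  */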
15597 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15598 shift *= BITS_PER_UNIT;
15599 if (GET_CODE (rtl) == SIGN_EXTEND)
15600 op = DW_OP_shra;
15601 else
15602 op = DW_OP_shr;
15603 mem_loc_result = op0;
15604 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15605 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15606 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15607 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15608 }
15609 else if (!dwarf_strict || dwarf_version >= 5)
15610 {
15611 dw_die_ref type_die1, type_die2;
15612 dw_loc_descr_ref cvt;
15613
15614 type_die1 = base_type_for_mode (inner_mode,
15615 GET_CODE (rtl) == ZERO_EXTEND);
15616 if (type_die1 == NULL)
15617 break;
15618 type_die2 = base_type_for_mode (int_mode, 1);
15619 if (type_die2 == NULL)
15620 break;
15621 mem_loc_result = op0;
15622 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15623 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15624 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15625 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15626 add_loc_descr (&mem_loc_result, cvt);
15627 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15628 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15629 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15630 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15631 add_loc_descr (&mem_loc_result, cvt);
15632 }
15633 break;
15634
15635 case MEM:
15636 {
15637 rtx new_rtl = avoid_constant_pool_reference (rtl);
15638 if (new_rtl != rtl)
15639 {
15640 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15641 initialized);
15642 if (mem_loc_result != NULL)
15643 return mem_loc_result;
15644 }
15645 }
15646 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15647 get_address_mode (rtl), mode,
15648 VAR_INIT_STATUS_INITIALIZED);
15649 if (mem_loc_result == NULL)
15650 mem_loc_result = tls_mem_loc_descriptor (rtl);
15651 if (mem_loc_result != NULL)
15652 {
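/* Dereference the address just computed: modes wider than the DWARF
   address size (or non-integral modes) need the typed DW_OP_deref_type,
   a mode of exactly the address size uses plain DW_OP_deref, and
   narrower modes use DW_OP_deref_size.  */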
15653 if (!is_a <scalar_int_mode> (mode, &int_mode)
15654 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15655 {
15656 dw_die_ref type_die;
15657 dw_loc_descr_ref deref;
15658 HOST_WIDE_INT size;
15659
15660 if (dwarf_strict && dwarf_version < 5)
15661 return NULL;
15662 if (!GET_MODE_SIZE (mode).is_constant (&size))
15663 return NULL;
15664 type_die
15665 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15666 if (type_die == NULL)
15667 return NULL;
15668 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15669 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15670 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15671 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15672 add_loc_descr (&mem_loc_result, deref);
15673 }
15674 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15675 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15676 else
15677 add_loc_descr (&mem_loc_result,
15678 new_loc_descr (DW_OP_deref_size,
15679 GET_MODE_SIZE (int_mode), 0));
15680 }
15681 break;
15682
15683 case LO_SUM:
15684 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15685
15686 case LABEL_REF:
15687 /* Some ports can transform a symbol ref into a label ref, because
15688 the symbol ref is too far away and has to be dumped into a constant
15689 pool. */
15690 case CONST:
15691 case SYMBOL_REF:
15692 case UNSPEC:
15693 if (!is_a <scalar_int_mode> (mode, &int_mode)
15694 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15695 #ifdef POINTERS_EXTEND_UNSIGNED
15696 && (int_mode != Pmode || mem_mode == VOIDmode)
15697 #endif
15698 ))
15699 break;
15700
15701 if (GET_CODE (rtl) == UNSPEC)
15702 {
15703 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15704 can't express it in the debug info. This can happen e.g. with some
15705 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15706 approves. */
15707 bool not_ok = false;
15708
15709 if (!in_const_p)
15710 break;
15711
15712 subrtx_var_iterator::array_type array;
15713 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15714 if (*iter != rtl && !CONSTANT_P (*iter))
15715 {
15716 not_ok = true;
15717 break;
15718 }
15719
15720 if (not_ok)
15721 break;
15722
15723 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15724 if (!const_ok_for_output_1 (*iter))
15725 {
15726 not_ok = true;
15727 break;
15728 }
15729
15730 if (not_ok)
15731 break;
15732
15733 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15734 goto symref;
15735 }
15736
15737 if (GET_CODE (rtl) == SYMBOL_REF
15738 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15739 {
15740 dw_loc_descr_ref temp;
15741
15742 /* If this is not defined, we have no way to emit the data. */
15743 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15744 break;
15745
15746 temp = new_addr_loc_descr (rtl, dtprel_true);
15747
15748 /* We check for DWARF 5 here because gdb did not implement
15749 DW_OP_form_tls_address until after 7.12. */
15750 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15751 ? DW_OP_form_tls_address
15752 : DW_OP_GNU_push_tls_address),
15753 0, 0);
15754 add_loc_descr (&mem_loc_result, temp);
15755
15756 break;
15757 }
15758
15759 if (!const_ok_for_output (rtl))
15760 {
15761 if (GET_CODE (rtl) == CONST)
15762 switch (GET_CODE (XEXP (rtl, 0)))
15763 {
15764 case NOT:
15765 op = DW_OP_not;
15766 goto try_const_unop;
15767 case NEG:
15768 op = DW_OP_neg;
15769 goto try_const_unop;
15770 try_const_unop:
15771 rtx arg;
15772 arg = XEXP (XEXP (rtl, 0), 0);
15773 if (!CONSTANT_P (arg))
15774 arg = gen_rtx_CONST (int_mode, arg);
15775 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15776 initialized);
15777 if (op0)
15778 {
15779 mem_loc_result = op0;
15780 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15781 }
15782 break;
15783 default:
15784 {
15785 bool save_in_const_p = in_const_p;
15786 in_const_p = true;
15787 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15788 mem_mode, initialized);
15789 in_const_p = save_in_const_p;
15790 }
15791 break;
15792 }
15793 break;
15794 }
15795
15796 symref:
15797 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15798 vec_safe_push (used_rtx_array, rtl);
15799 break;
15800
15801 case CONCAT:
15802 case CONCATN:
15803 case VAR_LOCATION:
15804 case DEBUG_IMPLICIT_PTR:
15805 expansion_failed (NULL_TREE, rtl,
15806 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15807 return 0;
15808
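/* DW_OP_entry_value (or DW_OP_GNU_entry_value before DWARF 5) wraps a
   location expression that is evaluated as of the entry to the current
   function; only a plain register, or a memory reference whose address
   is a single register, is accepted here.  */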
15809 case ENTRY_VALUE:
15810 if (dwarf_strict && dwarf_version < 5)
15811 return NULL;
15812 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15813 {
15814 if (!is_a <scalar_int_mode> (mode, &int_mode)
15815 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15816 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15817 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15818 else
15819 {
15820 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15821 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15822 return NULL;
15823 op0 = one_reg_loc_descriptor (dbx_regnum,
15824 VAR_INIT_STATUS_INITIALIZED);
15825 }
15826 }
15827 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15828 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15829 {
15830 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15831 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15832 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15833 return NULL;
15834 }
15835 else
15836 gcc_unreachable ();
15837 if (op0 == NULL)
15838 return NULL;
15839 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15840 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15841 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15842 break;
15843
15844 case DEBUG_PARAMETER_REF:
15845 mem_loc_result = parameter_ref_descriptor (rtl);
15846 break;
15847
15848 case PRE_MODIFY:
15849 /* Extract the PLUS expression nested inside and fall into
15850 PLUS code below. */
15851 rtl = XEXP (rtl, 1);
15852 goto plus;
15853
15854 case PRE_INC:
15855 case PRE_DEC:
15856 /* Turn these into a PLUS expression and fall into the PLUS code
15857 below. */
15858 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15859 gen_int_mode (GET_CODE (rtl) == PRE_INC
15860 ? GET_MODE_UNIT_SIZE (mem_mode)
15861 : -GET_MODE_UNIT_SIZE (mem_mode),
15862 mode));
15863
15864 /* fall through */
15865
15866 case PLUS:
15867 plus:
15868 if (is_based_loc (rtl)
15869 && is_a <scalar_int_mode> (mode, &int_mode)
15870 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15871 || XEXP (rtl, 0) == arg_pointer_rtx
15872 || XEXP (rtl, 0) == frame_pointer_rtx))
15873 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15874 INTVAL (XEXP (rtl, 1)),
15875 VAR_INIT_STATUS_INITIALIZED);
15876 else
15877 {
15878 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15879 VAR_INIT_STATUS_INITIALIZED);
15880 if (mem_loc_result == 0)
15881 break;
15882
15883 if (CONST_INT_P (XEXP (rtl, 1))
15884 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15885 <= DWARF2_ADDR_SIZE))
15886 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15887 else
15888 {
15889 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15890 VAR_INIT_STATUS_INITIALIZED);
15891 if (op1 == 0)
15892 return NULL;
15893 add_loc_descr (&mem_loc_result, op1);
15894 add_loc_descr (&mem_loc_result,
15895 new_loc_descr (DW_OP_plus, 0, 0));
15896 }
15897 }
15898 break;
15899
15900 /* If a pseudo-reg is optimized away, it is possible for it to
15901 be replaced with a MEM containing a multiply or shift. */
15902 case MINUS:
15903 op = DW_OP_minus;
15904 goto do_binop;
15905
15906 case MULT:
15907 op = DW_OP_mul;
15908 goto do_binop;
15909
15910 case DIV:
15911 if ((!dwarf_strict || dwarf_version >= 5)
15912 && is_a <scalar_int_mode> (mode, &int_mode)
15913 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15914 {
15915 mem_loc_result = typed_binop (DW_OP_div, rtl,
15916 base_type_for_mode (mode, 0),
15917 int_mode, mem_mode);
15918 break;
15919 }
15920 op = DW_OP_div;
15921 goto do_binop;
15922
15923 case UMOD:
15924 op = DW_OP_mod;
15925 goto do_binop;
15926
15927 case ASHIFT:
15928 op = DW_OP_shl;
15929 goto do_shift;
15930
15931 case ASHIFTRT:
15932 op = DW_OP_shra;
15933 goto do_shift;
15934
15935 case LSHIFTRT:
15936 op = DW_OP_shr;
15937 goto do_shift;
15938
15939 do_shift:
15940 if (!is_a <scalar_int_mode> (mode, &int_mode))
15941 break;
15942 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15943 VAR_INIT_STATUS_INITIALIZED);
15944 {
15945 rtx rtlop1 = XEXP (rtl, 1);
15946 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15947 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15948 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15949 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15950 VAR_INIT_STATUS_INITIALIZED);
15951 }
15952
15953 if (op0 == 0 || op1 == 0)
15954 break;
15955
15956 mem_loc_result = op0;
15957 add_loc_descr (&mem_loc_result, op1);
15958 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15959 break;
15960
15961 case AND:
15962 op = DW_OP_and;
15963 goto do_binop;
15964
15965 case IOR:
15966 op = DW_OP_or;
15967 goto do_binop;
15968
15969 case XOR:
15970 op = DW_OP_xor;
15971 goto do_binop;
15972
15973 do_binop:
15974 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15975 VAR_INIT_STATUS_INITIALIZED);
15976 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15977 VAR_INIT_STATUS_INITIALIZED);
15978
15979 if (op0 == 0 || op1 == 0)
15980 break;
15981
15982 mem_loc_result = op0;
15983 add_loc_descr (&mem_loc_result, op1);
15984 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15985 break;
15986
15987 case MOD:
15988 if ((!dwarf_strict || dwarf_version >= 5)
15989 && is_a <scalar_int_mode> (mode, &int_mode)
15990 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15991 {
15992 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15993 base_type_for_mode (mode, 0),
15994 int_mode, mem_mode);
15995 break;
15996 }
15997
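/* DW_OP_mod is already used for the unsigned UMOD case above; signed
   MOD is instead open-coded below as op0 - (op0 / op1) * op1 using the
   signed DW_OP_div.  */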
15998 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15999 VAR_INIT_STATUS_INITIALIZED);
16000 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16001 VAR_INIT_STATUS_INITIALIZED);
16002
16003 if (op0 == 0 || op1 == 0)
16004 break;
16005
16006 mem_loc_result = op0;
16007 add_loc_descr (&mem_loc_result, op1);
16008 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16009 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16010 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16011 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16012 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
16013 break;
16014
16015 case UDIV:
16016 if ((!dwarf_strict || dwarf_version >= 5)
16017 && is_a <scalar_int_mode> (mode, &int_mode))
16018 {
16019 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16020 {
16021 op = DW_OP_div;
16022 goto do_binop;
16023 }
16024 mem_loc_result = typed_binop (DW_OP_div, rtl,
16025 base_type_for_mode (int_mode, 1),
16026 int_mode, mem_mode);
16027 }
16028 break;
16029
16030 case NOT:
16031 op = DW_OP_not;
16032 goto do_unop;
16033
16034 case ABS:
16035 op = DW_OP_abs;
16036 goto do_unop;
16037
16038 case NEG:
16039 op = DW_OP_neg;
16040 goto do_unop;
16041
16042 do_unop:
16043 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16044 VAR_INIT_STATUS_INITIALIZED);
16045
16046 if (op0 == 0)
16047 break;
16048
16049 mem_loc_result = op0;
16050 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16051 break;
16052
16053 case CONST_INT:
16054 if (!is_a <scalar_int_mode> (mode, &int_mode)
16055 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16056 #ifdef POINTERS_EXTEND_UNSIGNED
16057 || (int_mode == Pmode
16058 && mem_mode != VOIDmode
16059 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16060 #endif
16061 )
16062 {
16063 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16064 break;
16065 }
16066 if ((!dwarf_strict || dwarf_version >= 5)
16067 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16068 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16069 {
16070 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16071 scalar_int_mode amode;
16072 if (type_die == NULL)
16073 return NULL;
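/* For a non-negative value that also fits in the address-sized mode,
   prefer emitting an ordinary integer literal followed by
   DW_OP_convert when that sequence encodes in fewer bytes than
   DW_OP_const_type with a full-width constant operand.  */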
16074 if (INTVAL (rtl) >= 0
16075 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16076 .exists (&amode))
16077 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16078 /* const DW_OP_convert <XXX> vs.
16079 DW_OP_const_type <XXX, 1, const>. */
16080 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16081 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16082 {
16083 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16084 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16085 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16086 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16087 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16088 add_loc_descr (&mem_loc_result, op0);
16089 return mem_loc_result;
16090 }
16091 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16092 INTVAL (rtl));
16093 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16094 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16095 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16096 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16097 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16098 else
16099 {
16100 mem_loc_result->dw_loc_oprnd2.val_class
16101 = dw_val_class_const_double;
16102 mem_loc_result->dw_loc_oprnd2.v.val_double
16103 = double_int::from_shwi (INTVAL (rtl));
16104 }
16105 }
16106 break;
16107
16108 case CONST_DOUBLE:
16109 if (!dwarf_strict || dwarf_version >= 5)
16110 {
16111 dw_die_ref type_die;
16112
16113 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16114 CONST_DOUBLE rtx could represent either a large integer
16115 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16116 the value is always a floating point constant.
16117
16118 When it is an integer, a CONST_DOUBLE is used whenever
16119 the constant requires 2 HWIs to be adequately represented.
16120 We output CONST_DOUBLEs as blocks. */
16121 if (mode == VOIDmode
16122 || (GET_MODE (rtl) == VOIDmode
16123 && maybe_ne (GET_MODE_BITSIZE (mode),
16124 HOST_BITS_PER_DOUBLE_INT)))
16125 break;
16126 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16127 if (type_die == NULL)
16128 return NULL;
16129 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16130 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16131 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16132 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16133 #if TARGET_SUPPORTS_WIDE_INT == 0
16134 if (!SCALAR_FLOAT_MODE_P (mode))
16135 {
16136 mem_loc_result->dw_loc_oprnd2.val_class
16137 = dw_val_class_const_double;
16138 mem_loc_result->dw_loc_oprnd2.v.val_double
16139 = rtx_to_double_int (rtl);
16140 }
16141 else
16142 #endif
16143 {
16144 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16145 unsigned int length = GET_MODE_SIZE (float_mode);
16146 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16147
16148 insert_float (rtl, array);
16149 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16150 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16151 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16152 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16153 }
16154 }
16155 break;
16156
16157 case CONST_WIDE_INT:
16158 if (!dwarf_strict || dwarf_version >= 5)
16159 {
16160 dw_die_ref type_die;
16161
16162 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16163 if (type_die == NULL)
16164 return NULL;
16165 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16166 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16167 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16168 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16169 mem_loc_result->dw_loc_oprnd2.val_class
16170 = dw_val_class_wide_int;
16171 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16172 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16173 }
16174 break;
16175
16176 case CONST_POLY_INT:
16177 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16178 break;
16179
16180 case EQ:
16181 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16182 break;
16183
16184 case GE:
16185 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16186 break;
16187
16188 case GT:
16189 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16190 break;
16191
16192 case LE:
16193 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16194 break;
16195
16196 case LT:
16197 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16198 break;
16199
16200 case NE:
16201 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16202 break;
16203
16204 case GEU:
16205 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16206 break;
16207
16208 case GTU:
16209 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16210 break;
16211
16212 case LEU:
16213 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16214 break;
16215
16216 case LTU:
16217 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16218 break;
16219
16220 case UMIN:
16221 case UMAX:
16222 if (!SCALAR_INT_MODE_P (mode))
16223 break;
16224 /* FALLTHRU */
16225 case SMIN:
16226 case SMAX:
16227 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16228 break;
16229
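/* Extract the bit-field by shifting it up to the most significant end
   of the DWARF word with DW_OP_shl and then shifting it back down so
   that only the field remains, logically for ZERO_EXTRACT or
   arithmetically for SIGN_EXTRACT.  */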
16230 case ZERO_EXTRACT:
16231 case SIGN_EXTRACT:
16232 if (CONST_INT_P (XEXP (rtl, 1))
16233 && CONST_INT_P (XEXP (rtl, 2))
16234 && is_a <scalar_int_mode> (mode, &int_mode)
16235 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16236 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16237 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16238 && ((unsigned) INTVAL (XEXP (rtl, 1))
16239 + (unsigned) INTVAL (XEXP (rtl, 2))
16240 <= GET_MODE_BITSIZE (int_mode)))
16241 {
16242 int shift, size;
16243 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16244 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16245 if (op0 == 0)
16246 break;
16247 if (GET_CODE (rtl) == SIGN_EXTRACT)
16248 op = DW_OP_shra;
16249 else
16250 op = DW_OP_shr;
16251 mem_loc_result = op0;
16252 size = INTVAL (XEXP (rtl, 1));
16253 shift = INTVAL (XEXP (rtl, 2));
16254 if (BITS_BIG_ENDIAN)
16255 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16256 if (shift + size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16257 {
16258 add_loc_descr (&mem_loc_result,
16259 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT
16260 - shift - size));
16261 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16262 }
16263 if (size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16264 {
16265 add_loc_descr (&mem_loc_result,
16266 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT - size));
16267 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16268 }
16269 }
16270 break;
16271
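/* The condition selects between the two arms on the DWARF stack: op1
   and op2 are pushed, then the condition; DW_OP_bra jumps past the
   DW_OP_swap when the condition is nonzero, so the final DW_OP_drop
   discards op2 (leaving op1) in that case and discards op1 otherwise.  */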
16272 case IF_THEN_ELSE:
16273 {
16274 dw_loc_descr_ref op2, bra_node, drop_node;
16275 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16276 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16277 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16278 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16279 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16280 VAR_INIT_STATUS_INITIALIZED);
16281 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16282 VAR_INIT_STATUS_INITIALIZED);
16283 if (op0 == NULL || op1 == NULL || op2 == NULL)
16284 break;
16285
16286 mem_loc_result = op1;
16287 add_loc_descr (&mem_loc_result, op2);
16288 add_loc_descr (&mem_loc_result, op0);
16289 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16290 add_loc_descr (&mem_loc_result, bra_node);
16291 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16292 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16293 add_loc_descr (&mem_loc_result, drop_node);
16294 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16295 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16296 }
16297 break;
16298
16299 case FLOAT_EXTEND:
16300 case FLOAT_TRUNCATE:
16301 case FLOAT:
16302 case UNSIGNED_FLOAT:
16303 case FIX:
16304 case UNSIGNED_FIX:
16305 if (!dwarf_strict || dwarf_version >= 5)
16306 {
16307 dw_die_ref type_die;
16308 dw_loc_descr_ref cvt;
16309
16310 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16311 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16312 if (op0 == NULL)
16313 break;
16314 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16315 && (GET_CODE (rtl) == FLOAT
16316 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16317 {
16318 type_die = base_type_for_mode (int_mode,
16319 GET_CODE (rtl) == UNSIGNED_FLOAT);
16320 if (type_die == NULL)
16321 break;
16322 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16323 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16324 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16325 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16326 add_loc_descr (&op0, cvt);
16327 }
16328 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16329 if (type_die == NULL)
16330 break;
16331 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16332 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16333 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16334 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16335 add_loc_descr (&op0, cvt);
16336 if (is_a <scalar_int_mode> (mode, &int_mode)
16337 && (GET_CODE (rtl) == FIX
16338 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16339 {
16340 op0 = convert_descriptor_to_mode (int_mode, op0);
16341 if (op0 == NULL)
16342 break;
16343 }
16344 mem_loc_result = op0;
16345 }
16346 break;
16347
16348 case CLZ:
16349 case CTZ:
16350 case FFS:
16351 if (is_a <scalar_int_mode> (mode, &int_mode))
16352 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16353 break;
16354
16355 case POPCOUNT:
16356 case PARITY:
16357 if (is_a <scalar_int_mode> (mode, &int_mode))
16358 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16359 break;
16360
16361 case BSWAP:
16362 if (is_a <scalar_int_mode> (mode, &int_mode))
16363 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16364 break;
16365
16366 case ROTATE:
16367 case ROTATERT:
16368 if (is_a <scalar_int_mode> (mode, &int_mode))
16369 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16370 break;
16371
16372 case COMPARE:
16373 /* In theory, we could implement the above. */
16374 /* DWARF cannot represent the unsigned compare operations
16375 natively. */
16376 case SS_MULT:
16377 case US_MULT:
16378 case SS_DIV:
16379 case US_DIV:
16380 case SS_PLUS:
16381 case US_PLUS:
16382 case SS_MINUS:
16383 case US_MINUS:
16384 case SS_NEG:
16385 case US_NEG:
16386 case SS_ABS:
16387 case SS_ASHIFT:
16388 case US_ASHIFT:
16389 case SS_TRUNCATE:
16390 case US_TRUNCATE:
16391 case UNORDERED:
16392 case ORDERED:
16393 case UNEQ:
16394 case UNGE:
16395 case UNGT:
16396 case UNLE:
16397 case UNLT:
16398 case LTGT:
16399 case FRACT_CONVERT:
16400 case UNSIGNED_FRACT_CONVERT:
16401 case SAT_FRACT:
16402 case UNSIGNED_SAT_FRACT:
16403 case SQRT:
16404 case ASM_OPERANDS:
16405 case VEC_MERGE:
16406 case VEC_SELECT:
16407 case VEC_CONCAT:
16408 case VEC_DUPLICATE:
16409 case VEC_SERIES:
16410 case HIGH:
16411 case FMA:
16412 case STRICT_LOW_PART:
16413 case CONST_VECTOR:
16414 case CONST_FIXED:
16415 case CLRSB:
16416 case CLOBBER:
16417 break;
16418
16419 case CONST_STRING:
16420 resolve_one_addr (&rtl);
16421 goto symref;
16422
16423 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16424 the expression. An UNSPEC rtx represents a raw DWARF operation;
16425 new_loc_descr is called for it to build the operation directly.
16426 Otherwise mem_loc_descriptor is called recursively. */
16427 case PARALLEL:
16428 {
16429 int index = 0;
16430 dw_loc_descr_ref exp_result = NULL;
16431
16432 for (; index < XVECLEN (rtl, 0); index++)
16433 {
16434 rtx elem = XVECEXP (rtl, 0, index);
16435 if (GET_CODE (elem) == UNSPEC)
16436 {
16437 /* Each DWARF operation UNSPEC contains two operands; if
16438 one operand is not used for the operation, const0_rtx is
16439 passed. */
16440 gcc_assert (XVECLEN (elem, 0) == 2);
16441
16442 HOST_WIDE_INT dw_op = XINT (elem, 1);
16443 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16444 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16445 exp_result
16446 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16447 oprnd2);
16448 }
16449 else
16450 exp_result
16451 = mem_loc_descriptor (elem, mode, mem_mode,
16452 VAR_INIT_STATUS_INITIALIZED);
16453
16454 if (!mem_loc_result)
16455 mem_loc_result = exp_result;
16456 else
16457 add_loc_descr (&mem_loc_result, exp_result);
16458 }
16459
16460 break;
16461 }
16462
16463 default:
16464 if (flag_checking)
16465 {
16466 print_rtl (stderr, rtl);
16467 gcc_unreachable ();
16468 }
16469 break;
16470 }
16471
16472 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16473 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16474
16475 return mem_loc_result;
16476 }
16477
16478 /* Return a descriptor that describes the concatenation of two locations.
16479 This is typically a complex variable. */
16480
16481 static dw_loc_descr_ref
16482 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16483 {
16484 /* At present we only track constant-sized pieces. */
16485 unsigned int size0, size1;
16486 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16487 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16488 return 0;
16489
16490 dw_loc_descr_ref cc_loc_result = NULL;
16491 dw_loc_descr_ref x0_ref
16492 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16493 dw_loc_descr_ref x1_ref
16494 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16495
16496 if (x0_ref == 0 || x1_ref == 0)
16497 return 0;
16498
16499 cc_loc_result = x0_ref;
16500 add_loc_descr_op_piece (&cc_loc_result, size0);
16501
16502 add_loc_descr (&cc_loc_result, x1_ref);
16503 add_loc_descr_op_piece (&cc_loc_result, size1);
16504
16505 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16506 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16507
16508 return cc_loc_result;
16509 }
16510
16511 /* Return a descriptor that describes the concatenation of N
16512 locations. */
16513
16514 static dw_loc_descr_ref
16515 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16516 {
16517 unsigned int i;
16518 dw_loc_descr_ref cc_loc_result = NULL;
16519 unsigned int n = XVECLEN (concatn, 0);
16520 unsigned int size;
16521
16522 for (i = 0; i < n; ++i)
16523 {
16524 dw_loc_descr_ref ref;
16525 rtx x = XVECEXP (concatn, 0, i);
16526
16527 /* At present we only track constant-sized pieces. */
16528 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16529 return NULL;
16530
16531 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16532 if (ref == NULL)
16533 return NULL;
16534
16535 add_loc_descr (&cc_loc_result, ref);
16536 add_loc_descr_op_piece (&cc_loc_result, size);
16537 }
16538
16539 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16540 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16541
16542 return cc_loc_result;
16543 }
16544
16545 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16546 for DEBUG_IMPLICIT_PTR RTL. */
16547
16548 static dw_loc_descr_ref
16549 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16550 {
16551 dw_loc_descr_ref ret;
16552 dw_die_ref ref;
16553
16554 if (dwarf_strict && dwarf_version < 5)
16555 return NULL;
16556 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16557 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16558 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16559 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16560 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16561 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16562 if (ref)
16563 {
16564 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16565 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16566 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16567 }
16568 else
16569 {
16570 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16571 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16572 }
16573 return ret;
16574 }
16575
16576 /* Output a proper Dwarf location descriptor for a variable or parameter
16577 which is either allocated in a register or in a memory location. For a
16578 register, we just generate an OP_REG and the register number. For a
16579 memory location we provide a Dwarf postfix expression describing how to
16580 generate the (dynamic) address of the object onto the address stack.
16581
16582 MODE is mode of the decl if this loc_descriptor is going to be used in
16583 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16584 allowed, VOIDmode otherwise.
16585
16586 If we don't know how to describe it, return 0. */
16587
16588 static dw_loc_descr_ref
16589 loc_descriptor (rtx rtl, machine_mode mode,
16590 enum var_init_status initialized)
16591 {
16592 dw_loc_descr_ref loc_result = NULL;
16593 scalar_int_mode int_mode;
16594
16595 switch (GET_CODE (rtl))
16596 {
16597 case SUBREG:
16598 /* The case of a subreg may arise when we have a local (register)
16599 variable or a formal (register) parameter which doesn't quite fill
16600 up an entire register. For now, just assume that it is
16601 legitimate to make the Dwarf info refer to the whole register which
16602 contains the given subreg. */
16603 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16604 loc_result = loc_descriptor (SUBREG_REG (rtl),
16605 GET_MODE (SUBREG_REG (rtl)), initialized);
16606 else
16607 goto do_default;
16608 break;
16609
16610 case REG:
16611 loc_result = reg_loc_descriptor (rtl, initialized);
16612 break;
16613
16614 case MEM:
16615 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16616 GET_MODE (rtl), initialized);
16617 if (loc_result == NULL)
16618 loc_result = tls_mem_loc_descriptor (rtl);
16619 if (loc_result == NULL)
16620 {
16621 rtx new_rtl = avoid_constant_pool_reference (rtl);
16622 if (new_rtl != rtl)
16623 loc_result = loc_descriptor (new_rtl, mode, initialized);
16624 }
16625 break;
16626
16627 case CONCAT:
16628 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16629 initialized);
16630 break;
16631
16632 case CONCATN:
16633 loc_result = concatn_loc_descriptor (rtl, initialized);
16634 break;
16635
16636 case VAR_LOCATION:
16637 /* Single part. */
16638 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16639 {
16640 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16641 if (GET_CODE (loc) == EXPR_LIST)
16642 loc = XEXP (loc, 0);
16643 loc_result = loc_descriptor (loc, mode, initialized);
16644 break;
16645 }
16646
16647 rtl = XEXP (rtl, 1);
16648 /* FALLTHRU */
16649
16650 case PARALLEL:
16651 {
16652 rtvec par_elems = XVEC (rtl, 0);
16653 int num_elem = GET_NUM_ELEM (par_elems);
16654 machine_mode mode;
16655 int i, size;
16656
16657 /* Create the first one, so we have something to add to. */
16658 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16659 VOIDmode, initialized);
16660 if (loc_result == NULL)
16661 return NULL;
16662 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16663 /* At present we only track constant-sized pieces. */
16664 if (!GET_MODE_SIZE (mode).is_constant (&size))
16665 return NULL;
16666 add_loc_descr_op_piece (&loc_result, size);
16667 for (i = 1; i < num_elem; i++)
16668 {
16669 dw_loc_descr_ref temp;
16670
16671 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16672 VOIDmode, initialized);
16673 if (temp == NULL)
16674 return NULL;
16675 add_loc_descr (&loc_result, temp);
16676 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16677 /* At present we only track constant-sized pieces. */
16678 if (!GET_MODE_SIZE (mode).is_constant (&size))
16679 return NULL;
16680 add_loc_descr_op_piece (&loc_result, size);
16681 }
16682 }
16683 break;
16684
16685 case CONST_INT:
16686 if (mode != VOIDmode && mode != BLKmode)
16687 {
16688 int_mode = as_a <scalar_int_mode> (mode);
16689 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16690 INTVAL (rtl));
16691 }
16692 break;
16693
16694 case CONST_DOUBLE:
16695 if (mode == VOIDmode)
16696 mode = GET_MODE (rtl);
16697
16698 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16699 {
16700 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16701
16702 /* Note that a CONST_DOUBLE rtx could represent either an integer
16703 or a floating-point constant. A CONST_DOUBLE is used whenever
16704 the constant requires more than one word in order to be
16705 adequately represented. We output CONST_DOUBLEs as blocks. */
16706 scalar_mode smode = as_a <scalar_mode> (mode);
16707 loc_result = new_loc_descr (DW_OP_implicit_value,
16708 GET_MODE_SIZE (smode), 0);
16709 #if TARGET_SUPPORTS_WIDE_INT == 0
16710 if (!SCALAR_FLOAT_MODE_P (smode))
16711 {
16712 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16713 loc_result->dw_loc_oprnd2.v.val_double
16714 = rtx_to_double_int (rtl);
16715 }
16716 else
16717 #endif
16718 {
16719 unsigned int length = GET_MODE_SIZE (smode);
16720 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16721
16722 insert_float (rtl, array);
16723 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16724 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16725 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16726 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16727 }
16728 }
16729 break;
16730
16731 case CONST_WIDE_INT:
16732 if (mode == VOIDmode)
16733 mode = GET_MODE (rtl);
16734
16735 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16736 {
16737 int_mode = as_a <scalar_int_mode> (mode);
16738 loc_result = new_loc_descr (DW_OP_implicit_value,
16739 GET_MODE_SIZE (int_mode), 0);
16740 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16741 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16742 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16743 }
16744 break;
16745
16746 case CONST_VECTOR:
16747 if (mode == VOIDmode)
16748 mode = GET_MODE (rtl);
16749
16750 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16751 {
16752 unsigned int length;
16753 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16754 return NULL;
16755
16756 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16757 unsigned char *array
16758 = ggc_vec_alloc<unsigned char> (length * elt_size);
16759 unsigned int i;
16760 unsigned char *p;
16761 machine_mode imode = GET_MODE_INNER (mode);
16762
16763 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16764 switch (GET_MODE_CLASS (mode))
16765 {
16766 case MODE_VECTOR_INT:
16767 for (i = 0, p = array; i < length; i++, p += elt_size)
16768 {
16769 rtx elt = CONST_VECTOR_ELT (rtl, i);
16770 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16771 }
16772 break;
16773
16774 case MODE_VECTOR_FLOAT:
16775 for (i = 0, p = array; i < length; i++, p += elt_size)
16776 {
16777 rtx elt = CONST_VECTOR_ELT (rtl, i);
16778 insert_float (elt, p);
16779 }
16780 break;
16781
16782 default:
16783 gcc_unreachable ();
16784 }
16785
16786 loc_result = new_loc_descr (DW_OP_implicit_value,
16787 length * elt_size, 0);
16788 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16789 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16790 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16791 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16792 }
16793 break;
16794
16795 case CONST:
16796 if (mode == VOIDmode
16797 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16798 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16799 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16800 {
16801 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16802 break;
16803 }
16804 /* FALLTHROUGH */
16805 case SYMBOL_REF:
16806 if (!const_ok_for_output (rtl))
16807 break;
16808 /* FALLTHROUGH */
16809 case LABEL_REF:
16810 if (is_a <scalar_int_mode> (mode, &int_mode)
16811 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16812 && (dwarf_version >= 4 || !dwarf_strict))
16813 {
16814 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16815 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16816 vec_safe_push (used_rtx_array, rtl);
16817 }
16818 break;
16819
16820 case DEBUG_IMPLICIT_PTR:
16821 loc_result = implicit_ptr_descriptor (rtl, 0);
16822 break;
16823
16824 case PLUS:
16825 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16826 && CONST_INT_P (XEXP (rtl, 1)))
16827 {
16828 loc_result
16829 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16830 break;
16831 }
16832 /* FALLTHRU */
16833 do_default:
16834 default:
16835 if ((is_a <scalar_int_mode> (mode, &int_mode)
16836 && GET_MODE (rtl) == int_mode
16837 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16838 && dwarf_version >= 4)
16839 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16840 {
16841 /* Value expression. */
16842 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16843 if (loc_result)
16844 add_loc_descr (&loc_result,
16845 new_loc_descr (DW_OP_stack_value, 0, 0));
16846 }
16847 break;
16848 }
16849
16850 return loc_result;
16851 }
16852
16853 /* We need to figure out what section we should use as the base for the
16854 address ranges where a given location is valid.
16855 1. If this particular DECL has a section associated with it, use that.
16856 2. If this function has a section associated with it, use that.
16857 3. Otherwise, use the text section.
16858 XXX: If you split a variable across multiple sections, we won't notice. */
16859
16860 static const char *
16861 secname_for_decl (const_tree decl)
16862 {
16863 const char *secname;
16864
16865 if (VAR_OR_FUNCTION_DECL_P (decl)
16866 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16867 && DECL_SECTION_NAME (decl))
16868 secname = DECL_SECTION_NAME (decl);
16869 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16870 {
16871 if (in_cold_section_p)
16872 {
16873 section *sec = current_function_section ();
16874 if (sec->common.flags & SECTION_NAMED)
16875 return sec->named.name;
16876 }
16877 secname = DECL_SECTION_NAME (current_function_decl);
16878 }
16879 else if (cfun && in_cold_section_p)
16880 secname = crtl->subsections.cold_section_label;
16881 else
16882 secname = text_section_label;
16883
16884 return secname;
16885 }
16886
16887 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16888
16889 static bool
16890 decl_by_reference_p (tree decl)
16891 {
16892 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16893 || VAR_P (decl))
16894 && DECL_BY_REFERENCE (decl));
16895 }
16896
16897 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16898 for VARLOC. */
16899
16900 static dw_loc_descr_ref
16901 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16902 enum var_init_status initialized)
16903 {
16904 int have_address = 0;
16905 dw_loc_descr_ref descr;
16906 machine_mode mode;
16907
16908 if (want_address != 2)
16909 {
16910 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16911 /* Single part. */
16912 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16913 {
16914 varloc = PAT_VAR_LOCATION_LOC (varloc);
16915 if (GET_CODE (varloc) == EXPR_LIST)
16916 varloc = XEXP (varloc, 0);
16917 mode = GET_MODE (varloc);
16918 if (MEM_P (varloc))
16919 {
16920 rtx addr = XEXP (varloc, 0);
16921 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16922 mode, initialized);
16923 if (descr)
16924 have_address = 1;
16925 else
16926 {
16927 rtx x = avoid_constant_pool_reference (varloc);
16928 if (x != varloc)
16929 descr = mem_loc_descriptor (x, mode, VOIDmode,
16930 initialized);
16931 }
16932 }
16933 else
16934 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16935 }
16936 else
16937 return 0;
16938 }
16939 else
16940 {
16941 if (GET_CODE (varloc) == VAR_LOCATION)
16942 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16943 else
16944 mode = DECL_MODE (loc);
16945 descr = loc_descriptor (varloc, mode, initialized);
16946 have_address = 1;
16947 }
16948
16949 if (!descr)
16950 return 0;
16951
16952 if (want_address == 2 && !have_address
16953 && (dwarf_version >= 4 || !dwarf_strict))
16954 {
16955 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16956 {
16957 expansion_failed (loc, NULL_RTX,
16958 "DWARF address size mismatch");
16959 return 0;
16960 }
16961 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16962 have_address = 1;
16963 }
16964 /* Show if we can't fill the request for an address. */
16965 if (want_address && !have_address)
16966 {
16967 expansion_failed (loc, NULL_RTX,
16968 "Want address and only have value");
16969 return 0;
16970 }
16971
16972 /* If we've got an address and don't want one, dereference. */
16973 if (!want_address && have_address)
16974 {
16975 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16976 enum dwarf_location_atom op;
16977
16978 if (size > DWARF2_ADDR_SIZE || size == -1)
16979 {
16980 expansion_failed (loc, NULL_RTX,
16981 "DWARF address size mismatch");
16982 return 0;
16983 }
16984 else if (size == DWARF2_ADDR_SIZE)
16985 op = DW_OP_deref;
16986 else
16987 op = DW_OP_deref_size;
16988
16989 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16990 }
16991
16992 return descr;
16993 }
16994
16995 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16996 if it is not possible. */
16997
16998 static dw_loc_descr_ref
16999 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17000 {
17001 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17002 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17003 else if (dwarf_version >= 3 || !dwarf_strict)
17004 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17005 else
17006 return NULL;
17007 }
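/* For illustration only, assuming BITS_PER_UNIT == 8 (hypothetical values):
   bitsize = 32, offset = 0  ->  DW_OP_piece 4
   bitsize = 3,  offset = 5  ->  DW_OP_bit_piece 3, 5  (DWARF 3+ or !dwarf_strict)
   bitsize = 3,  offset = 5  ->  NULL under strict DWARF 2, where DW_OP_bit_piece
                                 is not available.  */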
17008
17009 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17010 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
17011
17012 static dw_loc_descr_ref
17013 dw_sra_loc_expr (tree decl, rtx loc)
17014 {
17015 rtx p;
17016 unsigned HOST_WIDE_INT padsize = 0;
17017 dw_loc_descr_ref descr, *descr_tail;
17018 unsigned HOST_WIDE_INT decl_size;
17019 rtx varloc;
17020 enum var_init_status initialized;
17021
17022 if (DECL_SIZE (decl) == NULL
17023 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17024 return NULL;
17025
17026 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17027 descr = NULL;
17028 descr_tail = &descr;
17029
17030 for (p = loc; p; p = XEXP (p, 1))
17031 {
17032 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17033 rtx loc_note = *decl_piece_varloc_ptr (p);
17034 dw_loc_descr_ref cur_descr;
17035 dw_loc_descr_ref *tail, last = NULL;
17036 unsigned HOST_WIDE_INT opsize = 0;
17037
17038 if (loc_note == NULL_RTX
17039 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17040 {
17041 padsize += bitsize;
17042 continue;
17043 }
17044 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17045 varloc = NOTE_VAR_LOCATION (loc_note);
17046 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17047 if (cur_descr == NULL)
17048 {
17049 padsize += bitsize;
17050 continue;
17051 }
17052
17053 /* Check that cur_descr either doesn't use
17054 DW_OP_*piece operations, or their sum is equal
17055 to bitsize. Otherwise we can't embed it. */
17056 for (tail = &cur_descr; *tail != NULL;
17057 tail = &(*tail)->dw_loc_next)
17058 if ((*tail)->dw_loc_opc == DW_OP_piece)
17059 {
17060 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17061 * BITS_PER_UNIT;
17062 last = *tail;
17063 }
17064 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17065 {
17066 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17067 last = *tail;
17068 }
17069
17070 if (last != NULL && opsize != bitsize)
17071 {
17072 padsize += bitsize;
17073 /* Discard the current piece of the descriptor and release any
17074 addr_table entries it uses. */
17075 remove_loc_list_addr_table_entries (cur_descr);
17076 continue;
17077 }
17078
17079 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17080 expression, which means that those bits are optimized out. */
17081 if (padsize)
17082 {
17083 if (padsize > decl_size)
17084 {
17085 remove_loc_list_addr_table_entries (cur_descr);
17086 goto discard_descr;
17087 }
17088 decl_size -= padsize;
17089 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17090 if (*descr_tail == NULL)
17091 {
17092 remove_loc_list_addr_table_entries (cur_descr);
17093 goto discard_descr;
17094 }
17095 descr_tail = &(*descr_tail)->dw_loc_next;
17096 padsize = 0;
17097 }
17098 *descr_tail = cur_descr;
17099 descr_tail = tail;
17100 if (bitsize > decl_size)
17101 goto discard_descr;
17102 decl_size -= bitsize;
17103 if (last == NULL)
17104 {
17105 HOST_WIDE_INT offset = 0;
17106 if (GET_CODE (varloc) == VAR_LOCATION
17107 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17108 {
17109 varloc = PAT_VAR_LOCATION_LOC (varloc);
17110 if (GET_CODE (varloc) == EXPR_LIST)
17111 varloc = XEXP (varloc, 0);
17112 }
17113 do
17114 {
17115 if (GET_CODE (varloc) == CONST
17116 || GET_CODE (varloc) == SIGN_EXTEND
17117 || GET_CODE (varloc) == ZERO_EXTEND)
17118 varloc = XEXP (varloc, 0);
17119 else if (GET_CODE (varloc) == SUBREG)
17120 varloc = SUBREG_REG (varloc);
17121 else
17122 break;
17123 }
17124 while (1);
17125 /* The DW_OP_bit_piece offset should be zero for register
17126 or implicit location descriptions and for empty location
17127 descriptions, but for memory addresses it needs big-endian
17128 adjustment. */
17129 if (MEM_P (varloc))
17130 {
17131 unsigned HOST_WIDE_INT memsize;
17132 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17133 goto discard_descr;
17134 memsize *= BITS_PER_UNIT;
17135 if (memsize != bitsize)
17136 {
17137 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17138 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17139 goto discard_descr;
17140 if (memsize < bitsize)
17141 goto discard_descr;
17142 if (BITS_BIG_ENDIAN)
17143 offset = memsize - bitsize;
17144 }
17145 }
17146
17147 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17148 if (*descr_tail == NULL)
17149 goto discard_descr;
17150 descr_tail = &(*descr_tail)->dw_loc_next;
17151 }
17152 }
17153
17154 /* If there were any non-empty expressions, add padding till the end of
17155 the decl. */
17156 if (descr != NULL && decl_size != 0)
17157 {
17158 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17159 if (*descr_tail == NULL)
17160 goto discard_descr;
17161 }
17162 return descr;
17163
17164 discard_descr:
17165 /* Discard the descriptor and release any addr_table entries it uses. */
17166 remove_loc_list_addr_table_entries (descr);
17167 return NULL;
17168 }
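/* Illustrative sketch (hypothetical SRA split): a 16-byte struct whose first
   half has a known location and whose second half was optimized out would be
   described roughly as
     <location of first half> DW_OP_piece 8  DW_OP_piece 8
   where the second, bare DW_OP_piece covers the missing (optimized-out) bits,
   as built by the padsize handling above.  */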
17169
17170 /* Return the dwarf representation of the location list LOC_LIST of
17171 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17172 function. */
17173
17174 static dw_loc_list_ref
17175 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17176 {
17177 const char *endname, *secname;
17178 var_loc_view endview;
17179 rtx varloc;
17180 enum var_init_status initialized;
17181 struct var_loc_node *node;
17182 dw_loc_descr_ref descr;
17183 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17184 dw_loc_list_ref list = NULL;
17185 dw_loc_list_ref *listp = &list;
17186
17187 /* Now that we know what section we are using for a base,
17188 actually construct the list of locations.
17189 The first location information is what is passed to the
17190 function that creates the location list, and the remaining
17191 locations just get added on to that list.
17192 Note that we only know the start address for a location
17193 (i.e. where the location changes), so to build the range, we use
17194 the range [current location start, next location start].
17195 This means we have to special-case the last node, and generate
17196 a range of [last location start, end of function label]. */
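/* For example (hypothetical labels): a variable whose location changes at
   labels L1, L2 and L3, in a function that ends at LEND, yields the ranges
   [L1, L2), [L2, L3) and [L3, LEND).  */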
17197
17198 if (cfun && crtl->has_bb_partition)
17199 {
17200 bool save_in_cold_section_p = in_cold_section_p;
17201 in_cold_section_p = first_function_block_is_cold;
17202 if (loc_list->last_before_switch == NULL)
17203 in_cold_section_p = !in_cold_section_p;
17204 secname = secname_for_decl (decl);
17205 in_cold_section_p = save_in_cold_section_p;
17206 }
17207 else
17208 secname = secname_for_decl (decl);
17209
17210 for (node = loc_list->first; node; node = node->next)
17211 {
17212 bool range_across_switch = false;
17213 if (GET_CODE (node->loc) == EXPR_LIST
17214 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17215 {
17216 if (GET_CODE (node->loc) == EXPR_LIST)
17217 {
17218 descr = NULL;
17219 /* This requires DW_OP_{,bit_}piece, which is not usable
17220 inside DWARF expressions. */
17221 if (want_address == 2)
17222 descr = dw_sra_loc_expr (decl, node->loc);
17223 }
17224 else
17225 {
17226 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17227 varloc = NOTE_VAR_LOCATION (node->loc);
17228 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17229 }
17230 if (descr)
17231 {
17232 /* If section switch happens in between node->label
17233 and node->next->label (or end of function) and
17234 we can't emit it as a single entry list,
17235 emit two ranges, first one ending at the end
17236 of first partition and second one starting at the
17237 beginning of second partition. */
17238 if (node == loc_list->last_before_switch
17239 && (node != loc_list->first || loc_list->first->next
17240 /* If we are to emit a view number, we will emit
17241 a loclist rather than a single location
17242 expression for the entire function (see
17243 loc_list_has_views), so we have to split the
17244 range that straddles across partitions. */
17245 || !ZERO_VIEW_P (node->view))
17246 && current_function_decl)
17247 {
17248 endname = cfun->fde->dw_fde_end;
17249 endview = 0;
17250 range_across_switch = true;
17251 }
17252 /* The variable has a location between NODE->LABEL and
17253 NODE->NEXT->LABEL. */
17254 else if (node->next)
17255 endname = node->next->label, endview = node->next->view;
17256 /* If the variable has a location at the last label
17257 it keeps its location until the end of function. */
17258 else if (!current_function_decl)
17259 endname = text_end_label, endview = 0;
17260 else
17261 {
17262 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17263 current_function_funcdef_no);
17264 endname = ggc_strdup (label_id);
17265 endview = 0;
17266 }
17267
17268 *listp = new_loc_list (descr, node->label, node->view,
17269 endname, endview, secname);
17270 if (TREE_CODE (decl) == PARM_DECL
17271 && node == loc_list->first
17272 && NOTE_P (node->loc)
17273 && strcmp (node->label, endname) == 0)
17274 (*listp)->force = true;
17275 listp = &(*listp)->dw_loc_next;
17276 }
17277 }
17278
17279 if (cfun
17280 && crtl->has_bb_partition
17281 && node == loc_list->last_before_switch)
17282 {
17283 bool save_in_cold_section_p = in_cold_section_p;
17284 in_cold_section_p = !first_function_block_is_cold;
17285 secname = secname_for_decl (decl);
17286 in_cold_section_p = save_in_cold_section_p;
17287 }
17288
17289 if (range_across_switch)
17290 {
17291 if (GET_CODE (node->loc) == EXPR_LIST)
17292 descr = dw_sra_loc_expr (decl, node->loc);
17293 else
17294 {
17295 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17296 varloc = NOTE_VAR_LOCATION (node->loc);
17297 descr = dw_loc_list_1 (decl, varloc, want_address,
17298 initialized);
17299 }
17300 gcc_assert (descr);
17301 /* The variable has a location between NODE->LABEL and
17302 NODE->NEXT->LABEL. */
17303 if (node->next)
17304 endname = node->next->label, endview = node->next->view;
17305 else
17306 endname = cfun->fde->dw_fde_second_end, endview = 0;
17307 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17308 endname, endview, secname);
17309 listp = &(*listp)->dw_loc_next;
17310 }
17311 }
17312
17313 /* Try to avoid the overhead of a location list by emitting a location
17314 expression instead, but only if we didn't have more than one
17315 location entry in the first place. If some entries were not
17316 representable, we don't want to pretend that a single entry that was
17317 emitted applies to the entire scope in which the variable is
17318 available. */
17319 if (list && loc_list->first->next)
17320 gen_llsym (list);
17321 else
17322 maybe_gen_llsym (list);
17323
17324 return list;
17325 }
17326
17327 /* Return true if the loc_list has only a single element and thus can be
17328 represented as a location description. */
17329
17330 static bool
17331 single_element_loc_list_p (dw_loc_list_ref list)
17332 {
17333 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17334 return !list->ll_symbol;
17335 }
17336
17337 /* Duplicate a single element of location list. */
17338
17339 static inline dw_loc_descr_ref
17340 copy_loc_descr (dw_loc_descr_ref ref)
17341 {
17342 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17343 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17344 return copy;
17345 }
17346
17347 /* To each location in list LIST append loc descr REF. */
17348
17349 static void
17350 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17351 {
17352 dw_loc_descr_ref copy;
17353 add_loc_descr (&list->expr, ref);
17354 list = list->dw_loc_next;
17355 while (list)
17356 {
17357 copy = copy_loc_descr (ref);
17358 add_loc_descr (&list->expr, copy);
17359 while (copy->dw_loc_next)
17360 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17361 list = list->dw_loc_next;
17362 }
17363 }
17364
17365 /* To each location in list LIST prepend loc descr REF. */
17366
17367 static void
17368 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17369 {
17370 dw_loc_descr_ref copy;
17371 dw_loc_descr_ref ref_end = list->expr;
17372 add_loc_descr (&ref, list->expr);
17373 list->expr = ref;
17374 list = list->dw_loc_next;
17375 while (list)
17376 {
17377 dw_loc_descr_ref end = list->expr;
17378 list->expr = copy = copy_loc_descr (ref);
17379 while (copy->dw_loc_next != ref_end)
17380 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17381 copy->dw_loc_next = end;
17382 list = list->dw_loc_next;
17383 }
17384 }
17385
17386 /* Given two lists RET and LIST,
17387 produce a location list that is the result of adding the expression in LIST
17388 to the expression in RET at each position in the program.
17389 Might be destructive on both RET and LIST.
17390 
17391 TODO: We handle only the simple cases of RET or LIST having at most one
17392 element. The general case would involve sorting the lists in program order
17393 and merging them, which will need some additional work.
17394 Adding that will improve the quality of debug info, especially for
17395 SRA-ed structures. */
17396
17397 static void
17398 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17399 {
17400 if (!list)
17401 return;
17402 if (!*ret)
17403 {
17404 *ret = list;
17405 return;
17406 }
17407 if (!list->dw_loc_next)
17408 {
17409 add_loc_descr_to_each (*ret, list->expr);
17410 return;
17411 }
17412 if (!(*ret)->dw_loc_next)
17413 {
17414 prepend_loc_descr_to_each (list, (*ret)->expr);
17415 *ret = list;
17416 return;
17417 }
17418 expansion_failed (NULL_TREE, NULL_RTX,
17419 "Don't know how to merge two non-trivial"
17420 " location lists.\n");
17421 *ret = NULL;
17422 return;
17423 }
17424
17425 /* LOC is a constant expression. Try our luck: look it up in the constant
17426 pool and return a loc_descr for its address. */
17427
17428 static dw_loc_descr_ref
17429 cst_pool_loc_descr (tree loc)
17430 {
17431 /* Get an RTL for this, if something has been emitted. */
17432 rtx rtl = lookup_constant_def (loc);
17433
17434 if (!rtl || !MEM_P (rtl))
17435 {
17436 gcc_assert (!rtl);
17437 return 0;
17438 }
17439 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17440
17441 /* TODO: We might get more coverage if we were actually delaying expansion
17442 of all expressions till the end of compilation, when constant pools are
17443 fully populated. */
17444 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17445 {
17446 expansion_failed (loc, NULL_RTX,
17447 "CST value in contant pool but not marked.");
17448 return 0;
17449 }
17450 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17451 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17452 }
17453
17454 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
17455 by looking for an inner INDIRECT_REF expression and turning
17456 it into simple arithmetic.
17457
17458 See loc_list_from_tree for the meaning of CONTEXT. */
17459
17460 static dw_loc_list_ref
17461 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17462 loc_descr_context *context)
17463 {
17464 tree obj, offset;
17465 poly_int64 bitsize, bitpos, bytepos;
17466 machine_mode mode;
17467 int unsignedp, reversep, volatilep = 0;
17468 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17469
17470 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17471 &bitsize, &bitpos, &offset, &mode,
17472 &unsignedp, &reversep, &volatilep);
17473 STRIP_NOPS (obj);
17474 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17475 {
17476 expansion_failed (loc, NULL_RTX, "bitfield access");
17477 return 0;
17478 }
17479 if (!INDIRECT_REF_P (obj))
17480 {
17481 expansion_failed (obj,
17482 NULL_RTX, "no indirect ref in inner refrence");
17483 return 0;
17484 }
17485 if (!offset && known_eq (bitpos, 0))
17486 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17487 context);
17488 else if (toplev
17489 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17490 && (dwarf_version >= 4 || !dwarf_strict))
17491 {
17492 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17493 if (!list_ret)
17494 return 0;
17495 if (offset)
17496 {
17497 /* Variable offset. */
17498 list_ret1 = loc_list_from_tree (offset, 0, context);
17499 if (list_ret1 == 0)
17500 return 0;
17501 add_loc_list (&list_ret, list_ret1);
17502 if (!list_ret)
17503 return 0;
17504 add_loc_descr_to_each (list_ret,
17505 new_loc_descr (DW_OP_plus, 0, 0));
17506 }
17507 HOST_WIDE_INT value;
17508 if (bytepos.is_constant (&value) && value > 0)
17509 add_loc_descr_to_each (list_ret,
17510 new_loc_descr (DW_OP_plus_uconst, value, 0));
17511 else if (maybe_ne (bytepos, 0))
17512 loc_list_plus_const (list_ret, bytepos);
17513 add_loc_descr_to_each (list_ret,
17514 new_loc_descr (DW_OP_stack_value, 0, 0));
17515 }
17516 return list_ret;
17517 }
17518
17519 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17520 all operations from LOC are nops, move to the last one. Insert in NOPS all
17521 operations that are skipped. */
17522
17523 static void
17524 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17525 hash_set<dw_loc_descr_ref> &nops)
17526 {
17527 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17528 {
17529 nops.add (loc);
17530 loc = loc->dw_loc_next;
17531 }
17532 }
17533
17534 /* Helper for loc_descr_without_nops: free the location description operation
17535 P. */
17536
17537 bool
17538 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17539 {
17540 ggc_free (loc);
17541 return true;
17542 }
17543
17544 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17545 finishes LOC. */
17546
17547 static void
17548 loc_descr_without_nops (dw_loc_descr_ref &loc)
17549 {
17550 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17551 return;
17552
17553 /* Set of all DW_OP_nop operations we remove. */
17554 hash_set<dw_loc_descr_ref> nops;
17555
17556 /* First, strip all prefix NOP operations in order to keep the head of the
17557 operations list. */
17558 loc_descr_to_next_no_nop (loc, nops);
17559
17560 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17561 {
17562 /* For control flow operations: strip "prefix" nops in destination
17563 labels. */
17564 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17565 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17566 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17567 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17568
17569 /* Do the same for the operations that follow, then move to the next
17570 iteration. */
17571 if (cur->dw_loc_next != NULL)
17572 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17573 cur = cur->dw_loc_next;
17574 }
17575
17576 nops.traverse<void *, free_loc_descr> (NULL);
17577 }
17578
17579
17580 struct dwarf_procedure_info;
17581
17582 /* Helper structure for location descriptions generation. */
17583 struct loc_descr_context
17584 {
17585 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17586 NULL_TREE if DW_OP_push_object_address is invalid for this location
17587 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17588 tree context_type;
17589 /* The ..._DECL node that should be translated as a
17590 DW_OP_push_object_address operation. */
17591 tree base_decl;
17592 /* Information about the DWARF procedure we are currently generating. NULL if
17593 we are not generating a DWARF procedure. */
17594 struct dwarf_procedure_info *dpi;
17595 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17596 by consumer. Used for DW_TAG_generic_subrange attributes. */
17597 bool placeholder_arg;
17598 /* True if PLACEHOLDER_EXPR has been seen. */
17599 bool placeholder_seen;
17600 };
17601
17602 /* DWARF procedures generation
17603
17604 DWARF expressions (aka. location descriptions) are used to encode variable
17605 things such as sizes or offsets. Such computations can have redundant parts
17606 that can be factorized in order to reduce the size of the output debug
17607 information. This is the whole point of DWARF procedures.
17608
17609 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17610 already factorized into functions ("size functions") in order to handle very
17611 big and complex types. Such functions are quite simple: they have integral
17612 arguments, they return an integral result and their body contains only a
17613 return statement with arithmetic expressions. This is the only kind of
17614 function we are interested in translating into DWARF procedures, here.
17615
17616 DWARF expressions and DWARF procedures are executed using a stack, so we have
17617 to define some calling convention for them to interact. Let's say that:
17618
17619 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17620 all arguments in reverse order (right-to-left) so that when the DWARF
17621 procedure execution starts, the first argument is the top of the stack.
17622
17623 - Then, when returning, the DWARF procedure must have consumed all arguments
17624 on the stack, must have pushed the result and touched nothing else.
17625
17626 - Each argument and the result have integral types and can be held in a
17627 single stack slot.
17628
17629 - We call "frame offset" the number of stack slots that are "under DWARF
17630 procedure control": it includes the arguments slots, the temporaries and
17631 the result slot. Thus, it is equal to the number of arguments when the
17632 procedure execution starts and must be equal to one (the result) when it
17633 returns. */
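/* Illustrative sketch of the convention above, for a hypothetical DWARF
   procedure P taking two arguments:

     caller: ... <push arg2> <push arg1> DW_OP_call4 <DIE of P> ...

     stack on entry to P (top on the right): ... arg2 arg1    frame offset = 2
     stack when P returns:                   ... result       frame offset = 1

   P must consume both arguments and leave exactly one result slot.  */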
17634
17635 /* Helper structure used when generating operations for a DWARF procedure. */
17636 struct dwarf_procedure_info
17637 {
17638 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17639 currently translated. */
17640 tree fndecl;
17641 /* The number of arguments FNDECL takes. */
17642 unsigned args_count;
17643 };
17644
17645 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17646 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17647 equate it to this DIE. */
17648
17649 static dw_die_ref
17650 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17651 dw_die_ref parent_die)
17652 {
17653 dw_die_ref dwarf_proc_die;
17654
17655 if ((dwarf_version < 3 && dwarf_strict)
17656 || location == NULL)
17657 return NULL;
17658
17659 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17660 if (fndecl)
17661 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17662 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17663 return dwarf_proc_die;
17664 }
17665
17666 /* Return whether TYPE is a supported type as a DWARF procedure argument
17667 type or return type (we handle only scalar types and pointer types that
17668 aren't wider than the DWARF expression evaluation stack). */
17669
17670 static bool
17671 is_handled_procedure_type (tree type)
17672 {
17673 return ((INTEGRAL_TYPE_P (type)
17674 || TREE_CODE (type) == OFFSET_TYPE
17675 || TREE_CODE (type) == POINTER_TYPE)
17676 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17677 }
17678
17679 /* Helper for resolve_args_picking: do the same but stop when coming across
17680 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17681 offset *before* evaluating the corresponding operation. */
17682
17683 static bool
17684 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17685 struct dwarf_procedure_info *dpi,
17686 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17687 {
17688 /* The "frame_offset" identifier is already used to name a macro... */
17689 unsigned frame_offset_ = initial_frame_offset;
17690 dw_loc_descr_ref l;
17691
17692 for (l = loc; l != NULL;)
17693 {
17694 bool existed;
17695 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17696
17697 /* If we already met this node, there is nothing to compute anymore. */
17698 if (existed)
17699 {
17700 /* Make sure that the stack size is consistent wherever the execution
17701 flow comes from. */
17702 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17703 break;
17704 }
17705 l_frame_offset = frame_offset_;
17706
17707 /* If needed, relocate the picking offset with respect to the frame
17708 offset. */
17709 if (l->frame_offset_rel)
17710 {
17711 unsigned HOST_WIDE_INT off;
17712 switch (l->dw_loc_opc)
17713 {
17714 case DW_OP_pick:
17715 off = l->dw_loc_oprnd1.v.val_unsigned;
17716 break;
17717 case DW_OP_dup:
17718 off = 0;
17719 break;
17720 case DW_OP_over:
17721 off = 1;
17722 break;
17723 default:
17724 gcc_unreachable ();
17725 }
17726 /* frame_offset_ is the size of the current stack frame, including
17727 incoming arguments. Besides, the arguments are pushed
17728 right-to-left. Thus, in order to access the Nth argument from
17729 this operation node, the picking has to skip temporaries *plus*
17730 one stack slot per argument (0 for the first one, 1 for the second
17731 one, etc.).
17732
17733 The targeted argument number (N) is already set as the operand,
17734 and the number of temporaries can be computed with:
17735 frame_offset_ - dpi->args_count */
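/* Worked example with hypothetical numbers: dpi->args_count == 3 and
   frame_offset_ == 5 (three arguments plus two temporaries); accessing
   argument N == 1 gives off = 1 + 5 - 3 = 3, i.e. DW_OP_pick 3.  */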
17736 off += frame_offset_ - dpi->args_count;
17737
17738 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17739 if (off > 255)
17740 return false;
17741
17742 if (off == 0)
17743 {
17744 l->dw_loc_opc = DW_OP_dup;
17745 l->dw_loc_oprnd1.v.val_unsigned = 0;
17746 }
17747 else if (off == 1)
17748 {
17749 l->dw_loc_opc = DW_OP_over;
17750 l->dw_loc_oprnd1.v.val_unsigned = 0;
17751 }
17752 else
17753 {
17754 l->dw_loc_opc = DW_OP_pick;
17755 l->dw_loc_oprnd1.v.val_unsigned = off;
17756 }
17757 }
17758
17759 /* Update frame_offset according to the effect the current operation has
17760 on the stack. */
17761 switch (l->dw_loc_opc)
17762 {
17763 case DW_OP_deref:
17764 case DW_OP_swap:
17765 case DW_OP_rot:
17766 case DW_OP_abs:
17767 case DW_OP_neg:
17768 case DW_OP_not:
17769 case DW_OP_plus_uconst:
17770 case DW_OP_skip:
17771 case DW_OP_reg0:
17772 case DW_OP_reg1:
17773 case DW_OP_reg2:
17774 case DW_OP_reg3:
17775 case DW_OP_reg4:
17776 case DW_OP_reg5:
17777 case DW_OP_reg6:
17778 case DW_OP_reg7:
17779 case DW_OP_reg8:
17780 case DW_OP_reg9:
17781 case DW_OP_reg10:
17782 case DW_OP_reg11:
17783 case DW_OP_reg12:
17784 case DW_OP_reg13:
17785 case DW_OP_reg14:
17786 case DW_OP_reg15:
17787 case DW_OP_reg16:
17788 case DW_OP_reg17:
17789 case DW_OP_reg18:
17790 case DW_OP_reg19:
17791 case DW_OP_reg20:
17792 case DW_OP_reg21:
17793 case DW_OP_reg22:
17794 case DW_OP_reg23:
17795 case DW_OP_reg24:
17796 case DW_OP_reg25:
17797 case DW_OP_reg26:
17798 case DW_OP_reg27:
17799 case DW_OP_reg28:
17800 case DW_OP_reg29:
17801 case DW_OP_reg30:
17802 case DW_OP_reg31:
17803 case DW_OP_bregx:
17804 case DW_OP_piece:
17805 case DW_OP_deref_size:
17806 case DW_OP_nop:
17807 case DW_OP_bit_piece:
17808 case DW_OP_implicit_value:
17809 case DW_OP_stack_value:
17810 break;
17811
17812 case DW_OP_addr:
17813 case DW_OP_const1u:
17814 case DW_OP_const1s:
17815 case DW_OP_const2u:
17816 case DW_OP_const2s:
17817 case DW_OP_const4u:
17818 case DW_OP_const4s:
17819 case DW_OP_const8u:
17820 case DW_OP_const8s:
17821 case DW_OP_constu:
17822 case DW_OP_consts:
17823 case DW_OP_dup:
17824 case DW_OP_over:
17825 case DW_OP_pick:
17826 case DW_OP_lit0:
17827 case DW_OP_lit1:
17828 case DW_OP_lit2:
17829 case DW_OP_lit3:
17830 case DW_OP_lit4:
17831 case DW_OP_lit5:
17832 case DW_OP_lit6:
17833 case DW_OP_lit7:
17834 case DW_OP_lit8:
17835 case DW_OP_lit9:
17836 case DW_OP_lit10:
17837 case DW_OP_lit11:
17838 case DW_OP_lit12:
17839 case DW_OP_lit13:
17840 case DW_OP_lit14:
17841 case DW_OP_lit15:
17842 case DW_OP_lit16:
17843 case DW_OP_lit17:
17844 case DW_OP_lit18:
17845 case DW_OP_lit19:
17846 case DW_OP_lit20:
17847 case DW_OP_lit21:
17848 case DW_OP_lit22:
17849 case DW_OP_lit23:
17850 case DW_OP_lit24:
17851 case DW_OP_lit25:
17852 case DW_OP_lit26:
17853 case DW_OP_lit27:
17854 case DW_OP_lit28:
17855 case DW_OP_lit29:
17856 case DW_OP_lit30:
17857 case DW_OP_lit31:
17858 case DW_OP_breg0:
17859 case DW_OP_breg1:
17860 case DW_OP_breg2:
17861 case DW_OP_breg3:
17862 case DW_OP_breg4:
17863 case DW_OP_breg5:
17864 case DW_OP_breg6:
17865 case DW_OP_breg7:
17866 case DW_OP_breg8:
17867 case DW_OP_breg9:
17868 case DW_OP_breg10:
17869 case DW_OP_breg11:
17870 case DW_OP_breg12:
17871 case DW_OP_breg13:
17872 case DW_OP_breg14:
17873 case DW_OP_breg15:
17874 case DW_OP_breg16:
17875 case DW_OP_breg17:
17876 case DW_OP_breg18:
17877 case DW_OP_breg19:
17878 case DW_OP_breg20:
17879 case DW_OP_breg21:
17880 case DW_OP_breg22:
17881 case DW_OP_breg23:
17882 case DW_OP_breg24:
17883 case DW_OP_breg25:
17884 case DW_OP_breg26:
17885 case DW_OP_breg27:
17886 case DW_OP_breg28:
17887 case DW_OP_breg29:
17888 case DW_OP_breg30:
17889 case DW_OP_breg31:
17890 case DW_OP_fbreg:
17891 case DW_OP_push_object_address:
17892 case DW_OP_call_frame_cfa:
17893 case DW_OP_GNU_variable_value:
17894 ++frame_offset_;
17895 break;
17896
17897 case DW_OP_drop:
17898 case DW_OP_xderef:
17899 case DW_OP_and:
17900 case DW_OP_div:
17901 case DW_OP_minus:
17902 case DW_OP_mod:
17903 case DW_OP_mul:
17904 case DW_OP_or:
17905 case DW_OP_plus:
17906 case DW_OP_shl:
17907 case DW_OP_shr:
17908 case DW_OP_shra:
17909 case DW_OP_xor:
17910 case DW_OP_bra:
17911 case DW_OP_eq:
17912 case DW_OP_ge:
17913 case DW_OP_gt:
17914 case DW_OP_le:
17915 case DW_OP_lt:
17916 case DW_OP_ne:
17917 case DW_OP_regx:
17918 case DW_OP_xderef_size:
17919 --frame_offset_;
17920 break;
17921
17922 case DW_OP_call2:
17923 case DW_OP_call4:
17924 case DW_OP_call_ref:
17925 {
17926 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17927 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17928
17929 if (stack_usage == NULL)
17930 return false;
17931 frame_offset_ += *stack_usage;
17932 break;
17933 }
17934
17935 case DW_OP_implicit_pointer:
17936 case DW_OP_entry_value:
17937 case DW_OP_const_type:
17938 case DW_OP_regval_type:
17939 case DW_OP_deref_type:
17940 case DW_OP_convert:
17941 case DW_OP_reinterpret:
17942 case DW_OP_form_tls_address:
17943 case DW_OP_GNU_push_tls_address:
17944 case DW_OP_GNU_uninit:
17945 case DW_OP_GNU_encoded_addr:
17946 case DW_OP_GNU_implicit_pointer:
17947 case DW_OP_GNU_entry_value:
17948 case DW_OP_GNU_const_type:
17949 case DW_OP_GNU_regval_type:
17950 case DW_OP_GNU_deref_type:
17951 case DW_OP_GNU_convert:
17952 case DW_OP_GNU_reinterpret:
17953 case DW_OP_GNU_parameter_ref:
17954 /* loc_list_from_tree will probably not output these operations for
17955 size functions, so assume they will not appear here. */
17956 /* Fall through... */
17957
17958 default:
17959 gcc_unreachable ();
17960 }
17961
17962 /* Now, follow the control flow (except subroutine calls). */
17963 switch (l->dw_loc_opc)
17964 {
17965 case DW_OP_bra:
17966 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17967 frame_offsets))
17968 return false;
17969 /* Fall through. */
17970
17971 case DW_OP_skip:
17972 l = l->dw_loc_oprnd1.v.val_loc;
17973 break;
17974
17975 case DW_OP_stack_value:
17976 return true;
17977
17978 default:
17979 l = l->dw_loc_next;
17980 break;
17981 }
17982 }
17983
17984 return true;
17985 }
17986
17987 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17988 operations) in order to resolve the operand of DW_OP_pick operations that
17989 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17990 offset *before* LOC is executed. Return whether all relocations were
17991 successful. */
17992
17993 static bool
17994 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17995 struct dwarf_procedure_info *dpi)
17996 {
17997 /* Associate to all visited operations the frame offset *before* evaluating
17998 this operation. */
17999 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18000
18001 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18002 frame_offsets);
18003 }
18004
18005 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18006 Return NULL if it is not possible. */
18007
18008 static dw_die_ref
18009 function_to_dwarf_procedure (tree fndecl)
18010 {
18011 struct loc_descr_context ctx;
18012 struct dwarf_procedure_info dpi;
18013 dw_die_ref dwarf_proc_die;
18014 tree tree_body = DECL_SAVED_TREE (fndecl);
18015 dw_loc_descr_ref loc_body, epilogue;
18016
18017 tree cursor;
18018 unsigned i;
18019
18020 /* Do not generate multiple DWARF procedures for the same function
18021 declaration. */
18022 dwarf_proc_die = lookup_decl_die (fndecl);
18023 if (dwarf_proc_die != NULL)
18024 return dwarf_proc_die;
18025
18026 /* DWARF procedures are available starting with the DWARFv3 standard. */
18027 if (dwarf_version < 3 && dwarf_strict)
18028 return NULL;
18029
18030 /* We handle only functions for which we still have a body, that return a
18031 supported type and that take arguments with supported types. Note that
18032 there is no point translating functions that return nothing. */
18033 if (tree_body == NULL_TREE
18034 || DECL_RESULT (fndecl) == NULL_TREE
18035 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18036 return NULL;
18037
18038 for (cursor = DECL_ARGUMENTS (fndecl);
18039 cursor != NULL_TREE;
18040 cursor = TREE_CHAIN (cursor))
18041 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18042 return NULL;
18043
18044 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18045 if (TREE_CODE (tree_body) != RETURN_EXPR)
18046 return NULL;
18047 tree_body = TREE_OPERAND (tree_body, 0);
18048 if (TREE_CODE (tree_body) != MODIFY_EXPR
18049 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18050 return NULL;
18051 tree_body = TREE_OPERAND (tree_body, 1);
18052
18053 /* Try to translate the body expression itself. Note that this will probably
18054 cause an infinite recursion if its call graph has a cycle. This is very
18055 unlikely for size functions, however, so don't bother with such things at
18056 the moment. */
18057 ctx.context_type = NULL_TREE;
18058 ctx.base_decl = NULL_TREE;
18059 ctx.dpi = &dpi;
18060 ctx.placeholder_arg = false;
18061 ctx.placeholder_seen = false;
18062 dpi.fndecl = fndecl;
18063 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18064 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18065 if (!loc_body)
18066 return NULL;
18067
18068 /* After evaluating all operands in "loc_body", we should still have on the
18069 stack all arguments plus the desired function result (top of the stack).
18070 Generate code in order to keep only the result in our stack frame. */
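/* Illustrative sketch for a hypothetical two-argument procedure: after the
   body the stack is "... arg2 arg1 result" (top on the right), and the
   epilogue built below, "DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop",
   peels off one argument per swap/drop pair, leaving only "... result".  */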
18071 epilogue = NULL;
18072 for (i = 0; i < dpi.args_count; ++i)
18073 {
18074 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18075 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18076 op_couple->dw_loc_next->dw_loc_next = epilogue;
18077 epilogue = op_couple;
18078 }
18079 add_loc_descr (&loc_body, epilogue);
18080 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18081 return NULL;
18082
18083 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
18084 earlier because they were considered useful. Now that there is an epilogue,
18085 they are not needed anymore, so give it another try. */
18086 loc_descr_without_nops (loc_body);
18087
18088 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18089 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
18090 though, given that size functions do not come from source, so they should
18091 not have a dedicated DW_TAG_subprogram DIE. */
18092 dwarf_proc_die
18093 = new_dwarf_proc_die (loc_body, fndecl,
18094 get_context_die (DECL_CONTEXT (fndecl)));
18095
18096 /* The called DWARF procedure consumes one stack slot per argument and
18097 returns one stack slot. */
18098 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18099
18100 return dwarf_proc_die;
18101 }
18102
18103
18104 /* Generate a Dwarf location list representing LOC.
18105 If WANT_ADDRESS is 0, an expression computing the value of LOC will be returned.
18106 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
18107 If WANT_ADDRESS is 2, an expression computing an address usable in a location
18108 description will be returned (i.e. DW_OP_reg can be used
18109 to refer to register values).
18110
18111 CONTEXT provides information to customize the location descriptions
18112 generation. Its context_type field specifies what type is implicitly
18113 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18114 will not be generated.
18115
18116 Its DPI field determines whether we are generating a DWARF expression for a
18117 DWARF procedure, so PARM_DECL references are processed specifically.
18118
18119 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18120 and dpi fields were null. */
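/* A hypothetical example: for a local variable X kept in memory at frame
   offset -8, WANT_ADDRESS == 1 would typically produce "DW_OP_fbreg -8"
   (the address of X), while WANT_ADDRESS == 0 would append a dereference,
   roughly "DW_OP_fbreg -8; DW_OP_deref", to compute X's value.  */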
18121
18122 static dw_loc_list_ref
18123 loc_list_from_tree_1 (tree loc, int want_address,
18124 struct loc_descr_context *context)
18125 {
18126 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18127 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18128 int have_address = 0;
18129 enum dwarf_location_atom op;
18130
18131 /* ??? Most of the time we do not take proper care to sign/zero
18132 extend the values. Hopefully this won't be a real
18133 problem... */
18134
18135 if (context != NULL
18136 && context->base_decl == loc
18137 && want_address == 0)
18138 {
18139 if (dwarf_version >= 3 || !dwarf_strict)
18140 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18141 NULL, 0, NULL, 0, NULL);
18142 else
18143 return NULL;
18144 }
18145
18146 switch (TREE_CODE (loc))
18147 {
18148 case ERROR_MARK:
18149 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18150 return 0;
18151
18152 case PLACEHOLDER_EXPR:
18153 /* This case involves extracting fields from an object to determine the
18154 position of other fields. It is supposed to appear only as the first
18155 operand of COMPONENT_REF nodes and to reference precisely the type
18156 that the context allows. */
18157 if (context != NULL
18158 && TREE_TYPE (loc) == context->context_type
18159 && want_address >= 1)
18160 {
18161 if (dwarf_version >= 3 || !dwarf_strict)
18162 {
18163 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18164 have_address = 1;
18165 break;
18166 }
18167 else
18168 return NULL;
18169 }
18170 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18171 the single argument passed by consumer. */
18172 else if (context != NULL
18173 && context->placeholder_arg
18174 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18175 && want_address == 0)
18176 {
18177 ret = new_loc_descr (DW_OP_pick, 0, 0);
18178 ret->frame_offset_rel = 1;
18179 context->placeholder_seen = true;
18180 break;
18181 }
18182 else
18183 expansion_failed (loc, NULL_RTX,
18184 "PLACEHOLDER_EXPR for an unexpected type");
18185 break;
18186
18187 case CALL_EXPR:
18188 {
18189 const int nargs = call_expr_nargs (loc);
18190 tree callee = get_callee_fndecl (loc);
18191 int i;
18192 dw_die_ref dwarf_proc;
18193
18194 if (callee == NULL_TREE)
18195 goto call_expansion_failed;
18196
18197 /* We handle only functions that return an integer. */
18198 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18199 goto call_expansion_failed;
18200
18201 dwarf_proc = function_to_dwarf_procedure (callee);
18202 if (dwarf_proc == NULL)
18203 goto call_expansion_failed;
18204
18205 /* Evaluate arguments right-to-left so that the first argument will
18206 be the top-most one on the stack. */
18207 for (i = nargs - 1; i >= 0; --i)
18208 {
18209 dw_loc_descr_ref loc_descr
18210 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18211 context);
18212
18213 if (loc_descr == NULL)
18214 goto call_expansion_failed;
18215
18216 add_loc_descr (&ret, loc_descr);
18217 }
18218
18219 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18220 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18221 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18222 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18223 add_loc_descr (&ret, ret1);
18224 break;
18225
18226 call_expansion_failed:
18227 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18228 /* There are no opcodes for these operations. */
18229 return 0;
18230 }
18231
18232 case PREINCREMENT_EXPR:
18233 case PREDECREMENT_EXPR:
18234 case POSTINCREMENT_EXPR:
18235 case POSTDECREMENT_EXPR:
18236 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18237 /* There are no opcodes for these operations. */
18238 return 0;
18239
18240 case ADDR_EXPR:
18241 /* If we already want an address, see if there is INDIRECT_REF inside
18242 e.g. for &this->field. */
18243 if (want_address)
18244 {
18245 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18246 (loc, want_address == 2, context);
18247 if (list_ret)
18248 have_address = 1;
18249 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18250 && (ret = cst_pool_loc_descr (loc)))
18251 have_address = 1;
18252 }
18253 /* Otherwise, process the argument and look for the address. */
18254 if (!list_ret && !ret)
18255 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18256 else
18257 {
18258 if (want_address)
18259 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18260 return NULL;
18261 }
18262 break;
18263
18264 case VAR_DECL:
18265 if (DECL_THREAD_LOCAL_P (loc))
18266 {
18267 rtx rtl;
18268 enum dwarf_location_atom tls_op;
18269 enum dtprel_bool dtprel = dtprel_false;
18270
18271 if (targetm.have_tls)
18272 {
18273 /* If this is not defined, we have no way to emit the
18274 data. */
18275 if (!targetm.asm_out.output_dwarf_dtprel)
18276 return 0;
18277
18278 /* The way DW_OP_GNU_push_tls_address is specified, we
18279 can only look up addresses of objects in the current
18280 module. We used DW_OP_addr as first op, but that's
18281 wrong, because DW_OP_addr is relocated by the debug
18282 info consumer, while DW_OP_GNU_push_tls_address
18283 operand shouldn't be. */
18284 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18285 return 0;
18286 dtprel = dtprel_true;
18287 /* We check for DWARF 5 here because gdb did not implement
18288 DW_OP_form_tls_address until after 7.12. */
18289 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18290 : DW_OP_GNU_push_tls_address);
18291 }
18292 else
18293 {
18294 if (!targetm.emutls.debug_form_tls_address
18295 || !(dwarf_version >= 3 || !dwarf_strict))
18296 return 0;
18297 /* We stuffed the control variable into the DECL_VALUE_EXPR
18298 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18299 no longer appear in gimple code. We used the control
18300 variable in specific so that we could pick it up here. */
18301 loc = DECL_VALUE_EXPR (loc);
18302 tls_op = DW_OP_form_tls_address;
18303 }
18304
18305 rtl = rtl_for_decl_location (loc);
18306 if (rtl == NULL_RTX)
18307 return 0;
18308
18309 if (!MEM_P (rtl))
18310 return 0;
18311 rtl = XEXP (rtl, 0);
18312 if (! CONSTANT_P (rtl))
18313 return 0;
18314
18315 ret = new_addr_loc_descr (rtl, dtprel);
18316 ret1 = new_loc_descr (tls_op, 0, 0);
18317 add_loc_descr (&ret, ret1);
18318
18319 have_address = 1;
18320 break;
18321 }
18322 /* FALLTHRU */
18323
18324 case PARM_DECL:
18325 if (context != NULL && context->dpi != NULL
18326 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18327 {
18328 /* We are generating code for a DWARF procedure and we want to access
18329 one of its arguments: find the appropriate argument offset and let
18330 the resolve_args_picking pass compute the offset that complies
18331 with the stack frame size. */
18332 unsigned i = 0;
18333 tree cursor;
18334
18335 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18336 cursor != NULL_TREE && cursor != loc;
18337 cursor = TREE_CHAIN (cursor), ++i)
18338 ;
18339 /* If we are translating a DWARF procedure, all referenced parameters
18340 must belong to the current function. */
18341 gcc_assert (cursor != NULL_TREE);
18342
18343 ret = new_loc_descr (DW_OP_pick, i, 0);
18344 ret->frame_offset_rel = 1;
18345 break;
18346 }
18347 /* FALLTHRU */
18348
18349 case RESULT_DECL:
18350 if (DECL_HAS_VALUE_EXPR_P (loc))
18351 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18352 want_address, context);
18353 /* FALLTHRU */
18354
18355 case FUNCTION_DECL:
18356 {
18357 rtx rtl;
18358 var_loc_list *loc_list = lookup_decl_loc (loc);
18359
18360 if (loc_list && loc_list->first)
18361 {
18362 list_ret = dw_loc_list (loc_list, loc, want_address);
18363 have_address = want_address != 0;
18364 break;
18365 }
18366 rtl = rtl_for_decl_location (loc);
18367 if (rtl == NULL_RTX)
18368 {
18369 if (TREE_CODE (loc) != FUNCTION_DECL
18370 && early_dwarf
18371 && current_function_decl
18372 && want_address != 1
18373 && ! DECL_IGNORED_P (loc)
18374 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18375 || POINTER_TYPE_P (TREE_TYPE (loc)))
18376 && DECL_CONTEXT (loc) == current_function_decl
18377 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18378 <= DWARF2_ADDR_SIZE))
18379 {
18380 dw_die_ref ref = lookup_decl_die (loc);
18381 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18382 if (ref)
18383 {
18384 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18385 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18386 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18387 }
18388 else
18389 {
18390 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18391 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18392 }
18393 break;
18394 }
18395 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18396 return 0;
18397 }
18398 else if (CONST_INT_P (rtl))
18399 {
18400 HOST_WIDE_INT val = INTVAL (rtl);
18401 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18402 val &= GET_MODE_MASK (DECL_MODE (loc));
18403 ret = int_loc_descriptor (val);
18404 }
18405 else if (GET_CODE (rtl) == CONST_STRING)
18406 {
18407 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18408 return 0;
18409 }
18410 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18411 ret = new_addr_loc_descr (rtl, dtprel_false);
18412 else
18413 {
18414 machine_mode mode, mem_mode;
18415
18416 /* Certain constructs can only be represented at top-level. */
18417 if (want_address == 2)
18418 {
18419 ret = loc_descriptor (rtl, VOIDmode,
18420 VAR_INIT_STATUS_INITIALIZED);
18421 have_address = 1;
18422 }
18423 else
18424 {
18425 mode = GET_MODE (rtl);
18426 mem_mode = VOIDmode;
18427 if (MEM_P (rtl))
18428 {
18429 mem_mode = mode;
18430 mode = get_address_mode (rtl);
18431 rtl = XEXP (rtl, 0);
18432 have_address = 1;
18433 }
18434 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18435 VAR_INIT_STATUS_INITIALIZED);
18436 }
18437 if (!ret)
18438 expansion_failed (loc, rtl,
18439 "failed to produce loc descriptor for rtl");
18440 }
18441 }
18442 break;
18443
18444 case MEM_REF:
18445 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18446 {
18447 have_address = 1;
18448 goto do_plus;
18449 }
18450 /* Fallthru. */
18451 case INDIRECT_REF:
18452 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18453 have_address = 1;
18454 break;
18455
18456 case TARGET_MEM_REF:
18457 case SSA_NAME:
18458 case DEBUG_EXPR_DECL:
18459 return NULL;
18460
18461 case COMPOUND_EXPR:
18462 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18463 context);
18464
18465 CASE_CONVERT:
18466 case VIEW_CONVERT_EXPR:
18467 case SAVE_EXPR:
18468 case MODIFY_EXPR:
18469 case NON_LVALUE_EXPR:
18470 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18471 context);
18472
18473 case COMPONENT_REF:
18474 case BIT_FIELD_REF:
18475 case ARRAY_REF:
18476 case ARRAY_RANGE_REF:
18477 case REALPART_EXPR:
18478 case IMAGPART_EXPR:
18479 {
18480 tree obj, offset;
18481 poly_int64 bitsize, bitpos, bytepos;
18482 machine_mode mode;
18483 int unsignedp, reversep, volatilep = 0;
18484
18485 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18486 &unsignedp, &reversep, &volatilep);
18487
18488 gcc_assert (obj != loc);
18489
18490 list_ret = loc_list_from_tree_1 (obj,
18491 want_address == 2
18492 && known_eq (bitpos, 0)
18493 && !offset ? 2 : 1,
18494 context);
18495 /* TODO: We can extract the value of a small expression via shifting even
18496 for a nonzero bitpos. */
18497 if (list_ret == 0)
18498 return 0;
18499 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18500 || !multiple_p (bitsize, BITS_PER_UNIT))
18501 {
18502 expansion_failed (loc, NULL_RTX,
18503 "bitfield access");
18504 return 0;
18505 }
18506
18507 if (offset != NULL_TREE)
18508 {
18509 /* Variable offset. */
18510 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18511 if (list_ret1 == 0)
18512 return 0;
18513 add_loc_list (&list_ret, list_ret1);
18514 if (!list_ret)
18515 return 0;
18516 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18517 }
18518
18519 HOST_WIDE_INT value;
18520 if (bytepos.is_constant (&value) && value > 0)
18521 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18522 value, 0));
18523 else if (maybe_ne (bytepos, 0))
18524 loc_list_plus_const (list_ret, bytepos);
18525
18526 have_address = 1;
18527 break;
18528 }
18529
18530 case INTEGER_CST:
18531 if ((want_address || !tree_fits_shwi_p (loc))
18532 && (ret = cst_pool_loc_descr (loc)))
18533 have_address = 1;
18534 else if (want_address == 2
18535 && tree_fits_shwi_p (loc)
18536 && (ret = address_of_int_loc_descriptor
18537 (int_size_in_bytes (TREE_TYPE (loc)),
18538 tree_to_shwi (loc))))
18539 have_address = 1;
18540 else if (tree_fits_shwi_p (loc))
18541 ret = int_loc_descriptor (tree_to_shwi (loc));
18542 else if (tree_fits_uhwi_p (loc))
18543 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18544 else
18545 {
18546 expansion_failed (loc, NULL_RTX,
18547 "Integer operand is not host integer");
18548 return 0;
18549 }
18550 break;
18551
18552 case CONSTRUCTOR:
18553 case REAL_CST:
18554 case STRING_CST:
18555 case COMPLEX_CST:
18556 if ((ret = cst_pool_loc_descr (loc)))
18557 have_address = 1;
18558 else if (TREE_CODE (loc) == CONSTRUCTOR)
18559 {
18560 tree type = TREE_TYPE (loc);
18561 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18562 unsigned HOST_WIDE_INT offset = 0;
18563 unsigned HOST_WIDE_INT cnt;
18564 constructor_elt *ce;
18565
18566 if (TREE_CODE (type) == RECORD_TYPE)
18567 {
18568 /* This is very limited, but it's enough to output
18569 pointers to member functions, as long as the
18570 referenced function is defined in the current
18571 translation unit. */
18572 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18573 {
18574 tree val = ce->value;
18575
18576 tree field = ce->index;
18577
18578 if (val)
18579 STRIP_NOPS (val);
18580
18581 if (!field || DECL_BIT_FIELD (field))
18582 {
18583 expansion_failed (loc, NULL_RTX,
18584 "bitfield in record type constructor");
18585 size = offset = (unsigned HOST_WIDE_INT)-1;
18586 ret = NULL;
18587 break;
18588 }
18589
18590 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18591 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18592 gcc_assert (pos + fieldsize <= size);
18593 if (pos < offset)
18594 {
18595 expansion_failed (loc, NULL_RTX,
18596 "out-of-order fields in record constructor");
18597 size = offset = (unsigned HOST_WIDE_INT)-1;
18598 ret = NULL;
18599 break;
18600 }
18601 if (pos > offset)
18602 {
18603 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18604 add_loc_descr (&ret, ret1);
18605 offset = pos;
18606 }
18607 if (val && fieldsize != 0)
18608 {
18609 ret1 = loc_descriptor_from_tree (val, want_address, context);
18610 if (!ret1)
18611 {
18612 expansion_failed (loc, NULL_RTX,
18613 "unsupported expression in field");
18614 size = offset = (unsigned HOST_WIDE_INT)-1;
18615 ret = NULL;
18616 break;
18617 }
18618 add_loc_descr (&ret, ret1);
18619 }
18620 if (fieldsize)
18621 {
18622 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18623 add_loc_descr (&ret, ret1);
18624 offset = pos + fieldsize;
18625 }
18626 }
18627
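	      /* Cover any tail padding with a final piece below; a DW_OP_piece
		 that is not preceded by a location description leaves those
		 bytes with no defined value, which is the intent for padding.  */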
18628 if (offset != size)
18629 {
18630 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18631 add_loc_descr (&ret, ret1);
18632 offset = size;
18633 }
18634
18635 have_address = !!want_address;
18636 }
18637 else
18638 expansion_failed (loc, NULL_RTX,
18639 "constructor of non-record type");
18640 }
18641 else
18642 /* We can construct small constants here using int_loc_descriptor. */
18643 expansion_failed (loc, NULL_RTX,
18644 "constructor or constant not in constant pool");
18645 break;
18646
18647 case TRUTH_AND_EXPR:
18648 case TRUTH_ANDIF_EXPR:
18649 case BIT_AND_EXPR:
18650 op = DW_OP_and;
18651 goto do_binop;
18652
18653 case TRUTH_XOR_EXPR:
18654 case BIT_XOR_EXPR:
18655 op = DW_OP_xor;
18656 goto do_binop;
18657
18658 case TRUTH_OR_EXPR:
18659 case TRUTH_ORIF_EXPR:
18660 case BIT_IOR_EXPR:
18661 op = DW_OP_or;
18662 goto do_binop;
18663
18664 case FLOOR_DIV_EXPR:
18665 case CEIL_DIV_EXPR:
18666 case ROUND_DIV_EXPR:
18667 case TRUNC_DIV_EXPR:
18668 case EXACT_DIV_EXPR:
18669 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18670 return 0;
18671 op = DW_OP_div;
18672 goto do_binop;
18673
18674 case MINUS_EXPR:
18675 op = DW_OP_minus;
18676 goto do_binop;
18677
18678 case FLOOR_MOD_EXPR:
18679 case CEIL_MOD_EXPR:
18680 case ROUND_MOD_EXPR:
18681 case TRUNC_MOD_EXPR:
18682 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18683 {
18684 op = DW_OP_mod;
18685 goto do_binop;
18686 }
18687 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18688 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18689 if (list_ret == 0 || list_ret1 == 0)
18690 return 0;
18691
18692 add_loc_list (&list_ret, list_ret1);
18693 if (list_ret == 0)
18694 return 0;
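      /* With the two operands a and b on the stack (b on top), the sequence
	 below computes the remainder as a - (a/b)*b:
	   over over  =>  a b a b
	   div        =>  a b a/b
	   mul        =>  a b*(a/b)
	   minus      =>  a - b*(a/b)  */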
18695 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18696 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18697 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18698 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18699 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18700 break;
18701
18702 case MULT_EXPR:
18703 op = DW_OP_mul;
18704 goto do_binop;
18705
18706 case LSHIFT_EXPR:
18707 op = DW_OP_shl;
18708 goto do_binop;
18709
18710 case RSHIFT_EXPR:
18711 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18712 goto do_binop;
18713
18714 case POINTER_PLUS_EXPR:
18715 case PLUS_EXPR:
18716 do_plus:
18717 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18718 {
18719 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18720 smarter to encode their opposite. The DW_OP_plus_uconst operation
18721 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18722 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18723 bytes, Y being the size of the operation that pushes the opposite
18724 of the addend. So let's choose the smallest representation. */
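	  /* For instance, with 8-byte addresses an addend of -1 read as an
	     unsigned HOST_WIDE_INT needs a 10-byte ULEB128, so DW_OP_plus_uconst
	     costs 11 bytes, whereas "DW_OP_lit1; DW_OP_minus" costs only 2.  */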
18725 const tree tree_addend = TREE_OPERAND (loc, 1);
18726 offset_int wi_addend;
18727 HOST_WIDE_INT shwi_addend;
18728 dw_loc_descr_ref loc_naddend;
18729
18730 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18731 if (list_ret == 0)
18732 return 0;
18733
18734 /* Try to get the literal to push. It is the opposite of the addend,
18735 so as we rely on wrapping during DWARF evaluation, first decode
18736 the literal as a "DWARF-sized" signed number. */
18737 wi_addend = wi::to_offset (tree_addend);
18738 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18739 shwi_addend = wi_addend.to_shwi ();
18740 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18741 ? int_loc_descriptor (-shwi_addend)
18742 : NULL;
18743
18744 if (loc_naddend != NULL
18745 && ((unsigned) size_of_uleb128 (shwi_addend)
18746 > size_of_loc_descr (loc_naddend)))
18747 {
18748 add_loc_descr_to_each (list_ret, loc_naddend);
18749 add_loc_descr_to_each (list_ret,
18750 new_loc_descr (DW_OP_minus, 0, 0));
18751 }
18752 else
18753 {
18754 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18755 {
18756 loc_naddend = loc_cur;
18757 loc_cur = loc_cur->dw_loc_next;
18758 ggc_free (loc_naddend);
18759 }
18760 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18761 }
18762 break;
18763 }
18764
18765 op = DW_OP_plus;
18766 goto do_binop;
18767
18768 case LE_EXPR:
18769 op = DW_OP_le;
18770 goto do_comp_binop;
18771
18772 case GE_EXPR:
18773 op = DW_OP_ge;
18774 goto do_comp_binop;
18775
18776 case LT_EXPR:
18777 op = DW_OP_lt;
18778 goto do_comp_binop;
18779
18780 case GT_EXPR:
18781 op = DW_OP_gt;
18782 goto do_comp_binop;
18783
18784 do_comp_binop:
18785 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18786 {
18787 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18788 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18789 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18790 TREE_CODE (loc));
18791 break;
18792 }
18793 else
18794 goto do_binop;
18795
18796 case EQ_EXPR:
18797 op = DW_OP_eq;
18798 goto do_binop;
18799
18800 case NE_EXPR:
18801 op = DW_OP_ne;
18802 goto do_binop;
18803
18804 do_binop:
18805 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18806 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18807 if (list_ret == 0 || list_ret1 == 0)
18808 return 0;
18809
18810 add_loc_list (&list_ret, list_ret1);
18811 if (list_ret == 0)
18812 return 0;
18813 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18814 break;
18815
18816 case TRUTH_NOT_EXPR:
18817 case BIT_NOT_EXPR:
18818 op = DW_OP_not;
18819 goto do_unop;
18820
18821 case ABS_EXPR:
18822 op = DW_OP_abs;
18823 goto do_unop;
18824
18825 case NEGATE_EXPR:
18826 op = DW_OP_neg;
18827 goto do_unop;
18828
18829 do_unop:
18830 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18831 if (list_ret == 0)
18832 return 0;
18833
18834 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18835 break;
18836
18837 case MIN_EXPR:
18838 case MAX_EXPR:
18839 {
18840 const enum tree_code code =
18841 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18842
18843 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18844 build2 (code, integer_type_node,
18845 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18846 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18847 }
18848
18849 /* fall through */
18850
18851 case COND_EXPR:
18852 {
18853 dw_loc_descr_ref lhs
18854 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18855 dw_loc_list_ref rhs
18856 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18857 dw_loc_descr_ref bra_node, jump_node, tmp;
18858
18859 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18860 if (list_ret == 0 || lhs == 0 || rhs == 0)
18861 return 0;
18862
18863 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18864 add_loc_descr_to_each (list_ret, bra_node);
18865
18866 add_loc_list (&list_ret, rhs);
18867 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18868 add_loc_descr_to_each (list_ret, jump_node);
18869
18870 add_loc_descr_to_each (list_ret, lhs);
18871 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18872 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18873
18874 /* ??? Need a node to point the skip at. Use a nop. */
18875 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18876 add_loc_descr_to_each (list_ret, tmp);
18877 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18878 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
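	/* The resulting expression has the shape

	     <condition> DW_OP_bra L1 <else-value> DW_OP_skip L2 L1: <then-value> L2: DW_OP_nop

	   i.e. DW_OP_bra pops the condition and transfers control to the THEN
	   branch when it is non-zero; otherwise the ELSE branch is evaluated
	   and DW_OP_skip jumps over the THEN branch to the terminating nop.  */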
18879 }
18880 break;
18881
18882 case FIX_TRUNC_EXPR:
18883 return 0;
18884
18885 default:
18886 /* Leave front-end specific codes as simply unknown. This comes
18887 up, for instance, with the C STMT_EXPR. */
18888 if ((unsigned int) TREE_CODE (loc)
18889 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18890 {
18891 expansion_failed (loc, NULL_RTX,
18892 "language specific tree node");
18893 return 0;
18894 }
18895
18896 	/* Otherwise this is a generic code; we should have listed all of
18897 	   these explicitly.  We forgot one.  */
18898 if (flag_checking)
18899 gcc_unreachable ();
18900
18901 /* In a release build, we want to degrade gracefully: better to
18902 generate incomplete debugging information than to crash. */
18903 return NULL;
18904 }
18905
18906 if (!ret && !list_ret)
18907 return 0;
18908
18909 if (want_address == 2 && !have_address
18910 && (dwarf_version >= 4 || !dwarf_strict))
18911 {
18912 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18913 {
18914 expansion_failed (loc, NULL_RTX,
18915 "DWARF address size mismatch");
18916 return 0;
18917 }
18918 if (ret)
18919 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18920 else
18921 add_loc_descr_to_each (list_ret,
18922 new_loc_descr (DW_OP_stack_value, 0, 0));
18923 have_address = 1;
18924 }
18925 /* Show if we can't fill the request for an address. */
18926 if (want_address && !have_address)
18927 {
18928 expansion_failed (loc, NULL_RTX,
18929 "Want address and only have value");
18930 return 0;
18931 }
18932
18933 gcc_assert (!ret || !list_ret);
18934
18935 /* If we've got an address and don't want one, dereference. */
18936 if (!want_address && have_address)
18937 {
18938 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18939
18940 if (size > DWARF2_ADDR_SIZE || size == -1)
18941 {
18942 expansion_failed (loc, NULL_RTX,
18943 "DWARF address size mismatch");
18944 return 0;
18945 }
18946 else if (size == DWARF2_ADDR_SIZE)
18947 op = DW_OP_deref;
18948 else
18949 op = DW_OP_deref_size;
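	  /* E.g. a 2-byte value on a target with 8-byte addresses is fetched
	     with "DW_OP_deref_size 2" rather than a full-width DW_OP_deref.  */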
18950
18951 if (ret)
18952 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18953 else
18954 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18955 }
18956 if (ret)
18957 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18958
18959 return list_ret;
18960 }
18961
18962 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18963 expressions. */
18964
18965 static dw_loc_list_ref
18966 loc_list_from_tree (tree loc, int want_address,
18967 struct loc_descr_context *context)
18968 {
18969 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18970
18971 for (dw_loc_list_ref loc_cur = result;
18972 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18973 loc_descr_without_nops (loc_cur->expr);
18974 return result;
18975 }
18976
18977 /* Same as above but return only single location expression. */
18978 static dw_loc_descr_ref
18979 loc_descriptor_from_tree (tree loc, int want_address,
18980 struct loc_descr_context *context)
18981 {
18982 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18983 if (!ret)
18984 return NULL;
18985 if (ret->dw_loc_next)
18986 {
18987 expansion_failed (loc, NULL_RTX,
18988 "Location list where only loc descriptor needed");
18989 return NULL;
18990 }
18991 return ret->expr;
18992 }
18993
18994 /* Given a value, round it up to the lowest multiple of `boundary'
18995 which is not less than the value itself. */
18996
18997 static inline HOST_WIDE_INT
18998 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18999 {
19000 return (((value + boundary - 1) / boundary) * boundary);
19001 }
19002
19003 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19004 pointer to the declared type for the relevant field variable, or return
19005 `integer_type_node' if the given node turns out to be an
19006 ERROR_MARK node. */
19007
19008 static inline tree
19009 field_type (const_tree decl)
19010 {
19011 tree type;
19012
19013 if (TREE_CODE (decl) == ERROR_MARK)
19014 return integer_type_node;
19015
19016 type = DECL_BIT_FIELD_TYPE (decl);
19017 if (type == NULL_TREE)
19018 type = TREE_TYPE (decl);
19019
19020 return type;
19021 }
19022
19023 /* Given a pointer to a tree node, return the alignment in bits for
19024 it, or else return BITS_PER_WORD if the node actually turns out to
19025 be an ERROR_MARK node. */
19026
19027 static inline unsigned
19028 simple_type_align_in_bits (const_tree type)
19029 {
19030 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19031 }
19032
19033 static inline unsigned
19034 simple_decl_align_in_bits (const_tree decl)
19035 {
19036 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19037 }
19038
19039 /* Return the result of rounding T up to ALIGN. */
19040
19041 static inline offset_int
19042 round_up_to_align (const offset_int &t, unsigned int align)
19043 {
19044 return wi::udiv_trunc (t + align - 1, align) * align;
19045 }
19046
19047 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19048 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19049 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19050 if we fail to return the size in one of these two forms. */
19051
19052 static dw_loc_descr_ref
19053 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19054 {
19055 tree tree_size;
19056 struct loc_descr_context ctx;
19057
19058 	 /* Prefer to return a constant integer, if possible.  */
19059 *cst_size = int_size_in_bytes (type);
19060 if (*cst_size != -1)
19061 return NULL;
19062
19063 ctx.context_type = const_cast<tree> (type);
19064 ctx.base_decl = NULL_TREE;
19065 ctx.dpi = NULL;
19066 ctx.placeholder_arg = false;
19067 ctx.placeholder_seen = false;
19068
19069 type = TYPE_MAIN_VARIANT (type);
19070 tree_size = TYPE_SIZE_UNIT (type);
19071 return ((tree_size != NULL_TREE)
19072 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19073 : NULL);
19074 }
19075
19076 /* Helper structure for RECORD_TYPE processing. */
19077 struct vlr_context
19078 {
19079 /* Root RECORD_TYPE. It is needed to generate data member location
19080 descriptions in variable-length records (VLR), but also to cope with
19081 variants, which are composed of nested structures multiplexed with
19082 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19083 function processing a FIELD_DECL, it is required to be non null. */
19084 tree struct_type;
19085 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19086 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19087 this variant part as part of the root record (in storage units). For
19088 regular records, it must be NULL_TREE. */
19089 tree variant_part_offset;
19090 };
19091
19092 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19093 addressed byte of the "containing object" for the given FIELD_DECL. If
19094 possible, return a native constant through CST_OFFSET (in which case NULL is
19095 returned); otherwise return a DWARF expression that computes the offset.
19096
19097 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19098 that offset is, either because the argument turns out to be a pointer to an
19099 ERROR_MARK node, or because the offset expression is too complex for us.
19100
19101 CTX is required: see the comment for VLR_CONTEXT. */
19102
19103 static dw_loc_descr_ref
19104 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19105 HOST_WIDE_INT *cst_offset)
19106 {
19107 tree tree_result;
19108 dw_loc_list_ref loc_result;
19109
19110 *cst_offset = 0;
19111
19112 if (TREE_CODE (decl) == ERROR_MARK)
19113 return NULL;
19114 else
19115 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19116
19117 /* We cannot handle variable bit offsets at the moment, so abort if it's the
19118 case. */
19119 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19120 return NULL;
19121
19122 #ifdef PCC_BITFIELD_TYPE_MATTERS
19123 /* We used to handle only constant offsets in all cases. Now, we handle
19124 properly dynamic byte offsets only when PCC bitfield type doesn't
19125 matter. */
19126 if (PCC_BITFIELD_TYPE_MATTERS
19127 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19128 {
19129 offset_int object_offset_in_bits;
19130 offset_int object_offset_in_bytes;
19131 offset_int bitpos_int;
19132 tree type;
19133 tree field_size_tree;
19134 offset_int deepest_bitpos;
19135 offset_int field_size_in_bits;
19136 unsigned int type_align_in_bits;
19137 unsigned int decl_align_in_bits;
19138 offset_int type_size_in_bits;
19139
19140 bitpos_int = wi::to_offset (bit_position (decl));
19141 type = field_type (decl);
19142 type_size_in_bits = offset_int_type_size_in_bits (type);
19143 type_align_in_bits = simple_type_align_in_bits (type);
19144
19145 field_size_tree = DECL_SIZE (decl);
19146
19147 /* The size could be unspecified if there was an error, or for
19148 a flexible array member. */
19149 if (!field_size_tree)
19150 field_size_tree = bitsize_zero_node;
19151
19152 /* If the size of the field is not constant, use the type size. */
19153 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19154 field_size_in_bits = wi::to_offset (field_size_tree);
19155 else
19156 field_size_in_bits = type_size_in_bits;
19157
19158 decl_align_in_bits = simple_decl_align_in_bits (decl);
19159
19160 /* The GCC front-end doesn't make any attempt to keep track of the
19161 starting bit offset (relative to the start of the containing
19162 structure type) of the hypothetical "containing object" for a
19163 bit-field. Thus, when computing the byte offset value for the
19164 start of the "containing object" of a bit-field, we must deduce
19165 this information on our own. This can be rather tricky to do in
19166 some cases. For example, handling the following structure type
19167 definition when compiling for an i386/i486 target (which only
19168 aligns long long's to 32-bit boundaries) can be very tricky:
19169
19170 struct S { int field1; long long field2:31; };
19171
19172 Fortunately, there is a simple rule-of-thumb which can be used
19173 in such cases. When compiling for an i386/i486, GCC will
19174 allocate 8 bytes for the structure shown above. It decides to
19175 do this based upon one simple rule for bit-field allocation.
19176 GCC allocates each "containing object" for each bit-field at
19177 the first (i.e. lowest addressed) legitimate alignment boundary
19178 (based upon the required minimum alignment for the declared
19179 type of the field) which it can possibly use, subject to the
19180 condition that there is still enough available space remaining
19181 in the containing object (when allocated at the selected point)
19182 to fully accommodate all of the bits of the bit-field itself.
19183
19184 This simple rule makes it obvious why GCC allocates 8 bytes for
19185 each object of the structure type shown above. When looking
19186 for a place to allocate the "containing object" for `field2',
19187 the compiler simply tries to allocate a 64-bit "containing
19188 object" at each successive 32-bit boundary (starting at zero)
19189 	 until it finds a place to allocate that 64-bit field such that
19190 at least 31 contiguous (and previously unallocated) bits remain
19191 within that selected 64 bit field. (As it turns out, for the
19192 example above, the compiler finds it is OK to allocate the
19193 "containing object" 64-bit field at bit-offset zero within the
19194 structure type.)
19195
19196 Here we attempt to work backwards from the limited set of facts
19197 we're given, and we try to deduce from those facts, where GCC
19198 must have believed that the containing object started (within
19199 the structure type). The value we deduce is then used (by the
19200 callers of this routine) to generate DW_AT_location and
19201 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19202 the case of DW_AT_location, regular fields as well). */
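	 /* For the i386 example above: bit_position (field2) is 32 and its
	    DECL_SIZE is 31, so deepest_bitpos is 63.  Subtracting the 64-bit
	    type size yields -1, which round_up_to_align brings back to 0 for
	    the 32-bit type alignment.  0 is not greater than the original bit
	    position, so the containing object is taken to start at bit (and
	    byte) offset 0 within the structure.  */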
19203
19204 /* Figure out the bit-distance from the start of the structure to
19205 the "deepest" bit of the bit-field. */
19206 deepest_bitpos = bitpos_int + field_size_in_bits;
19207
19208 /* This is the tricky part. Use some fancy footwork to deduce
19209 where the lowest addressed bit of the containing object must
19210 be. */
19211 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19212
19213 /* Round up to type_align by default. This works best for
19214 bitfields. */
19215 object_offset_in_bits
19216 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19217
19218 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19219 {
19220 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19221
19222 /* Round up to decl_align instead. */
19223 object_offset_in_bits
19224 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19225 }
19226
19227 object_offset_in_bytes
19228 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19229 if (ctx->variant_part_offset == NULL_TREE)
19230 {
19231 *cst_offset = object_offset_in_bytes.to_shwi ();
19232 return NULL;
19233 }
19234 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19235 }
19236 else
19237 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19238 tree_result = byte_position (decl);
19239
19240 if (ctx->variant_part_offset != NULL_TREE)
19241 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19242 ctx->variant_part_offset, tree_result);
19243
19244 	 /* If the byte offset is a constant, it's simpler to handle a native
19245 constant rather than a DWARF expression. */
19246 if (TREE_CODE (tree_result) == INTEGER_CST)
19247 {
19248 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19249 return NULL;
19250 }
19251 struct loc_descr_context loc_ctx = {
19252 ctx->struct_type, /* context_type */
19253 NULL_TREE, /* base_decl */
19254 NULL, /* dpi */
19255 false, /* placeholder_arg */
19256 false /* placeholder_seen */
19257 };
19258 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19259
19260 /* We want a DWARF expression: abort if we only have a location list with
19261 multiple elements. */
19262 if (!loc_result || !single_element_loc_list_p (loc_result))
19263 return NULL;
19264 else
19265 return loc_result->expr;
19266 }
19267
19268 /* The following routines define various Dwarf attributes and any data
19269 associated with them. */
19270
19271 /* Add a location description attribute value to a DIE.
19272
19273 This emits location attributes suitable for whole variables and
19274 whole parameters. Note that the location attributes for struct fields are
19275 generated by the routine `data_member_location_attribute' below. */
19276
19277 static inline void
19278 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19279 dw_loc_list_ref descr)
19280 {
19281 bool check_no_locviews = true;
19282 if (descr == 0)
19283 return;
19284 if (single_element_loc_list_p (descr))
19285 add_AT_loc (die, attr_kind, descr->expr);
19286 else
19287 {
19288 add_AT_loc_list (die, attr_kind, descr);
19289 gcc_assert (descr->ll_symbol);
19290 if (attr_kind == DW_AT_location && descr->vl_symbol
19291 && dwarf2out_locviews_in_attribute ())
19292 {
19293 add_AT_view_list (die, DW_AT_GNU_locviews);
19294 check_no_locviews = false;
19295 }
19296 }
19297
19298 if (check_no_locviews)
19299 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19300 }
19301
19302 /* Add DW_AT_accessibility attribute to DIE if needed. */
19303
19304 static void
19305 add_accessibility_attribute (dw_die_ref die, tree decl)
19306 {
19307 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19308 children, otherwise the default is DW_ACCESS_public. In DWARF2
19309 the default has always been DW_ACCESS_public. */
19310 if (TREE_PROTECTED (decl))
19311 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19312 else if (TREE_PRIVATE (decl))
19313 {
19314 if (dwarf_version == 2
19315 || die->die_parent == NULL
19316 || die->die_parent->die_tag != DW_TAG_class_type)
19317 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19318 }
19319 else if (dwarf_version > 2
19320 && die->die_parent
19321 && die->die_parent->die_tag == DW_TAG_class_type)
19322 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19323 }
19324
19325 /* Attach the specialized form of location attribute used for data members of
19326 struct and union types. In the special case of a FIELD_DECL node which
19327 represents a bit-field, the "offset" part of this special location
19328 descriptor must indicate the distance in bytes from the lowest-addressed
19329 byte of the containing struct or union type to the lowest-addressed byte of
19330 the "containing object" for the bit-field. (See the `field_byte_offset'
19331 function above).
19332
19333 For any given bit-field, the "containing object" is a hypothetical object
19334 (of some integral or enum type) within which the given bit-field lives. The
19335 type of this hypothetical "containing object" is always the same as the
19336 declared type of the individual bit-field itself (for GCC anyway... the
19337 DWARF spec doesn't actually mandate this). Note that it is the size (in
19338 bytes) of the hypothetical "containing object" which will be given in the
19339 DW_AT_byte_size attribute for this bit-field. (See the
19340 `byte_size_attribute' function below.) It is also used when calculating the
19341 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19342 function below.)
19343
19344 CTX is required: see the comment for VLR_CONTEXT. */
19345
19346 static void
19347 add_data_member_location_attribute (dw_die_ref die,
19348 tree decl,
19349 struct vlr_context *ctx)
19350 {
19351 HOST_WIDE_INT offset;
19352 dw_loc_descr_ref loc_descr = 0;
19353
19354 if (TREE_CODE (decl) == TREE_BINFO)
19355 {
19356 /* We're working on the TAG_inheritance for a base class. */
19357 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19358 {
19359 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19360 aren't at a fixed offset from all (sub)objects of the same
19361 type. We need to extract the appropriate offset from our
19362 vtable. The following dwarf expression means
19363
19364 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19365
19366 This is specific to the V3 ABI, of course. */
19367
19368 dw_loc_descr_ref tmp;
19369
19370 /* Make a copy of the object address. */
19371 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19372 add_loc_descr (&loc_descr, tmp);
19373
19374 /* Extract the vtable address. */
19375 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19376 add_loc_descr (&loc_descr, tmp);
19377
19378 /* Calculate the address of the offset. */
19379 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19380 gcc_assert (offset < 0);
19381
19382 tmp = int_loc_descriptor (-offset);
19383 add_loc_descr (&loc_descr, tmp);
19384 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19385 add_loc_descr (&loc_descr, tmp);
19386
19387 /* Extract the offset. */
19388 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19389 add_loc_descr (&loc_descr, tmp);
19390
19391 /* Add it to the object address. */
19392 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19393 add_loc_descr (&loc_descr, tmp);
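	  /* Reading the sequence above with ObAddr initially on the stack:
	     dup/deref load the vtable pointer, the pushed constant and
	     DW_OP_minus step back to the vbase offset slot in the vtable, the
	     second deref fetches the stored offset, and DW_OP_plus adds it to
	     the original object address, yielding BaseAddr as in the formula
	     above.  */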
19394 }
19395 else
19396 offset = tree_to_shwi (BINFO_OFFSET (decl));
19397 }
19398 else
19399 {
19400 loc_descr = field_byte_offset (decl, ctx, &offset);
19401
19402 /* If loc_descr is available then we know the field offset is dynamic.
19403 However, GDB does not handle dynamic field offsets very well at the
19404 moment. */
19405 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19406 {
19407 loc_descr = NULL;
19408 offset = 0;
19409 }
19410
19411 	 /* Data member location evaluation starts with the base address on the
19412 stack. Compute the field offset and add it to this base address. */
19413 else if (loc_descr != NULL)
19414 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19415 }
19416
19417 if (! loc_descr)
19418 {
19419 	 /* While DW_AT_data_bit_offset was already added in DWARF4, consumers
19420 	    such as GDB only gained support for it in November 2016.  For DWARF5
19421 	    we need newer debug info consumers anyway.  We might change this
19422 	    to dwarf_version >= 4 once most consumers have caught up.  */
19423 if (dwarf_version >= 5
19424 && TREE_CODE (decl) == FIELD_DECL
19425 && DECL_BIT_FIELD_TYPE (decl))
19426 {
19427 tree off = bit_position (decl);
19428 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19429 {
19430 remove_AT (die, DW_AT_byte_size);
19431 remove_AT (die, DW_AT_bit_offset);
19432 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19433 return;
19434 }
19435 }
19436 if (dwarf_version > 2)
19437 {
19438 /* Don't need to output a location expression, just the constant. */
19439 if (offset < 0)
19440 add_AT_int (die, DW_AT_data_member_location, offset);
19441 else
19442 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19443 return;
19444 }
19445 else
19446 {
19447 enum dwarf_location_atom op;
19448
19449 /* The DWARF2 standard says that we should assume that the structure
19450 address is already on the stack, so we can specify a structure
19451 field address by using DW_OP_plus_uconst. */
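	  /* So a member at byte offset 8, say, ends up described simply as
	     "DW_OP_plus_uconst 8" applied to that implicit base address.  */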
19452 op = DW_OP_plus_uconst;
19453 loc_descr = new_loc_descr (op, offset, 0);
19454 }
19455 }
19456
19457 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19458 }
19459
19460 /* Writes integer values to dw_vec_const array. */
19461
19462 static void
19463 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19464 {
19465 while (size != 0)
19466 {
19467 *dest++ = val & 0xff;
19468 val >>= 8;
19469 --size;
19470 }
19471 }
19472
19473 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19474
19475 static HOST_WIDE_INT
19476 extract_int (const unsigned char *src, unsigned int size)
19477 {
19478 HOST_WIDE_INT val = 0;
19479
19480 src += size;
19481 while (size != 0)
19482 {
19483 val <<= 8;
19484 val |= *--src & 0xff;
19485 --size;
19486 }
19487 return val;
19488 }
19489
19490 /* Writes wide_int values to dw_vec_const array. */
19491
19492 static void
19493 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19494 {
19495 int i;
19496
19497 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19498 {
19499 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19500 return;
19501 }
19502
19503 /* We'd have to extend this code to support odd sizes. */
19504 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19505
19506 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19507
19508 if (WORDS_BIG_ENDIAN)
19509 for (i = n - 1; i >= 0; i--)
19510 {
19511 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19512 dest += sizeof (HOST_WIDE_INT);
19513 }
19514 else
19515 for (i = 0; i < n; i++)
19516 {
19517 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19518 dest += sizeof (HOST_WIDE_INT);
19519 }
19520 }
19521
19522 /* Writes floating point values to dw_vec_const array. */
19523
19524 static void
19525 insert_float (const_rtx rtl, unsigned char *array)
19526 {
19527 long val[4];
19528 int i;
19529 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19530
19531 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19532
19533 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19534 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19535 {
19536 insert_int (val[i], 4, array);
19537 array += 4;
19538 }
19539 }
19540
19541 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19542 does not have a "location" either in memory or in a register. These
19543 things can arise in GNU C when a constant is passed as an actual parameter
19544 to an inlined function. They can also arise in C++ where declared
19545 constants do not necessarily get memory "homes". */
19546
19547 static bool
19548 add_const_value_attribute (dw_die_ref die, rtx rtl)
19549 {
19550 switch (GET_CODE (rtl))
19551 {
19552 case CONST_INT:
19553 {
19554 HOST_WIDE_INT val = INTVAL (rtl);
19555
19556 if (val < 0)
19557 add_AT_int (die, DW_AT_const_value, val);
19558 else
19559 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19560 }
19561 return true;
19562
19563 case CONST_WIDE_INT:
19564 {
19565 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19566 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19567 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19568 wide_int w = wi::zext (w1, prec);
19569 add_AT_wide (die, DW_AT_const_value, w);
19570 }
19571 return true;
19572
19573 case CONST_DOUBLE:
19574 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19575 floating-point constant. A CONST_DOUBLE is used whenever the
19576 constant requires more than one word in order to be adequately
19577 represented. */
19578 if (TARGET_SUPPORTS_WIDE_INT == 0
19579 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19580 add_AT_double (die, DW_AT_const_value,
19581 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19582 else
19583 {
19584 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19585 unsigned int length = GET_MODE_SIZE (mode);
19586 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19587
19588 insert_float (rtl, array);
19589 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19590 }
19591 return true;
19592
19593 case CONST_VECTOR:
19594 {
19595 unsigned int length;
19596 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19597 return false;
19598
19599 machine_mode mode = GET_MODE (rtl);
19600 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19601 unsigned char *array
19602 = ggc_vec_alloc<unsigned char> (length * elt_size);
19603 unsigned int i;
19604 unsigned char *p;
19605 machine_mode imode = GET_MODE_INNER (mode);
19606
19607 switch (GET_MODE_CLASS (mode))
19608 {
19609 case MODE_VECTOR_INT:
19610 for (i = 0, p = array; i < length; i++, p += elt_size)
19611 {
19612 rtx elt = CONST_VECTOR_ELT (rtl, i);
19613 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19614 }
19615 break;
19616
19617 case MODE_VECTOR_FLOAT:
19618 for (i = 0, p = array; i < length; i++, p += elt_size)
19619 {
19620 rtx elt = CONST_VECTOR_ELT (rtl, i);
19621 insert_float (elt, p);
19622 }
19623 break;
19624
19625 default:
19626 gcc_unreachable ();
19627 }
19628
19629 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19630 }
19631 return true;
19632
19633 case CONST_STRING:
19634 if (dwarf_version >= 4 || !dwarf_strict)
19635 {
19636 dw_loc_descr_ref loc_result;
19637 resolve_one_addr (&rtl);
19638 rtl_addr:
19639 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19640 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19641 add_AT_loc (die, DW_AT_location, loc_result);
19642 vec_safe_push (used_rtx_array, rtl);
19643 return true;
19644 }
19645 return false;
19646
19647 case CONST:
19648 if (CONSTANT_P (XEXP (rtl, 0)))
19649 return add_const_value_attribute (die, XEXP (rtl, 0));
19650 /* FALLTHROUGH */
19651 case SYMBOL_REF:
19652 if (!const_ok_for_output (rtl))
19653 return false;
19654 /* FALLTHROUGH */
19655 case LABEL_REF:
19656 if (dwarf_version >= 4 || !dwarf_strict)
19657 goto rtl_addr;
19658 return false;
19659
19660 case PLUS:
19661 /* In cases where an inlined instance of an inline function is passed
19662 the address of an `auto' variable (which is local to the caller) we
19663 can get a situation where the DECL_RTL of the artificial local
19664 variable (for the inlining) which acts as a stand-in for the
19665 corresponding formal parameter (of the inline function) will look
19666 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19667 exactly a compile-time constant expression, but it isn't the address
19668 of the (artificial) local variable either. Rather, it represents the
19669 *value* which the artificial local variable always has during its
19670 lifetime. We currently have no way to represent such quasi-constant
19671 values in Dwarf, so for now we just punt and generate nothing. */
19672 return false;
19673
19674 case HIGH:
19675 case CONST_FIXED:
19676 return false;
19677
19678 case MEM:
19679 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19680 && MEM_READONLY_P (rtl)
19681 && GET_MODE (rtl) == BLKmode)
19682 {
19683 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19684 return true;
19685 }
19686 return false;
19687
19688 default:
19689 /* No other kinds of rtx should be possible here. */
19690 gcc_unreachable ();
19691 }
19692 return false;
19693 }
19694
19695 /* Determine whether the evaluation of EXPR references any variables
19696 or functions which aren't otherwise used (and therefore may not be
19697 output). */
19698 static tree
19699 reference_to_unused (tree * tp, int * walk_subtrees,
19700 void * data ATTRIBUTE_UNUSED)
19701 {
19702 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19703 *walk_subtrees = 0;
19704
19705 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19706 && ! TREE_ASM_WRITTEN (*tp))
19707 return *tp;
19708 /* ??? The C++ FE emits debug information for using decls, so
19709 putting gcc_unreachable here falls over. See PR31899. For now
19710 be conservative. */
19711 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19712 return *tp;
19713 else if (VAR_P (*tp))
19714 {
19715 varpool_node *node = varpool_node::get (*tp);
19716 if (!node || !node->definition)
19717 return *tp;
19718 }
19719 else if (TREE_CODE (*tp) == FUNCTION_DECL
19720 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19721 {
19722 /* The call graph machinery must have finished analyzing,
19723 optimizing and gimplifying the CU by now.
19724 So if *TP has no call graph node associated
19725 to it, it means *TP will not be emitted. */
19726 if (!cgraph_node::get (*tp))
19727 return *tp;
19728 }
19729 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19730 return *tp;
19731
19732 return NULL_TREE;
19733 }
19734
19735 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19736 for use in a later add_const_value_attribute call. */
19737
19738 static rtx
19739 rtl_for_decl_init (tree init, tree type)
19740 {
19741 rtx rtl = NULL_RTX;
19742
19743 STRIP_NOPS (init);
19744
19745 /* If a variable is initialized with a string constant without embedded
19746 zeros, build CONST_STRING. */
19747 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19748 {
19749 tree enttype = TREE_TYPE (type);
19750 tree domain = TYPE_DOMAIN (type);
19751 scalar_int_mode mode;
19752
19753 if (is_int_mode (TYPE_MODE (enttype), &mode)
19754 && GET_MODE_SIZE (mode) == 1
19755 && domain
19756 && TYPE_MAX_VALUE (domain)
19757 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19758 && integer_zerop (TYPE_MIN_VALUE (domain))
19759 && compare_tree_int (TYPE_MAX_VALUE (domain),
19760 TREE_STRING_LENGTH (init) - 1) == 0
19761 && ((size_t) TREE_STRING_LENGTH (init)
19762 == strlen (TREE_STRING_POINTER (init)) + 1))
19763 {
19764 rtl = gen_rtx_CONST_STRING (VOIDmode,
19765 ggc_strdup (TREE_STRING_POINTER (init)));
19766 rtl = gen_rtx_MEM (BLKmode, rtl);
19767 MEM_READONLY_P (rtl) = 1;
19768 }
19769 }
19770 /* Other aggregates, and complex values, could be represented using
19771 CONCAT: FIXME! */
19772 else if (AGGREGATE_TYPE_P (type)
19773 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19774 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19775 || TREE_CODE (type) == COMPLEX_TYPE)
19776 ;
19777 /* Vectors only work if their mode is supported by the target.
19778 FIXME: generic vectors ought to work too. */
19779 else if (TREE_CODE (type) == VECTOR_TYPE
19780 && !VECTOR_MODE_P (TYPE_MODE (type)))
19781 ;
19782 /* If the initializer is something that we know will expand into an
19783 immediate RTL constant, expand it now. We must be careful not to
19784 reference variables which won't be output. */
19785 else if (initializer_constant_valid_p (init, type)
19786 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19787 {
19788 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19789 possible. */
19790 if (TREE_CODE (type) == VECTOR_TYPE)
19791 switch (TREE_CODE (init))
19792 {
19793 case VECTOR_CST:
19794 break;
19795 case CONSTRUCTOR:
19796 if (TREE_CONSTANT (init))
19797 {
19798 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19799 bool constant_p = true;
19800 tree value;
19801 unsigned HOST_WIDE_INT ix;
19802
19803 /* Even when ctor is constant, it might contain non-*_CST
19804 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19805 belong into VECTOR_CST nodes. */
19806 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19807 if (!CONSTANT_CLASS_P (value))
19808 {
19809 constant_p = false;
19810 break;
19811 }
19812
19813 if (constant_p)
19814 {
19815 init = build_vector_from_ctor (type, elts);
19816 break;
19817 }
19818 }
19819 /* FALLTHRU */
19820
19821 default:
19822 return NULL;
19823 }
19824
19825 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19826
19827 /* If expand_expr returns a MEM, it wasn't immediate. */
19828 gcc_assert (!rtl || !MEM_P (rtl));
19829 }
19830
19831 return rtl;
19832 }
19833
19834 /* Generate RTL for the variable DECL to represent its location. */
19835
19836 static rtx
19837 rtl_for_decl_location (tree decl)
19838 {
19839 rtx rtl;
19840
19841 /* Here we have to decide where we are going to say the parameter "lives"
19842 (as far as the debugger is concerned). We only have a couple of
19843 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19844
19845 DECL_RTL normally indicates where the parameter lives during most of the
19846 activation of the function. If optimization is enabled however, this
19847 could be either NULL or else a pseudo-reg. Both of those cases indicate
19848 that the parameter doesn't really live anywhere (as far as the code
19849 generation parts of GCC are concerned) during most of the function's
19850 activation. That will happen (for example) if the parameter is never
19851 referenced within the function.
19852
19853 We could just generate a location descriptor here for all non-NULL
19854 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19855 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19856 where DECL_RTL is NULL or is a pseudo-reg.
19857
19858 Note however that we can only get away with using DECL_INCOMING_RTL as
19859 a backup substitute for DECL_RTL in certain limited cases. In cases
19860 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19861 we can be sure that the parameter was passed using the same type as it is
19862 declared to have within the function, and that its DECL_INCOMING_RTL
19863 points us to a place where a value of that type is passed.
19864
19865 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19866 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19867 because in these cases DECL_INCOMING_RTL points us to a value of some
19868 type which is *different* from the type of the parameter itself. Thus,
19869 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19870 such cases, the debugger would end up (for example) trying to fetch a
19871 `float' from a place which actually contains the first part of a
19872 `double'. That would lead to really incorrect and confusing
19873 output at debug-time.
19874
19875 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19876 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19877 are a couple of exceptions however. On little-endian machines we can
19878 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19879 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19880 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19881 when (on a little-endian machine) a non-prototyped function has a
19882 parameter declared to be of type `short' or `char'. In such cases,
19883 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19884 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19885 passed `int' value. If the debugger then uses that address to fetch
19886 a `short' or a `char' (on a little-endian machine) the result will be
19887 the correct data, so we allow for such exceptional cases below.
19888
19889 Note that our goal here is to describe the place where the given formal
19890 parameter lives during most of the function's activation (i.e. between the
19891 end of the prologue and the start of the epilogue). We'll do that as best
19892 as we can. Note however that if the given formal parameter is modified
19893 sometime during the execution of the function, then a stack backtrace (at
19894 debug-time) will show the function as having been called with the *new*
19895 value rather than the value which was originally passed in. This happens
19896 rarely enough that it is not a major problem, but it *is* a problem, and
19897 I'd like to fix it.
19898
19899 A future version of dwarf2out.c may generate two additional attributes for
19900 any given DW_TAG_formal_parameter DIE which will describe the "passed
19901 type" and the "passed location" for the given formal parameter in addition
19902 to the attributes we now generate to indicate the "declared type" and the
19903 "active location" for each parameter. This additional set of attributes
19904 could be used by debuggers for stack backtraces. Separately, note that
19905 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19906 This happens (for example) for inlined-instances of inline function formal
19907 parameters which are never referenced. This really shouldn't be
19908 happening. All PARM_DECL nodes should get valid non-NULL
19909 DECL_INCOMING_RTL values. FIXME. */
19910
19911 /* Use DECL_RTL as the "location" unless we find something better. */
19912 rtl = DECL_RTL_IF_SET (decl);
19913
19914 /* When generating abstract instances, ignore everything except
19915 constants, symbols living in memory, and symbols living in
19916 fixed registers. */
19917 if (! reload_completed)
19918 {
19919 if (rtl
19920 && (CONSTANT_P (rtl)
19921 || (MEM_P (rtl)
19922 && CONSTANT_P (XEXP (rtl, 0)))
19923 || (REG_P (rtl)
19924 && VAR_P (decl)
19925 && TREE_STATIC (decl))))
19926 {
19927 rtl = targetm.delegitimize_address (rtl);
19928 return rtl;
19929 }
19930 rtl = NULL_RTX;
19931 }
19932 else if (TREE_CODE (decl) == PARM_DECL)
19933 {
19934 if (rtl == NULL_RTX
19935 || is_pseudo_reg (rtl)
19936 || (MEM_P (rtl)
19937 && is_pseudo_reg (XEXP (rtl, 0))
19938 && DECL_INCOMING_RTL (decl)
19939 && MEM_P (DECL_INCOMING_RTL (decl))
19940 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19941 {
19942 tree declared_type = TREE_TYPE (decl);
19943 tree passed_type = DECL_ARG_TYPE (decl);
19944 machine_mode dmode = TYPE_MODE (declared_type);
19945 machine_mode pmode = TYPE_MODE (passed_type);
19946
19947 /* This decl represents a formal parameter which was optimized out.
19948 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19949 all cases where (rtl == NULL_RTX) just below. */
19950 if (dmode == pmode)
19951 rtl = DECL_INCOMING_RTL (decl);
19952 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19953 && SCALAR_INT_MODE_P (dmode)
19954 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19955 && DECL_INCOMING_RTL (decl))
19956 {
19957 rtx inc = DECL_INCOMING_RTL (decl);
19958 if (REG_P (inc))
19959 rtl = inc;
19960 else if (MEM_P (inc))
19961 {
19962 if (BYTES_BIG_ENDIAN)
19963 rtl = adjust_address_nv (inc, dmode,
19964 GET_MODE_SIZE (pmode)
19965 - GET_MODE_SIZE (dmode));
19966 else
19967 rtl = inc;
19968 }
19969 }
19970 }
19971
19972 /* If the parm was passed in registers, but lives on the stack, then
19973 make a big endian correction if the mode of the type of the
19974 parameter is not the same as the mode of the rtl. */
19975 /* ??? This is the same series of checks that are made in dbxout.c before
19976 we reach the big endian correction code there. It isn't clear if all
19977 of these checks are necessary here, but keeping them all is the safe
19978 thing to do. */
19979 else if (MEM_P (rtl)
19980 && XEXP (rtl, 0) != const0_rtx
19981 && ! CONSTANT_P (XEXP (rtl, 0))
19982 /* Not passed in memory. */
19983 && !MEM_P (DECL_INCOMING_RTL (decl))
19984 /* Not passed by invisible reference. */
19985 && (!REG_P (XEXP (rtl, 0))
19986 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19987 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19988 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19989 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19990 #endif
19991 )
19992 /* Big endian correction check. */
19993 && BYTES_BIG_ENDIAN
19994 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19995 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19996 UNITS_PER_WORD))
19997 {
19998 machine_mode addr_mode = get_address_mode (rtl);
19999 poly_int64 offset = (UNITS_PER_WORD
20000 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20001
20002 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20003 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20004 }
20005 }
20006 else if (VAR_P (decl)
20007 && rtl
20008 && MEM_P (rtl)
20009 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20010 {
20011 machine_mode addr_mode = get_address_mode (rtl);
20012 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20013 GET_MODE (rtl));
20014
20015 /* If a variable is declared "register" yet is smaller than
20016 a register, then if we store the variable to memory, it
20017 looks like we're storing a register-sized value, when in
20018 fact we are not. We need to adjust the offset of the
20019 storage location to reflect the actual value's bytes,
20020 else gdb will not be able to display it. */
20021 if (maybe_ne (offset, 0))
20022 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20023 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20024 }
20025
20026 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20027 and will have been substituted directly into all expressions that use it.
20028 C does not have such a concept, but C++ and other languages do. */
20029 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20030 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20031
20032 if (rtl)
20033 rtl = targetm.delegitimize_address (rtl);
20034
20035 /* If we don't look past the constant pool, we risk emitting a
20036 reference to a constant pool entry that isn't referenced from
20037 code, and thus is not emitted. */
20038 if (rtl)
20039 rtl = avoid_constant_pool_reference (rtl);
20040
20041 /* Try harder to get a rtl. If this symbol ends up not being emitted
20042 in the current CU, resolve_addr will remove the expression referencing
20043 it. */
20044 if (rtl == NULL_RTX
20045 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20046 && VAR_P (decl)
20047 && !DECL_EXTERNAL (decl)
20048 && TREE_STATIC (decl)
20049 && DECL_NAME (decl)
20050 && !DECL_HARD_REGISTER (decl)
20051 && DECL_MODE (decl) != VOIDmode)
20052 {
20053 rtl = make_decl_rtl_for_debug (decl);
20054 if (!MEM_P (rtl)
20055 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20056 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20057 rtl = NULL_RTX;
20058 }
20059
20060 return rtl;
20061 }
20062
20063 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20064 returned. If so, the decl for the COMMON block is returned, and the
20065 value is the offset into the common block for the symbol. */
20066
20067 static tree
20068 fortran_common (tree decl, HOST_WIDE_INT *value)
20069 {
20070 tree val_expr, cvar;
20071 machine_mode mode;
20072 poly_int64 bitsize, bitpos;
20073 tree offset;
20074 HOST_WIDE_INT cbitpos;
20075 int unsignedp, reversep, volatilep = 0;
20076
20077 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20078 it does not have a value (the offset into the common area), or if it
20079 is thread local (as opposed to global) then it isn't common, and shouldn't
20080 be handled as such. */
20081 if (!VAR_P (decl)
20082 || !TREE_STATIC (decl)
20083 || !DECL_HAS_VALUE_EXPR_P (decl)
20084 || !is_fortran ())
20085 return NULL_TREE;
20086
20087 val_expr = DECL_VALUE_EXPR (decl);
20088 if (TREE_CODE (val_expr) != COMPONENT_REF)
20089 return NULL_TREE;
20090
20091 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20092 &unsignedp, &reversep, &volatilep);
20093
20094 if (cvar == NULL_TREE
20095 || !VAR_P (cvar)
20096 || DECL_ARTIFICIAL (cvar)
20097 || !TREE_PUBLIC (cvar)
20098 /* We don't expect to have to cope with variable offsets,
20099 since at present all static data must have a constant size. */
20100 || !bitpos.is_constant (&cbitpos))
20101 return NULL_TREE;
20102
20103 *value = 0;
20104 if (offset != NULL)
20105 {
20106 if (!tree_fits_shwi_p (offset))
20107 return NULL_TREE;
20108 *value = tree_to_shwi (offset);
20109 }
20110 if (cbitpos != 0)
20111 *value += cbitpos / BITS_PER_UNIT;
20112
20113 return cvar;
20114 }
20115
20116 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20117 data attribute for a variable or a parameter. We generate the
20118 DW_AT_const_value attribute only in those cases where the given variable
20119 or parameter does not have a true "location" either in memory or in a
20120 register. This can happen (for example) when a constant is passed as an
20121 actual argument in a call to an inline function. (It's possible that
20122 these things can crop up in other ways also.) Note that one type of
20123 constant value which can be passed into an inlined function is a constant
20124 pointer. This can happen for example if an actual argument in an inlined
20125 function call evaluates to a compile-time constant address.
20126
20127 CACHE_P is true if it is worth caching the location list for DECL,
20128 so that future calls can reuse it rather than regenerate it from scratch.
20129 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20130 since we will need to refer to them each time the function is inlined. */
20131
20132 static bool
20133 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20134 {
20135 rtx rtl;
20136 dw_loc_list_ref list;
20137 var_loc_list *loc_list;
20138 cached_dw_loc_list *cache;
20139
20140 if (early_dwarf)
20141 return false;
20142
20143 if (TREE_CODE (decl) == ERROR_MARK)
20144 return false;
20145
20146 if (get_AT (die, DW_AT_location)
20147 || get_AT (die, DW_AT_const_value))
20148 return true;
20149
20150 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20151 || TREE_CODE (decl) == RESULT_DECL);
20152
20153 /* Try to get some constant RTL for this decl, and use that as the value of
20154 the location. */
20155
20156 rtl = rtl_for_decl_location (decl);
20157 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20158 && add_const_value_attribute (die, rtl))
20159 return true;
20160
20161 	 /* See if we have a single-element location list that is equivalent to
20162 	    a constant value.  In that case it is better to use add_const_value_attribute
20163 	    rather than expanding the constant value equivalent.  */
20164 loc_list = lookup_decl_loc (decl);
20165 if (loc_list
20166 && loc_list->first
20167 && loc_list->first->next == NULL
20168 && NOTE_P (loc_list->first->loc)
20169 && NOTE_VAR_LOCATION (loc_list->first->loc)
20170 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20171 {
20172 struct var_loc_node *node;
20173
20174 node = loc_list->first;
20175 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20176 if (GET_CODE (rtl) == EXPR_LIST)
20177 rtl = XEXP (rtl, 0);
20178 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20179 && add_const_value_attribute (die, rtl))
20180 return true;
20181 }
20182 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20183 list several times. See if we've already cached the contents. */
20184 list = NULL;
20185 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20186 cache_p = false;
20187 if (cache_p)
20188 {
20189 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20190 if (cache)
20191 list = cache->loc_list;
20192 }
20193 if (list == NULL)
20194 {
20195 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20196 NULL);
20197 /* It is usually worth caching this result if the decl is from
20198 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20199 if (cache_p && list && list->dw_loc_next)
20200 {
20201 cached_dw_loc_list **slot
20202 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20203 DECL_UID (decl),
20204 INSERT);
20205 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20206 cache->decl_id = DECL_UID (decl);
20207 cache->loc_list = list;
20208 *slot = cache;
20209 }
20210 }
20211 if (list)
20212 {
20213 add_AT_location_description (die, DW_AT_location, list);
20214 return true;
20215 }
20216 /* None of that worked, so it must not really have a location;
20217 try adding a constant value attribute from the DECL_INITIAL. */
20218 return tree_add_const_value_attribute_for_decl (die, decl);
20219 }
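
/* Illustrative sketch (an editor's example, not part of the original
   source): if a call such as foo (42) is inlined and the formal parameter
   of the inlined body ends up with no memory or register home, the
   routine above gives the parameter's DIE

       DW_AT_const_value : 42

   instead of a DW_AT_location.  */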
20220
20221 /* Helper function for tree_add_const_value_attribute. Natively encode
20222 initializer INIT into an array. Return true if successful. */
20223
20224 static bool
20225 native_encode_initializer (tree init, unsigned char *array, int size)
20226 {
20227 tree type;
20228
20229 if (init == NULL_TREE)
20230 return false;
20231
20232 STRIP_NOPS (init);
20233 switch (TREE_CODE (init))
20234 {
20235 case STRING_CST:
20236 type = TREE_TYPE (init);
20237 if (TREE_CODE (type) == ARRAY_TYPE)
20238 {
20239 tree enttype = TREE_TYPE (type);
20240 scalar_int_mode mode;
20241
20242 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20243 || GET_MODE_SIZE (mode) != 1)
20244 return false;
20245 if (int_size_in_bytes (type) != size)
20246 return false;
20247 if (size > TREE_STRING_LENGTH (init))
20248 {
20249 memcpy (array, TREE_STRING_POINTER (init),
20250 TREE_STRING_LENGTH (init));
20251 memset (array + TREE_STRING_LENGTH (init),
20252 '\0', size - TREE_STRING_LENGTH (init));
20253 }
20254 else
20255 memcpy (array, TREE_STRING_POINTER (init), size);
20256 return true;
20257 }
20258 return false;
20259 case CONSTRUCTOR:
20260 type = TREE_TYPE (init);
20261 if (int_size_in_bytes (type) != size)
20262 return false;
20263 if (TREE_CODE (type) == ARRAY_TYPE)
20264 {
20265 HOST_WIDE_INT min_index;
20266 unsigned HOST_WIDE_INT cnt;
20267 int curpos = 0, fieldsize;
20268 constructor_elt *ce;
20269
20270 if (TYPE_DOMAIN (type) == NULL_TREE
20271 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20272 return false;
20273
20274 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20275 if (fieldsize <= 0)
20276 return false;
20277
20278 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20279 memset (array, '\0', size);
20280 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20281 {
20282 tree val = ce->value;
20283 tree index = ce->index;
20284 int pos = curpos;
20285 if (index && TREE_CODE (index) == RANGE_EXPR)
20286 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20287 * fieldsize;
20288 else if (index)
20289 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20290
20291 if (val)
20292 {
20293 STRIP_NOPS (val);
20294 if (!native_encode_initializer (val, array + pos, fieldsize))
20295 return false;
20296 }
20297 curpos = pos + fieldsize;
20298 if (index && TREE_CODE (index) == RANGE_EXPR)
20299 {
20300 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20301 - tree_to_shwi (TREE_OPERAND (index, 0));
20302 while (count-- > 0)
20303 {
20304 if (val)
20305 memcpy (array + curpos, array + pos, fieldsize);
20306 curpos += fieldsize;
20307 }
20308 }
20309 gcc_assert (curpos <= size);
20310 }
20311 return true;
20312 }
20313 else if (TREE_CODE (type) == RECORD_TYPE
20314 || TREE_CODE (type) == UNION_TYPE)
20315 {
20316 tree field = NULL_TREE;
20317 unsigned HOST_WIDE_INT cnt;
20318 constructor_elt *ce;
20319
20320 if (int_size_in_bytes (type) != size)
20321 return false;
20322
20323 if (TREE_CODE (type) == RECORD_TYPE)
20324 field = TYPE_FIELDS (type);
20325
20326 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20327 {
20328 tree val = ce->value;
20329 int pos, fieldsize;
20330
20331 if (ce->index != 0)
20332 field = ce->index;
20333
20334 if (val)
20335 STRIP_NOPS (val);
20336
20337 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20338 return false;
20339
20340 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20341 && TYPE_DOMAIN (TREE_TYPE (field))
20342 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20343 return false;
20344 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20345 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20346 return false;
20347 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20348 pos = int_byte_position (field);
20349 gcc_assert (pos + fieldsize <= size);
20350 if (val && fieldsize != 0
20351 && !native_encode_initializer (val, array + pos, fieldsize))
20352 return false;
20353 }
20354 return true;
20355 }
20356 return false;
20357 case VIEW_CONVERT_EXPR:
20358 case NON_LVALUE_EXPR:
20359 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20360 default:
20361 return native_encode_expr (init, array, size) == size;
20362 }
20363 }
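
/* Illustrative sketch (an editor's example, not part of the original
   source): for

       static const struct { char c; int i; } s = { 'a', 7 };

   on a little-endian target with 4-byte int and the usual layout (c at
   byte 0, i at byte 4, total size 8), the routine above writes 0x61 at
   offset 0 and 07 00 00 00 at offset 4; the padding bytes keep whatever
   the caller put in the buffer (tree_add_const_value_attribute below
   passes a zero-filled one), giving 61 00 00 00 07 00 00 00.  */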
20364
20365 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20366 attribute is the const value T. */
20367
20368 static bool
20369 tree_add_const_value_attribute (dw_die_ref die, tree t)
20370 {
20371 tree init;
20372 tree type = TREE_TYPE (t);
20373 rtx rtl;
20374
20375 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20376 return false;
20377
20378 init = t;
20379 gcc_assert (!DECL_P (init));
20380
20381 if (TREE_CODE (init) == INTEGER_CST)
20382 {
20383 if (tree_fits_uhwi_p (init))
20384 {
20385 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20386 return true;
20387 }
20388 if (tree_fits_shwi_p (init))
20389 {
20390 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20391 return true;
20392 }
20393 }
20394 if (! early_dwarf)
20395 {
20396 rtl = rtl_for_decl_init (init, type);
20397 if (rtl)
20398 return add_const_value_attribute (die, rtl);
20399 }
20400 /* If the host and target are sane, try harder. */
20401 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20402 && initializer_constant_valid_p (init, type))
20403 {
20404 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20405 if (size > 0 && (int) size == size)
20406 {
20407 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20408
20409 if (native_encode_initializer (init, array, size))
20410 {
20411 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20412 return true;
20413 }
20414 ggc_free (array);
20415 }
20416 }
20417 return false;
20418 }
20419
20420 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20421 attribute is the const value of T, where T is an integral constant
20422 variable with static storage duration
20423 (so it can't be a PARM_DECL or a RESULT_DECL). */
20424
20425 static bool
20426 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20427 {
20428
20429 if (!decl
20430 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20431 || (VAR_P (decl) && !TREE_STATIC (decl)))
20432 return false;
20433
20434 if (TREE_READONLY (decl)
20435 && ! TREE_THIS_VOLATILE (decl)
20436 && DECL_INITIAL (decl))
20437 /* OK */;
20438 else
20439 return false;
20440
20441 /* Don't add DW_AT_const_value if abstract origin already has one. */
20442 if (get_AT (var_die, DW_AT_const_value))
20443 return false;
20444
20445 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20446 }
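
/* Illustrative sketch (an editor's example, not part of the original
   source): for

       static const int answer = 42;

   when the variable ends up with no location of its own (it may have been
   optimized away entirely), the fallback above takes DECL_INITIAL and
   emits DW_AT_const_value : 42 on the variable's DIE.  */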
20447
20448 /* Convert the CFI instructions for the current function into a
20449    location list.  This is used for DW_AT_frame_base when we are targeting
20450 a dwarf2 consumer that does not support the dwarf3
20451 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20452 expressions. */
20453
20454 static dw_loc_list_ref
20455 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20456 {
20457 int ix;
20458 dw_fde_ref fde;
20459 dw_loc_list_ref list, *list_tail;
20460 dw_cfi_ref cfi;
20461 dw_cfa_location last_cfa, next_cfa;
20462 const char *start_label, *last_label, *section;
20463 dw_cfa_location remember;
20464
20465 fde = cfun->fde;
20466 gcc_assert (fde != NULL);
20467
20468 section = secname_for_decl (current_function_decl);
20469 list_tail = &list;
20470 list = NULL;
20471
20472 memset (&next_cfa, 0, sizeof (next_cfa));
20473 next_cfa.reg = INVALID_REGNUM;
20474 remember = next_cfa;
20475
20476 start_label = fde->dw_fde_begin;
20477
20478 /* ??? Bald assumption that the CIE opcode list does not contain
20479 advance opcodes. */
20480 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20481 lookup_cfa_1 (cfi, &next_cfa, &remember);
20482
20483 last_cfa = next_cfa;
20484 last_label = start_label;
20485
20486 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20487 {
20488 /* If the first partition contained no CFI adjustments, the
20489 CIE opcodes apply to the whole first partition. */
20490 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20491 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20492       list_tail = &(*list_tail)->dw_loc_next;
20493 start_label = last_label = fde->dw_fde_second_begin;
20494 }
20495
20496 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20497 {
20498 switch (cfi->dw_cfi_opc)
20499 {
20500 case DW_CFA_set_loc:
20501 case DW_CFA_advance_loc1:
20502 case DW_CFA_advance_loc2:
20503 case DW_CFA_advance_loc4:
20504 if (!cfa_equal_p (&last_cfa, &next_cfa))
20505 {
20506 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20507 start_label, 0, last_label, 0, section);
20508
20509 list_tail = &(*list_tail)->dw_loc_next;
20510 last_cfa = next_cfa;
20511 start_label = last_label;
20512 }
20513 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20514 break;
20515
20516 case DW_CFA_advance_loc:
20517 /* The encoding is complex enough that we should never emit this. */
20518 gcc_unreachable ();
20519
20520 default:
20521 lookup_cfa_1 (cfi, &next_cfa, &remember);
20522 break;
20523 }
20524 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20525 {
20526 if (!cfa_equal_p (&last_cfa, &next_cfa))
20527 {
20528 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20529 start_label, 0, last_label, 0, section);
20530
20531 list_tail = &(*list_tail)->dw_loc_next;
20532 last_cfa = next_cfa;
20533 start_label = last_label;
20534 }
20535 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20536 start_label, 0, fde->dw_fde_end, 0, section);
20537 list_tail = &(*list_tail)->dw_loc_next;
20538 start_label = last_label = fde->dw_fde_second_begin;
20539 }
20540 }
20541
20542 if (!cfa_equal_p (&last_cfa, &next_cfa))
20543 {
20544 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20545 start_label, 0, last_label, 0, section);
20546 list_tail = &(*list_tail)->dw_loc_next;
20547 start_label = last_label;
20548 }
20549
20550 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20551 start_label, 0,
20552 fde->dw_fde_second_begin
20553 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20554 section);
20555
20556 maybe_gen_llsym (list);
20557
20558 return list;
20559 }
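
/* Illustrative sketch (an editor's example, not part of the original
   source): for a typical x86-64 prologue that pushes and then sets up the
   frame pointer, the list built above looks roughly like

       [entry, after push %rbp)      DW_OP_breg7 (rsp) +8
       [after push, after mov)       DW_OP_breg7 (rsp) +16
       [after mov %rsp,%rbp, end)    DW_OP_breg6 (rbp) +16

   one entry per range over which the CFA expression is unchanged, each
   built by build_cfa_loc with OFFSET folded in.  The concrete registers
   and offsets are assumptions for illustration.  */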
20560
20561 /* Compute a displacement from the "steady-state frame pointer" to the
20562 frame base (often the same as the CFA), and store it in
20563 frame_pointer_fb_offset. OFFSET is added to the displacement
20564 before the latter is negated. */
20565
20566 static void
20567 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20568 {
20569 rtx reg, elim;
20570
20571 #ifdef FRAME_POINTER_CFA_OFFSET
20572 reg = frame_pointer_rtx;
20573 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20574 #else
20575 reg = arg_pointer_rtx;
20576 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20577 #endif
20578
20579 elim = (ira_use_lra_p
20580 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20581 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20582 elim = strip_offset_and_add (elim, &offset);
20583
20584 frame_pointer_fb_offset = -offset;
20585
20586 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20587      in which to eliminate.  This is because its stack pointer isn't
20588 directly accessible as a register within the ISA. To work around
20589 this, assume that while we cannot provide a proper value for
20590 frame_pointer_fb_offset, we won't need one either. */
20591 frame_pointer_fb_offset_valid
20592 = ((SUPPORTS_STACK_ALIGNMENT
20593 && (elim == hard_frame_pointer_rtx
20594 || elim == stack_pointer_rtx))
20595 || elim == (frame_pointer_needed
20596 ? hard_frame_pointer_rtx
20597 : stack_pointer_rtx));
20598 }
20599
20600 /* Generate a DW_AT_name attribute given some string value to be included as
20601 the value of the attribute. */
20602
20603 static void
20604 add_name_attribute (dw_die_ref die, const char *name_string)
20605 {
20606 if (name_string != NULL && *name_string != 0)
20607 {
20608 if (demangle_name_func)
20609 name_string = (*demangle_name_func) (name_string);
20610
20611 add_AT_string (die, DW_AT_name, name_string);
20612 }
20613 }
20614
20615 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20616 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20617 of TYPE accordingly.
20618
20619 ??? This is a temporary measure until after we're able to generate
20620 regular DWARF for the complex Ada type system. */
20621
20622 static void
20623 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20624 dw_die_ref context_die)
20625 {
20626 tree dtype;
20627 dw_die_ref dtype_die;
20628
20629 if (!lang_hooks.types.descriptive_type)
20630 return;
20631
20632 dtype = lang_hooks.types.descriptive_type (type);
20633 if (!dtype)
20634 return;
20635
20636 dtype_die = lookup_type_die (dtype);
20637 if (!dtype_die)
20638 {
20639 gen_type_die (dtype, context_die);
20640 dtype_die = lookup_type_die (dtype);
20641 gcc_assert (dtype_die);
20642 }
20643
20644 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20645 }
20646
20647 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20648
20649 static const char *
20650 comp_dir_string (void)
20651 {
20652 const char *wd;
20653 char *wd1;
20654 static const char *cached_wd = NULL;
20655
20656 if (cached_wd != NULL)
20657 return cached_wd;
20658
20659 wd = get_src_pwd ();
20660 if (wd == NULL)
20661 return NULL;
20662
20663 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20664 {
20665 int wdlen;
20666
20667 wdlen = strlen (wd);
20668 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20669 strcpy (wd1, wd);
20670 wd1 [wdlen] = DIR_SEPARATOR;
20671 wd1 [wdlen + 1] = 0;
20672 wd = wd1;
20673 }
20674
20675 cached_wd = remap_debug_filename (wd);
20676 return cached_wd;
20677 }
20678
20679 /* Generate a DW_AT_comp_dir attribute for DIE. */
20680
20681 static void
20682 add_comp_dir_attribute (dw_die_ref die)
20683 {
20684 const char * wd = comp_dir_string ();
20685 if (wd != NULL)
20686 add_AT_string (die, DW_AT_comp_dir, wd);
20687 }
20688
20689 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20690 pointer computation, ...), output a representation for that bound according
20691 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20692 loc_list_from_tree for the meaning of CONTEXT. */
20693
20694 static void
20695 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20696 int forms, struct loc_descr_context *context)
20697 {
20698 dw_die_ref context_die, decl_die;
20699 dw_loc_list_ref list;
20700 bool strip_conversions = true;
20701 bool placeholder_seen = false;
20702
20703 while (strip_conversions)
20704 switch (TREE_CODE (value))
20705 {
20706 case ERROR_MARK:
20707 case SAVE_EXPR:
20708 return;
20709
20710 CASE_CONVERT:
20711 case VIEW_CONVERT_EXPR:
20712 value = TREE_OPERAND (value, 0);
20713 break;
20714
20715 default:
20716 strip_conversions = false;
20717 break;
20718 }
20719
20720 /* If possible and permitted, output the attribute as a constant. */
20721 if ((forms & dw_scalar_form_constant) != 0
20722 && TREE_CODE (value) == INTEGER_CST)
20723 {
20724 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20725
20726 /* If HOST_WIDE_INT is big enough then represent the bound as
20727 a constant value. We need to choose a form based on
20728 whether the type is signed or unsigned. We cannot just
20729 call add_AT_unsigned if the value itself is positive
20730 (add_AT_unsigned might add the unsigned value encoded as
20731 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20732 bounds type and then sign extend any unsigned values found
20733 for signed types. This is needed only for
20734 DW_AT_{lower,upper}_bound, since for most other attributes,
20735 consumers will treat DW_FORM_data[1248] as unsigned values,
20736 regardless of the underlying type. */
20737 if (prec <= HOST_BITS_PER_WIDE_INT
20738 || tree_fits_uhwi_p (value))
20739 {
20740 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20741 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20742 else
20743 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20744 }
20745 else
20746 /* Otherwise represent the bound as an unsigned value with
20747 the precision of its type. The precision and signedness
20748 of the type will be necessary to re-interpret it
20749 unambiguously. */
20750 add_AT_wide (die, attr, wi::to_wide (value));
20751 return;
20752 }
20753
20754 /* Otherwise, if it's possible and permitted too, output a reference to
20755 another DIE. */
20756 if ((forms & dw_scalar_form_reference) != 0)
20757 {
20758 tree decl = NULL_TREE;
20759
20760 /* Some type attributes reference an outer type. For instance, the upper
20761 bound of an array may reference an embedding record (this happens in
20762 Ada). */
20763 if (TREE_CODE (value) == COMPONENT_REF
20764 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20765 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20766 decl = TREE_OPERAND (value, 1);
20767
20768 else if (VAR_P (value)
20769 || TREE_CODE (value) == PARM_DECL
20770 || TREE_CODE (value) == RESULT_DECL)
20771 decl = value;
20772
20773 if (decl != NULL_TREE)
20774 {
20775 dw_die_ref decl_die = lookup_decl_die (decl);
20776
20777 /* ??? Can this happen, or should the variable have been bound
20778 first? Probably it can, since I imagine that we try to create
20779 the types of parameters in the order in which they exist in
20780 the list, and won't have created a forward reference to a
20781 later parameter. */
20782 if (decl_die != NULL)
20783 {
20784 add_AT_die_ref (die, attr, decl_die);
20785 return;
20786 }
20787 }
20788 }
20789
20790 /* Last chance: try to create a stack operation procedure to evaluate the
20791 value. Do nothing if even that is not possible or permitted. */
20792 if ((forms & dw_scalar_form_exprloc) == 0)
20793 return;
20794
20795 list = loc_list_from_tree (value, 2, context);
20796 if (context && context->placeholder_arg)
20797 {
20798 placeholder_seen = context->placeholder_seen;
20799 context->placeholder_seen = false;
20800 }
20801 if (list == NULL || single_element_loc_list_p (list))
20802 {
20803       /* If this attribute is neither a reference nor a constant, it is
20804 	 a DWARF expression rather than a location description.  For that
20805 loc_list_from_tree (value, 0, &context) is needed. */
20806 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20807 if (list2 && single_element_loc_list_p (list2))
20808 {
20809 if (placeholder_seen)
20810 {
20811 struct dwarf_procedure_info dpi;
20812 dpi.fndecl = NULL_TREE;
20813 dpi.args_count = 1;
20814 if (!resolve_args_picking (list2->expr, 1, &dpi))
20815 return;
20816 }
20817 add_AT_loc (die, attr, list2->expr);
20818 return;
20819 }
20820 }
20821
20822 /* If that failed to give a single element location list, fall back to
20823 outputting this as a reference... still if permitted. */
20824 if (list == NULL
20825 || (forms & dw_scalar_form_reference) == 0
20826 || placeholder_seen)
20827 return;
20828
20829 if (current_function_decl == 0)
20830 context_die = comp_unit_die ();
20831 else
20832 context_die = lookup_decl_die (current_function_decl);
20833
20834 decl_die = new_die (DW_TAG_variable, context_die, value);
20835 add_AT_flag (decl_die, DW_AT_artificial, 1);
20836 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20837 context_die);
20838 add_AT_location_description (decl_die, DW_AT_location, list);
20839 add_AT_die_ref (die, attr, decl_die);
20840 }
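
/* Illustrative sketch (an editor's example, not part of the original
   source): a constant bound such as 9 is emitted directly by the first
   branch above as DW_AT_upper_bound : 9.  An Ada bound that is a record
   discriminant reaches the reference branch as a COMPONENT_REF of a
   PLACEHOLDER_EXPR and yields a reference to the FIELD_DECL's DIE;
   anything else falls through to the exprloc form.  */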
20841
20842 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20843 default. */
20844
20845 static int
20846 lower_bound_default (void)
20847 {
20848 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20849 {
20850 case DW_LANG_C:
20851 case DW_LANG_C89:
20852 case DW_LANG_C99:
20853 case DW_LANG_C11:
20854 case DW_LANG_C_plus_plus:
20855 case DW_LANG_C_plus_plus_11:
20856 case DW_LANG_C_plus_plus_14:
20857 case DW_LANG_ObjC:
20858 case DW_LANG_ObjC_plus_plus:
20859 return 0;
20860 case DW_LANG_Fortran77:
20861 case DW_LANG_Fortran90:
20862 case DW_LANG_Fortran95:
20863 case DW_LANG_Fortran03:
20864 case DW_LANG_Fortran08:
20865 return 1;
20866 case DW_LANG_UPC:
20867 case DW_LANG_D:
20868 case DW_LANG_Python:
20869 return dwarf_version >= 4 ? 0 : -1;
20870 case DW_LANG_Ada95:
20871 case DW_LANG_Ada83:
20872 case DW_LANG_Cobol74:
20873 case DW_LANG_Cobol85:
20874 case DW_LANG_Modula2:
20875 case DW_LANG_PLI:
20876 return dwarf_version >= 4 ? 1 : -1;
20877 default:
20878 return -1;
20879 }
20880 }
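
/* Illustrative sketch (an editor's example, not part of the original
   source): given the defaults above, a C declaration such as

       int a[4];

   gets a subrange DIE carrying only DW_AT_upper_bound : 3, because the
   lower bound 0 matches the C default and add_bound_info below omits it;
   the Fortran equivalent instead omits a lower bound of 1.  */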
20881
20882 /* Given a tree node describing an array bound (either lower or upper) output
20883 a representation for that bound. */
20884
20885 static void
20886 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20887 tree bound, struct loc_descr_context *context)
20888 {
20889 int dflt;
20890
20891 while (1)
20892 switch (TREE_CODE (bound))
20893 {
20894 /* Strip all conversions. */
20895 CASE_CONVERT:
20896 case VIEW_CONVERT_EXPR:
20897 bound = TREE_OPERAND (bound, 0);
20898 break;
20899
20900 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20901 are even omitted when they are the default. */
20902 case INTEGER_CST:
20903 /* If the value for this bound is the default one, we can even omit the
20904 attribute. */
20905 if (bound_attr == DW_AT_lower_bound
20906 && tree_fits_shwi_p (bound)
20907 && (dflt = lower_bound_default ()) != -1
20908 && tree_to_shwi (bound) == dflt)
20909 return;
20910
20911 /* FALLTHRU */
20912
20913 default:
20914       /* Because of the complex interactions there can be with other GNAT
20915 	 encodings, GDB isn't ready yet to handle a proper DWARF description
20916 	 for self-referential subrange bounds: let GNAT encodings do the
20917 magic in such a case. */
20918 if (is_ada ()
20919 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20920 && contains_placeholder_p (bound))
20921 return;
20922
20923 add_scalar_info (subrange_die, bound_attr, bound,
20924 dw_scalar_form_constant
20925 | dw_scalar_form_exprloc
20926 | dw_scalar_form_reference,
20927 context);
20928 return;
20929 }
20930 }
20931
20932 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20933 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20934 Note that the block of subscript information for an array type also
20935 includes information about the element type of the given array type.
20936
20937 This function reuses previously set type and bound information if
20938 available. */
20939
20940 static void
20941 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20942 {
20943 unsigned dimension_number;
20944 tree lower, upper;
20945 dw_die_ref child = type_die->die_child;
20946
20947 for (dimension_number = 0;
20948 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20949 type = TREE_TYPE (type), dimension_number++)
20950 {
20951 tree domain = TYPE_DOMAIN (type);
20952
20953 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20954 break;
20955
20956 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20957 and (in GNU C only) variable bounds. Handle all three forms
20958 here. */
20959
20960 /* Find and reuse a previously generated DW_TAG_subrange_type if
20961 available.
20962
20963 For multi-dimensional arrays, as we iterate through the
20964 various dimensions in the enclosing for loop above, we also
20965 iterate through the DIE children and pick at each
20966 DW_TAG_subrange_type previously generated (if available).
20967 Each child DW_TAG_subrange_type DIE describes the range of
20968 the current dimension. At this point we should have as many
20969 DW_TAG_subrange_type's as we have dimensions in the
20970 array. */
20971 dw_die_ref subrange_die = NULL;
20972 if (child)
20973 while (1)
20974 {
20975 child = child->die_sib;
20976 if (child->die_tag == DW_TAG_subrange_type)
20977 subrange_die = child;
20978 if (child == type_die->die_child)
20979 {
20980 /* If we wrapped around, stop looking next time. */
20981 child = NULL;
20982 break;
20983 }
20984 if (child->die_tag == DW_TAG_subrange_type)
20985 break;
20986 }
20987 if (!subrange_die)
20988 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20989
20990 if (domain)
20991 {
20992 /* We have an array type with specified bounds. */
20993 lower = TYPE_MIN_VALUE (domain);
20994 upper = TYPE_MAX_VALUE (domain);
20995
20996 /* Define the index type. */
20997 if (TREE_TYPE (domain)
20998 && !get_AT (subrange_die, DW_AT_type))
20999 {
21000 /* ??? This is probably an Ada unnamed subrange type. Ignore the
21001 TREE_TYPE field. We can't emit debug info for this
21002 because it is an unnamed integral type. */
21003 if (TREE_CODE (domain) == INTEGER_TYPE
21004 && TYPE_NAME (domain) == NULL_TREE
21005 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
21006 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
21007 ;
21008 else
21009 add_type_attribute (subrange_die, TREE_TYPE (domain),
21010 TYPE_UNQUALIFIED, false, type_die);
21011 }
21012
21013 /* ??? If upper is NULL, the array has unspecified length,
21014 but it does have a lower bound. This happens with Fortran
21015 dimension arr(N:*)
21016 Since the debugger is definitely going to need to know N
21017 to produce useful results, go ahead and output the lower
21018 bound solo, and hope the debugger can cope. */
21019
21020 if (!get_AT (subrange_die, DW_AT_lower_bound))
21021 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21022 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
21023 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21024 }
21025
21026 /* Otherwise we have an array type with an unspecified length. The
21027 DWARF-2 spec does not say how to handle this; let's just leave out the
21028 bounds. */
21029 }
21030 }
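
/* Illustrative sketch (an editor's example, not part of the original
   source): for the C array

       int m[2][3];

   with COLLAPSE_P true, the loop above walks both dimensions and leaves a
   single DW_TAG_array_type DIE with two DW_TAG_subrange_type children,
   upper bounds 1 and 2 respectively, rather than nesting one array type
   inside another.  */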
21031
21032 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21033
21034 static void
21035 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21036 {
21037 dw_die_ref decl_die;
21038 HOST_WIDE_INT size;
21039 dw_loc_descr_ref size_expr = NULL;
21040
21041 switch (TREE_CODE (tree_node))
21042 {
21043 case ERROR_MARK:
21044 size = 0;
21045 break;
21046 case ENUMERAL_TYPE:
21047 case RECORD_TYPE:
21048 case UNION_TYPE:
21049 case QUAL_UNION_TYPE:
21050 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21051 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21052 {
21053 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21054 return;
21055 }
21056 size_expr = type_byte_size (tree_node, &size);
21057 break;
21058 case FIELD_DECL:
21059 /* For a data member of a struct or union, the DW_AT_byte_size is
21060 generally given as the number of bytes normally allocated for an
21061 object of the *declared* type of the member itself. This is true
21062 even for bit-fields. */
21063 size = int_size_in_bytes (field_type (tree_node));
21064 break;
21065 default:
21066 gcc_unreachable ();
21067 }
21068
21069 /* Support for dynamically-sized objects was introduced by DWARFv3.
21070 At the moment, GDB does not handle variable byte sizes very well,
21071 though. */
21072 if ((dwarf_version >= 3 || !dwarf_strict)
21073 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21074 && size_expr != NULL)
21075 add_AT_loc (die, DW_AT_byte_size, size_expr);
21076
21077 /* Note that `size' might be -1 when we get to this point. If it is, that
21078 indicates that the byte size of the entity in question is variable and
21079 that we could not generate a DWARF expression that computes it. */
21080 if (size >= 0)
21081 add_AT_unsigned (die, DW_AT_byte_size, size);
21082 }
21083
21084 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21085 alignment. */
21086
21087 static void
21088 add_alignment_attribute (dw_die_ref die, tree tree_node)
21089 {
21090 if (dwarf_version < 5 && dwarf_strict)
21091 return;
21092
21093 unsigned align;
21094
21095 if (DECL_P (tree_node))
21096 {
21097 if (!DECL_USER_ALIGN (tree_node))
21098 return;
21099
21100 align = DECL_ALIGN_UNIT (tree_node);
21101 }
21102 else if (TYPE_P (tree_node))
21103 {
21104 if (!TYPE_USER_ALIGN (tree_node))
21105 return;
21106
21107 align = TYPE_ALIGN_UNIT (tree_node);
21108 }
21109 else
21110 gcc_unreachable ();
21111
21112 add_AT_unsigned (die, DW_AT_alignment, align);
21113 }
21114
21115 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21116 which specifies the distance in bits from the highest order bit of the
21117 "containing object" for the bit-field to the highest order bit of the
21118 bit-field itself.
21119
21120 For any given bit-field, the "containing object" is a hypothetical object
21121 (of some integral or enum type) within which the given bit-field lives. The
21122 type of this hypothetical "containing object" is always the same as the
21123 declared type of the individual bit-field itself. The determination of the
21124 exact location of the "containing object" for a bit-field is rather
21125 complicated. It's handled by the `field_byte_offset' function (above).
21126
21127 CTX is required: see the comment for VLR_CONTEXT.
21128
21129 Note that it is the size (in bytes) of the hypothetical "containing object"
21130 which will be given in the DW_AT_byte_size attribute for this bit-field.
21131    (See `add_byte_size_attribute' above.)  */
21132
21133 static inline void
21134 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21135 {
21136 HOST_WIDE_INT object_offset_in_bytes;
21137 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21138 HOST_WIDE_INT bitpos_int;
21139 HOST_WIDE_INT highest_order_object_bit_offset;
21140 HOST_WIDE_INT highest_order_field_bit_offset;
21141 HOST_WIDE_INT bit_offset;
21142
21143 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21144
21145 /* Must be a field and a bit field. */
21146 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21147
21148 /* We can't yet handle bit-fields whose offsets are variable, so if we
21149 encounter such things, just return without generating any attribute
21150 whatsoever. Likewise for variable or too large size. */
21151 if (! tree_fits_shwi_p (bit_position (decl))
21152 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21153 return;
21154
21155 bitpos_int = int_bit_position (decl);
21156
21157 /* Note that the bit offset is always the distance (in bits) from the
21158 highest-order bit of the "containing object" to the highest-order bit of
21159 the bit-field itself. Since the "high-order end" of any object or field
21160 is different on big-endian and little-endian machines, the computation
21161 below must take account of these differences. */
21162 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21163 highest_order_field_bit_offset = bitpos_int;
21164
21165 if (! BYTES_BIG_ENDIAN)
21166 {
21167 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21168 highest_order_object_bit_offset +=
21169 simple_type_size_in_bits (original_type);
21170 }
21171
21172 bit_offset
21173 = (! BYTES_BIG_ENDIAN
21174 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21175 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21176
21177 if (bit_offset < 0)
21178 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21179 else
21180 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21181 }
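
/* Illustrative sketch (an editor's example, not part of the original
   source): for

       struct s { unsigned int a : 3; unsigned int b : 5; };

   with 32-bit unsigned int and B's containing object starting at byte 0,
   B has int_bit_position == 3 and DECL_SIZE == 5.  On a little-endian
   target the computation above gives DW_AT_bit_offset : 32 - (3 + 5) = 24;
   on a big-endian target it gives 3.  */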
21182
21183 /* For a FIELD_DECL node which represents a bit field, output an attribute
21184 which specifies the length in bits of the given field. */
21185
21186 static inline void
21187 add_bit_size_attribute (dw_die_ref die, tree decl)
21188 {
21189 /* Must be a field and a bit field. */
21190 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21191 && DECL_BIT_FIELD_TYPE (decl));
21192
21193 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21194 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21195 }
21196
21197 /* If the compiled language is ANSI C, then add a 'prototyped'
21198 attribute, if arg types are given for the parameters of a function. */
21199
21200 static inline void
21201 add_prototyped_attribute (dw_die_ref die, tree func_type)
21202 {
21203 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21204 {
21205 case DW_LANG_C:
21206 case DW_LANG_C89:
21207 case DW_LANG_C99:
21208 case DW_LANG_C11:
21209 case DW_LANG_ObjC:
21210 if (prototype_p (func_type))
21211 add_AT_flag (die, DW_AT_prototyped, 1);
21212 break;
21213 default:
21214 break;
21215 }
21216 }
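
/* Illustrative sketch (an editor's example, not part of the original
   source): in C, a prototyped declaration such as

       int f (void);

   gets DW_AT_prototyped : 1, whereas the unprototyped

       int g ();

   gets no such attribute; for languages not listed above nothing is
   emitted at all.  */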
21217
21218 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21219 by looking in the type declaration, the object declaration equate table or
21220 the block mapping. */
21221
21222 static inline dw_die_ref
21223 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21224 {
21225 dw_die_ref origin_die = NULL;
21226
21227 if (DECL_P (origin))
21228 {
21229 dw_die_ref c;
21230 origin_die = lookup_decl_die (origin);
21231 /* "Unwrap" the decls DIE which we put in the imported unit context.
21232 We are looking for the abstract copy here. */
21233 if (in_lto_p
21234 && origin_die
21235 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21236 /* ??? Identify this better. */
21237 && c->with_offset)
21238 origin_die = c;
21239 }
21240 else if (TYPE_P (origin))
21241 origin_die = lookup_type_die (origin);
21242 else if (TREE_CODE (origin) == BLOCK)
21243 origin_die = BLOCK_DIE (origin);
21244
21245 /* XXX: Functions that are never lowered don't always have correct block
21246      trees (in the case of Java they simply have no block tree; likewise in
21247      some other languages).  For these functions, there is nothing we can really do to
21248 output correct debug info for inlined functions in all cases. Rather
21249 than die, we'll just produce deficient debug info now, in that we will
21250 have variables without a proper abstract origin. In the future, when all
21251 functions are lowered, we should re-add a gcc_assert (origin_die)
21252 here. */
21253
21254 if (origin_die)
21255 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21256 return origin_die;
21257 }
21258
21259 /* We do not currently support the pure_virtual attribute. */
21260
21261 static inline void
21262 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21263 {
21264 if (DECL_VINDEX (func_decl))
21265 {
21266 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21267
21268 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21269 add_AT_loc (die, DW_AT_vtable_elem_location,
21270 new_loc_descr (DW_OP_constu,
21271 tree_to_shwi (DECL_VINDEX (func_decl)),
21272 0));
21273
21274 /* GNU extension: Record what type this method came from originally. */
21275 if (debug_info_level > DINFO_LEVEL_TERSE
21276 && DECL_CONTEXT (func_decl))
21277 add_AT_die_ref (die, DW_AT_containing_type,
21278 lookup_type_die (DECL_CONTEXT (func_decl)));
21279 }
21280 }
21281
21282 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21283 given decl. This used to be a vendor extension until after DWARF 4
21284 standardized it. */
21285
21286 static void
21287 add_linkage_attr (dw_die_ref die, tree decl)
21288 {
21289 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21290
21291 /* Mimic what assemble_name_raw does with a leading '*'. */
21292 if (name[0] == '*')
21293 name = &name[1];
21294
21295 if (dwarf_version >= 4)
21296 add_AT_string (die, DW_AT_linkage_name, name);
21297 else
21298 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21299 }
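
/* Illustrative sketch (an editor's example, not part of the original
   source): for the C++ declaration

       namespace n { int f (int); }

   DECL_ASSEMBLER_NAME is the mangled string "_ZN1n1fEi", so with DWARF 4
   or later the DIE gets DW_AT_linkage_name : "_ZN1n1fEi", and older
   versions use DW_AT_MIPS_linkage_name for the same string.  */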
21300
21301 /* Add source coordinate attributes for the given decl. */
21302
21303 static void
21304 add_src_coords_attributes (dw_die_ref die, tree decl)
21305 {
21306 expanded_location s;
21307
21308 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21309 return;
21310 s = expand_location (DECL_SOURCE_LOCATION (decl));
21311 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21312 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21313 if (debug_column_info && s.column)
21314 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21315 }
21316
21317 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21318
21319 static void
21320 add_linkage_name_raw (dw_die_ref die, tree decl)
21321 {
21322 /* Defer until we have an assembler name set. */
21323 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21324 {
21325 limbo_die_node *asm_name;
21326
21327 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21328 asm_name->die = die;
21329 asm_name->created_for = decl;
21330 asm_name->next = deferred_asm_name;
21331 deferred_asm_name = asm_name;
21332 }
21333 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21334 add_linkage_attr (die, decl);
21335 }
21336
21337 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21338
21339 static void
21340 add_linkage_name (dw_die_ref die, tree decl)
21341 {
21342 if (debug_info_level > DINFO_LEVEL_NONE
21343 && VAR_OR_FUNCTION_DECL_P (decl)
21344 && TREE_PUBLIC (decl)
21345 && !(VAR_P (decl) && DECL_REGISTER (decl))
21346 && die->die_tag != DW_TAG_member)
21347 add_linkage_name_raw (die, decl);
21348 }
21349
21350 /* Add a DW_AT_name attribute and source coordinate attribute for the
21351 given decl, but only if it actually has a name. */
21352
21353 static void
21354 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21355 bool no_linkage_name)
21356 {
21357 tree decl_name;
21358
21359 decl_name = DECL_NAME (decl);
21360 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21361 {
21362 const char *name = dwarf2_name (decl, 0);
21363 if (name)
21364 add_name_attribute (die, name);
21365 if (! DECL_ARTIFICIAL (decl))
21366 add_src_coords_attributes (die, decl);
21367
21368 if (!no_linkage_name)
21369 add_linkage_name (die, decl);
21370 }
21371
21372 #ifdef VMS_DEBUGGING_INFO
21373 /* Get the function's name, as described by its RTL. This may be different
21374 from the DECL_NAME name used in the source file. */
21375 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21376 {
21377 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21378 XEXP (DECL_RTL (decl), 0), false);
21379 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21380 }
21381 #endif /* VMS_DEBUGGING_INFO */
21382 }
21383
21384 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21385
21386 static void
21387 add_discr_value (dw_die_ref die, dw_discr_value *value)
21388 {
21389 dw_attr_node attr;
21390
21391 attr.dw_attr = DW_AT_discr_value;
21392 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21393 attr.dw_attr_val.val_entry = NULL;
21394 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21395 if (value->pos)
21396 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21397 else
21398 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21399 add_dwarf_attr (die, &attr);
21400 }
21401
21402 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21403
21404 static void
21405 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21406 {
21407 dw_attr_node attr;
21408
21409 attr.dw_attr = DW_AT_discr_list;
21410 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21411 attr.dw_attr_val.val_entry = NULL;
21412 attr.dw_attr_val.v.val_discr_list = discr_list;
21413 add_dwarf_attr (die, &attr);
21414 }
21415
21416 static inline dw_discr_list_ref
21417 AT_discr_list (dw_attr_node *attr)
21418 {
21419 return attr->dw_attr_val.v.val_discr_list;
21420 }
21421
21422 #ifdef VMS_DEBUGGING_INFO
21423 /* Output the debug main pointer die for VMS */
21424
21425 void
21426 dwarf2out_vms_debug_main_pointer (void)
21427 {
21428 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21429 dw_die_ref die;
21430
21431 /* Allocate the VMS debug main subprogram die. */
21432 die = new_die_raw (DW_TAG_subprogram);
21433 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21434 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21435 current_function_funcdef_no);
21436 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21437
21438 /* Make it the first child of comp_unit_die (). */
21439 die->die_parent = comp_unit_die ();
21440 if (comp_unit_die ()->die_child)
21441 {
21442 die->die_sib = comp_unit_die ()->die_child->die_sib;
21443 comp_unit_die ()->die_child->die_sib = die;
21444 }
21445 else
21446 {
21447 die->die_sib = die;
21448 comp_unit_die ()->die_child = die;
21449 }
21450 }
21451 #endif /* VMS_DEBUGGING_INFO */
21452
21453 /* Push a new declaration scope. */
21454
21455 static void
21456 push_decl_scope (tree scope)
21457 {
21458 vec_safe_push (decl_scope_table, scope);
21459 }
21460
21461 /* Pop a declaration scope. */
21462
21463 static inline void
21464 pop_decl_scope (void)
21465 {
21466 decl_scope_table->pop ();
21467 }
21468
21469 /* walk_tree helper function for uses_local_type, below. */
21470
21471 static tree
21472 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21473 {
21474 if (!TYPE_P (*tp))
21475 *walk_subtrees = 0;
21476 else
21477 {
21478 tree name = TYPE_NAME (*tp);
21479 if (name && DECL_P (name) && decl_function_context (name))
21480 return *tp;
21481 }
21482 return NULL_TREE;
21483 }
21484
21485 /* If TYPE involves a function-local type (including a local typedef to a
21486 non-local type), returns that type; otherwise returns NULL_TREE. */
21487
21488 static tree
21489 uses_local_type (tree type)
21490 {
21491 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21492 return used;
21493 }
21494
21495 /* Return the DIE for the scope that immediately contains this type.
21496 Non-named types that do not involve a function-local type get global
21497 scope. Named types nested in namespaces or other types get their
21498 containing scope. All other types (i.e. function-local named types) get
21499 the current active scope. */
21500
21501 static dw_die_ref
21502 scope_die_for (tree t, dw_die_ref context_die)
21503 {
21504 dw_die_ref scope_die = NULL;
21505 tree containing_scope;
21506
21507 /* Non-types always go in the current scope. */
21508 gcc_assert (TYPE_P (t));
21509
21510 /* Use the scope of the typedef, rather than the scope of the type
21511 it refers to. */
21512 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21513 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21514 else
21515 containing_scope = TYPE_CONTEXT (t);
21516
21517 /* Use the containing namespace if there is one. */
21518 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21519 {
21520 if (context_die == lookup_decl_die (containing_scope))
21521 /* OK */;
21522 else if (debug_info_level > DINFO_LEVEL_TERSE)
21523 context_die = get_context_die (containing_scope);
21524 else
21525 containing_scope = NULL_TREE;
21526 }
21527
21528 /* Ignore function type "scopes" from the C frontend. They mean that
21529 a tagged type is local to a parmlist of a function declarator, but
21530 that isn't useful to DWARF. */
21531 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21532 containing_scope = NULL_TREE;
21533
21534 if (SCOPE_FILE_SCOPE_P (containing_scope))
21535 {
21536 /* If T uses a local type keep it local as well, to avoid references
21537 to function-local DIEs from outside the function. */
21538 if (current_function_decl && uses_local_type (t))
21539 scope_die = context_die;
21540 else
21541 scope_die = comp_unit_die ();
21542 }
21543 else if (TYPE_P (containing_scope))
21544 {
21545 /* For types, we can just look up the appropriate DIE. */
21546 if (debug_info_level > DINFO_LEVEL_TERSE)
21547 scope_die = get_context_die (containing_scope);
21548 else
21549 {
21550 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21551 if (scope_die == NULL)
21552 scope_die = comp_unit_die ();
21553 }
21554 }
21555 else
21556 scope_die = context_die;
21557
21558 return scope_die;
21559 }
21560
21561 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21562
21563 static inline int
21564 local_scope_p (dw_die_ref context_die)
21565 {
21566 for (; context_die; context_die = context_die->die_parent)
21567 if (context_die->die_tag == DW_TAG_inlined_subroutine
21568 || context_die->die_tag == DW_TAG_subprogram)
21569 return 1;
21570
21571 return 0;
21572 }
21573
21574 /* Returns nonzero if CONTEXT_DIE is a class. */
21575
21576 static inline int
21577 class_scope_p (dw_die_ref context_die)
21578 {
21579 return (context_die
21580 && (context_die->die_tag == DW_TAG_structure_type
21581 || context_die->die_tag == DW_TAG_class_type
21582 || context_die->die_tag == DW_TAG_interface_type
21583 || context_die->die_tag == DW_TAG_union_type));
21584 }
21585
21586 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21587 whether or not to treat a DIE in this context as a declaration. */
21588
21589 static inline int
21590 class_or_namespace_scope_p (dw_die_ref context_die)
21591 {
21592 return (class_scope_p (context_die)
21593 || (context_die && context_die->die_tag == DW_TAG_namespace));
21594 }
21595
21596 /* Many forms of DIEs require a "type description" attribute. This
21597 routine locates the proper "type descriptor" die for the type given
21598 by 'type' plus any additional qualifiers given by 'cv_quals', and
21599 adds a DW_AT_type attribute below the given die. */
21600
21601 static void
21602 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21603 bool reverse, dw_die_ref context_die)
21604 {
21605 enum tree_code code = TREE_CODE (type);
21606 dw_die_ref type_die = NULL;
21607
21608 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21609 or fixed-point type, use the inner type. This is because we have no
21610 support for unnamed types in base_type_die. This can happen if this is
21611      an Ada subrange type.  The correct solution is to emit a subrange type die.  */
21612 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21613 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21614 type = TREE_TYPE (type), code = TREE_CODE (type);
21615
21616 if (code == ERROR_MARK
21617 /* Handle a special case. For functions whose return type is void, we
21618 generate *no* type attribute. (Note that no object may have type
21619 `void', so this only applies to function return types). */
21620 || code == VOID_TYPE)
21621 return;
21622
21623 type_die = modified_type_die (type,
21624 cv_quals | TYPE_QUALS (type),
21625 reverse,
21626 context_die);
21627
21628 if (type_die != NULL)
21629 add_AT_die_ref (object_die, DW_AT_type, type_die);
21630 }
21631
21632 /* Given an object die, add the calling convention attribute for the
21633 function call type. */
21634 static void
21635 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21636 {
21637 enum dwarf_calling_convention value = DW_CC_normal;
21638
21639 value = ((enum dwarf_calling_convention)
21640 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21641
21642 if (is_fortran ()
21643 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21644 {
21645 /* DWARF 2 doesn't provide a way to identify a program's source-level
21646 entry point. DW_AT_calling_convention attributes are only meant
21647 to describe functions' calling conventions. However, lacking a
21648 better way to signal the Fortran main program, we used this for
21649 a long time, following existing custom. Now, DWARF 4 has
21650 DW_AT_main_subprogram, which we add below, but some tools still
21651 rely on the old way, which we thus keep. */
21652 value = DW_CC_program;
21653
21654 if (dwarf_version >= 4 || !dwarf_strict)
21655 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21656 }
21657
21658 /* Only add the attribute if the backend requests it, and
21659 is not DW_CC_normal. */
21660 if (value && (value != DW_CC_normal))
21661 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21662 }
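
/* Illustrative sketch (an editor's example, not part of the original
   source): for a Fortran main program

       PROGRAM HELLO
       END PROGRAM HELLO

   whose assembler name is MAIN__, the routine above sets
   DW_AT_calling_convention : DW_CC_program and, when DWARF 4 or
   non-strict DWARF is in use, also DW_AT_main_subprogram : 1.  */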
21663
21664 /* Given a tree pointer to a struct, class, union, or enum type node, return
21665 a pointer to the (string) tag name for the given type, or zero if the type
21666 was declared without a tag. */
21667
21668 static const char *
21669 type_tag (const_tree type)
21670 {
21671 const char *name = 0;
21672
21673 if (TYPE_NAME (type) != 0)
21674 {
21675 tree t = 0;
21676
21677 /* Find the IDENTIFIER_NODE for the type name. */
21678 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21679 && !TYPE_NAMELESS (type))
21680 t = TYPE_NAME (type);
21681
21682 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21683 a TYPE_DECL node, regardless of whether or not a `typedef' was
21684 involved. */
21685 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21686 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21687 {
21688 /* We want to be extra verbose. Don't call dwarf_name if
21689 DECL_NAME isn't set. The default hook for decl_printable_name
21690 doesn't like that, and in this context it's correct to return
21691 0, instead of "<anonymous>" or the like. */
21692 if (DECL_NAME (TYPE_NAME (type))
21693 && !DECL_NAMELESS (TYPE_NAME (type)))
21694 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21695 }
21696
21697 /* Now get the name as a string, or invent one. */
21698 if (!name && t != 0)
21699 name = IDENTIFIER_POINTER (t);
21700 }
21701
21702 return (name == 0 || *name == '\0') ? 0 : name;
21703 }
21704
21705 /* Return the type associated with a data member, make a special check
21706 for bit field types. */
21707
21708 static inline tree
21709 member_declared_type (const_tree member)
21710 {
21711 return (DECL_BIT_FIELD_TYPE (member)
21712 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21713 }
21714
21715 /* Get the decl's label, as described by its RTL. This may be different
21716 from the DECL_NAME name used in the source file. */
21717
21718 #if 0
21719 static const char *
21720 decl_start_label (tree decl)
21721 {
21722 rtx x;
21723 const char *fnname;
21724
21725 x = DECL_RTL (decl);
21726 gcc_assert (MEM_P (x));
21727
21728 x = XEXP (x, 0);
21729 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21730
21731 fnname = XSTR (x, 0);
21732 return fnname;
21733 }
21734 #endif
21735
21736 /* For variable-length arrays that have been previously generated, but
21737 may be incomplete due to missing subscript info, fill the subscript
21738 info. Return TRUE if this is one of those cases. */
21739 static bool
21740 fill_variable_array_bounds (tree type)
21741 {
21742 if (TREE_ASM_WRITTEN (type)
21743 && TREE_CODE (type) == ARRAY_TYPE
21744 && variably_modified_type_p (type, NULL))
21745 {
21746 dw_die_ref array_die = lookup_type_die (type);
21747 if (!array_die)
21748 return false;
21749 add_subscript_info (array_die, type, !is_ada ());
21750 return true;
21751 }
21752 return false;
21753 }
21754
21755 /* These routines generate the internal representation of the DIE's for
21756 the compilation unit. Debugging information is collected by walking
21757 the declaration trees passed in from dwarf2out_decl(). */
21758
21759 static void
21760 gen_array_type_die (tree type, dw_die_ref context_die)
21761 {
21762 dw_die_ref array_die;
21763
21764 /* GNU compilers represent multidimensional array types as sequences of one
21765 dimensional array types whose element types are themselves array types.
21766 We sometimes squish that down to a single array_type DIE with multiple
21767 subscripts in the Dwarf debugging info. The draft Dwarf specification
21768      says that we are allowed to do this kind of compression in C, because
21769 there is no difference between an array of arrays and a multidimensional
21770 array. We don't do this for Ada to remain as close as possible to the
21771      actual representation, which is especially important given the language's
21772      flexibility wrt arrays of variable size.  */
21773
21774 bool collapse_nested_arrays = !is_ada ();
21775
21776 if (fill_variable_array_bounds (type))
21777 return;
21778
21779 dw_die_ref scope_die = scope_die_for (type, context_die);
21780 tree element_type;
21781
21782 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21783 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21784 if (TYPE_STRING_FLAG (type)
21785 && TREE_CODE (type) == ARRAY_TYPE
21786 && is_fortran ()
21787 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21788 {
21789 HOST_WIDE_INT size;
21790
21791 array_die = new_die (DW_TAG_string_type, scope_die, type);
21792 add_name_attribute (array_die, type_tag (type));
21793 equate_type_number_to_die (type, array_die);
21794 size = int_size_in_bytes (type);
21795 if (size >= 0)
21796 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21797 /* ??? We can't annotate types late, but for LTO we may not
21798 generate a location early either (gfortran.dg/save_6.f90). */
21799 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21800 && TYPE_DOMAIN (type) != NULL_TREE
21801 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21802 {
21803 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21804 tree rszdecl = szdecl;
21805
21806 size = int_size_in_bytes (TREE_TYPE (szdecl));
21807 if (!DECL_P (szdecl))
21808 {
21809 if (TREE_CODE (szdecl) == INDIRECT_REF
21810 && DECL_P (TREE_OPERAND (szdecl, 0)))
21811 {
21812 rszdecl = TREE_OPERAND (szdecl, 0);
21813 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21814 != DWARF2_ADDR_SIZE)
21815 size = 0;
21816 }
21817 else
21818 size = 0;
21819 }
21820 if (size > 0)
21821 {
21822 dw_loc_list_ref loc
21823 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21824 NULL);
21825 if (loc)
21826 {
21827 add_AT_location_description (array_die, DW_AT_string_length,
21828 loc);
21829 if (size != DWARF2_ADDR_SIZE)
21830 add_AT_unsigned (array_die, dwarf_version >= 5
21831 ? DW_AT_string_length_byte_size
21832 : DW_AT_byte_size, size);
21833 }
21834 }
21835 }
21836 return;
21837 }
21838
21839 array_die = new_die (DW_TAG_array_type, scope_die, type);
21840 add_name_attribute (array_die, type_tag (type));
21841 equate_type_number_to_die (type, array_die);
21842
21843 if (TREE_CODE (type) == VECTOR_TYPE)
21844 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21845
21846 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21847 if (is_fortran ()
21848 && TREE_CODE (type) == ARRAY_TYPE
21849 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21850 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21851 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21852
21853 #if 0
21854 /* We default the array ordering. Debuggers will probably do the right
21855 things even if DW_AT_ordering is not present. It's not even an issue
21856 until we start to get into multidimensional arrays anyway. If a debugger
21857 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21858 then we'll have to put the DW_AT_ordering attribute back in. (But if
21859 and when we find out that we need to put these in, we will only do so
21860 for multidimensional arrays.) */
21861 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21862 #endif
21863
21864 if (TREE_CODE (type) == VECTOR_TYPE)
21865 {
21866 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21867 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21868 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21869 add_bound_info (subrange_die, DW_AT_upper_bound,
21870 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21871 }
21872 else
21873 add_subscript_info (array_die, type, collapse_nested_arrays);
21874
21875 /* Add representation of the type of the elements of this array type and
21876 emit the corresponding DIE if we haven't done it already. */
21877 element_type = TREE_TYPE (type);
21878 if (collapse_nested_arrays)
21879 while (TREE_CODE (element_type) == ARRAY_TYPE)
21880 {
21881 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21882 break;
21883 element_type = TREE_TYPE (element_type);
21884 }
21885
21886 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21887 TREE_CODE (type) == ARRAY_TYPE
21888 && TYPE_REVERSE_STORAGE_ORDER (type),
21889 context_die);
21890
21891 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21892 if (TYPE_ARTIFICIAL (type))
21893 add_AT_flag (array_die, DW_AT_artificial, 1);
21894
21895 if (get_AT (array_die, DW_AT_name))
21896 add_pubtype (type, array_die);
21897
21898 add_alignment_attribute (array_die, type);
21899 }
21900
21901 /* This routine generates the DIE for an array with a hidden descriptor;
21902 the details are filled into *info by a langhook. */
21903
21904 static void
21905 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21906 dw_die_ref context_die)
21907 {
21908 const dw_die_ref scope_die = scope_die_for (type, context_die);
21909 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21910 struct loc_descr_context context = { type, info->base_decl, NULL,
21911 false, false };
21912 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21913 int dim;
21914
21915 add_name_attribute (array_die, type_tag (type));
21916 equate_type_number_to_die (type, array_die);
21917
21918 if (info->ndimensions > 1)
21919 switch (info->ordering)
21920 {
21921 case array_descr_ordering_row_major:
21922 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21923 break;
21924 case array_descr_ordering_column_major:
21925 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21926 break;
21927 default:
21928 break;
21929 }
21930
21931 if (dwarf_version >= 3 || !dwarf_strict)
21932 {
21933 if (info->data_location)
21934 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21935 dw_scalar_form_exprloc, &context);
21936 if (info->associated)
21937 add_scalar_info (array_die, DW_AT_associated, info->associated,
21938 dw_scalar_form_constant
21939 | dw_scalar_form_exprloc
21940 | dw_scalar_form_reference, &context);
21941 if (info->allocated)
21942 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21943 dw_scalar_form_constant
21944 | dw_scalar_form_exprloc
21945 | dw_scalar_form_reference, &context);
21946 if (info->stride)
21947 {
21948 const enum dwarf_attribute attr
21949 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21950 const int forms
21951 = (info->stride_in_bits)
21952 ? dw_scalar_form_constant
21953 : (dw_scalar_form_constant
21954 | dw_scalar_form_exprloc
21955 | dw_scalar_form_reference);
21956
21957 add_scalar_info (array_die, attr, info->stride, forms, &context);
21958 }
21959 }
21960 if (dwarf_version >= 5)
21961 {
21962 if (info->rank)
21963 {
21964 add_scalar_info (array_die, DW_AT_rank, info->rank,
21965 dw_scalar_form_constant
21966 | dw_scalar_form_exprloc, &context);
21967 subrange_tag = DW_TAG_generic_subrange;
21968 context.placeholder_arg = true;
21969 }
21970 }
21971
21972 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21973
21974 for (dim = 0; dim < info->ndimensions; dim++)
21975 {
21976 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21977
21978 if (info->dimen[dim].bounds_type)
21979 add_type_attribute (subrange_die,
21980 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21981 false, context_die);
21982 if (info->dimen[dim].lower_bound)
21983 add_bound_info (subrange_die, DW_AT_lower_bound,
21984 info->dimen[dim].lower_bound, &context);
21985 if (info->dimen[dim].upper_bound)
21986 add_bound_info (subrange_die, DW_AT_upper_bound,
21987 info->dimen[dim].upper_bound, &context);
21988 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21989 add_scalar_info (subrange_die, DW_AT_byte_stride,
21990 info->dimen[dim].stride,
21991 dw_scalar_form_constant
21992 | dw_scalar_form_exprloc
21993 | dw_scalar_form_reference,
21994 &context);
21995 }
21996
21997 gen_type_die (info->element_type, context_die);
21998 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21999 TREE_CODE (type) == ARRAY_TYPE
22000 && TYPE_REVERSE_STORAGE_ORDER (type),
22001 context_die);
22002
22003 if (get_AT (array_die, DW_AT_name))
22004 add_pubtype (type, array_die);
22005
22006 add_alignment_attribute (array_die, type);
22007 }
22008
22009 #if 0
22010 static void
22011 gen_entry_point_die (tree decl, dw_die_ref context_die)
22012 {
22013 tree origin = decl_ultimate_origin (decl);
22014 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
22015
22016 if (origin != NULL)
22017 add_abstract_origin_attribute (decl_die, origin);
22018 else
22019 {
22020 add_name_and_src_coords_attributes (decl_die, decl);
22021 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
22022 TYPE_UNQUALIFIED, false, context_die);
22023 }
22024
22025 if (DECL_ABSTRACT_P (decl))
22026 equate_decl_number_to_die (decl, decl_die);
22027 else
22028 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
22029 }
22030 #endif
22031
22032 /* Walk through the list of incomplete types again, trying once more to
22033 emit full debugging info for them. */
22034
22035 static void
22036 retry_incomplete_types (void)
22037 {
22038 set_early_dwarf s;
22039 int i;
22040
22041 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22042 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22043 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22044 vec_safe_truncate (incomplete_types, 0);
22045 }
22046
22047 /* Determine what tag to use for a record type. */
22048
22049 static enum dwarf_tag
22050 record_type_tag (tree type)
22051 {
22052 if (! lang_hooks.types.classify_record)
22053 return DW_TAG_structure_type;
22054
22055 switch (lang_hooks.types.classify_record (type))
22056 {
22057 case RECORD_IS_STRUCT:
22058 return DW_TAG_structure_type;
22059
22060 case RECORD_IS_CLASS:
22061 return DW_TAG_class_type;
22062
22063 case RECORD_IS_INTERFACE:
22064 if (dwarf_version >= 3 || !dwarf_strict)
22065 return DW_TAG_interface_type;
22066 return DW_TAG_structure_type;
22067
22068 default:
22069 gcc_unreachable ();
22070 }
22071 }
22072
22073 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22074 include all of the information about the enumeration values also. Each
22075 enumerated type name/value is listed as a child of the enumerated type
22076 DIE. */
22077
22078 static dw_die_ref
22079 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22080 {
22081 dw_die_ref type_die = lookup_type_die (type);
22082 dw_die_ref orig_type_die = type_die;
22083
22084 if (type_die == NULL)
22085 {
22086 type_die = new_die (DW_TAG_enumeration_type,
22087 scope_die_for (type, context_die), type);
22088 equate_type_number_to_die (type, type_die);
22089 add_name_attribute (type_die, type_tag (type));
22090 if ((dwarf_version >= 4 || !dwarf_strict)
22091 && ENUM_IS_SCOPED (type))
22092 add_AT_flag (type_die, DW_AT_enum_class, 1);
22093 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22094 add_AT_flag (type_die, DW_AT_declaration, 1);
22095 if (!dwarf_strict)
22096 add_AT_unsigned (type_die, DW_AT_encoding,
22097 TYPE_UNSIGNED (type)
22098 ? DW_ATE_unsigned
22099 : DW_ATE_signed);
22100 }
22101 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22102 return type_die;
22103 else
22104 remove_AT (type_die, DW_AT_declaration);
22105
22106 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22107 given enum type is incomplete, do not generate the DW_AT_byte_size
22108 attribute or the DW_AT_element_list attribute. */
22109 if (TYPE_SIZE (type))
22110 {
22111 tree link;
22112
22113 if (!ENUM_IS_OPAQUE (type))
22114 TREE_ASM_WRITTEN (type) = 1;
22115 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22116 add_byte_size_attribute (type_die, type);
22117 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22118 add_alignment_attribute (type_die, type);
22119 if ((dwarf_version >= 3 || !dwarf_strict)
22120 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22121 {
22122 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22123 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22124 context_die);
22125 }
22126 if (TYPE_STUB_DECL (type) != NULL_TREE)
22127 {
22128 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22129 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22130 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22131 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22132 }
22133
22134 /* If the first reference to this type was as the return type of an
22135 inline function, then it may not have a parent. Fix this now. */
22136 if (type_die->die_parent == NULL)
22137 add_child_die (scope_die_for (type, context_die), type_die);
22138
22139 for (link = TYPE_VALUES (type);
22140 link != NULL; link = TREE_CHAIN (link))
22141 {
22142 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22143 tree value = TREE_VALUE (link);
22144
22145 gcc_assert (!ENUM_IS_OPAQUE (type));
22146 add_name_attribute (enum_die,
22147 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22148
22149 if (TREE_CODE (value) == CONST_DECL)
22150 value = DECL_INITIAL (value);
22151
22152 if (simple_type_size_in_bits (TREE_TYPE (value))
22153 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22154 {
22155 /* For constant forms created by add_AT_unsigned, DWARF
22156 consumers (GDB, elfutils, etc.) always zero extend
22157 the value. Only when the actual value is negative
22158 do we need to use add_AT_int to generate a constant
22159 form that can represent negative values. */
22160 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22161 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22162 add_AT_unsigned (enum_die, DW_AT_const_value,
22163 (unsigned HOST_WIDE_INT) val);
22164 else
22165 add_AT_int (enum_die, DW_AT_const_value, val);
22166 }
22167 else
22168 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22169 that here. TODO: This should be re-worked to use correct
22170 signed/unsigned double tags for all cases. */
22171 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22172 }
22173
22174 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22175 if (TYPE_ARTIFICIAL (type)
22176 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22177 add_AT_flag (type_die, DW_AT_artificial, 1);
22178 }
22179 else
22180 add_AT_flag (type_die, DW_AT_declaration, 1);
22181
22182 add_pubtype (type, type_die);
22183
22184 return type_die;
22185 }
22186
22187 /* Generate a DIE to represent either a real live formal parameter decl or to
22188 represent just the type of some formal parameter position in some function
22189 type.
22190
22191 Note that this routine is a bit unusual because its argument may be a
22192 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22193 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22194 node. If it's the former then this function is being called to output a
22195 DIE to represent a formal parameter object (or some inlining thereof). If
22196 it's the latter, then this function is only being called to output a
22197 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22198 argument type of some subprogram type.
22199 If EMIT_NAME_P is true, name and source coordinate attributes
22200 are emitted. */
22201
22202 static dw_die_ref
22203 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22204 dw_die_ref context_die)
22205 {
22206 tree node_or_origin = node ? node : origin;
22207 tree ultimate_origin;
22208 dw_die_ref parm_die = NULL;
22209
22210 if (DECL_P (node_or_origin))
22211 {
22212 parm_die = lookup_decl_die (node);
22213
22214 /* If the contexts differ, we may not be talking about the same
22215 thing.
22216 ??? When in LTO the DIE parent is the "abstract" copy and the
22217 context_die is the specification "copy". But this whole block
22218 should eventually be no longer needed. */
22219 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22220 {
22221 if (!DECL_ABSTRACT_P (node))
22222 {
22223 /* This can happen when creating an inlined instance, in
22224 which case we need to create a new DIE that will get
22225 annotated with DW_AT_abstract_origin. */
22226 parm_die = NULL;
22227 }
22228 else
22229 gcc_unreachable ();
22230 }
22231
22232 if (parm_die && parm_die->die_parent == NULL)
22233 {
22234 /* Check that parm_die already has the right attributes that
22235 we would have added below. If any attributes are
22236 missing, fall through to add them. */
22237 if (! DECL_ABSTRACT_P (node_or_origin)
22238 && !get_AT (parm_die, DW_AT_location)
22239 && !get_AT (parm_die, DW_AT_const_value))
22240 /* We are missing location info, and are about to add it. */
22241 ;
22242 else
22243 {
22244 add_child_die (context_die, parm_die);
22245 return parm_die;
22246 }
22247 }
22248 }
22249
22250 /* If we have a previously generated DIE, use it, unless this is a
22251 concrete instance (origin != NULL), in which case we need a new
22252 DIE with a corresponding DW_AT_abstract_origin. */
22253 bool reusing_die;
22254 if (parm_die && origin == NULL)
22255 reusing_die = true;
22256 else
22257 {
22258 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22259 reusing_die = false;
22260 }
22261
22262 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22263 {
22264 case tcc_declaration:
22265 ultimate_origin = decl_ultimate_origin (node_or_origin);
22266 if (node || ultimate_origin)
22267 origin = ultimate_origin;
22268
22269 if (reusing_die)
22270 goto add_location;
22271
22272 if (origin != NULL)
22273 add_abstract_origin_attribute (parm_die, origin);
22274 else if (emit_name_p)
22275 add_name_and_src_coords_attributes (parm_die, node);
22276 if (origin == NULL
22277 || (! DECL_ABSTRACT_P (node_or_origin)
22278 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22279 decl_function_context
22280 (node_or_origin))))
22281 {
22282 tree type = TREE_TYPE (node_or_origin);
22283 if (decl_by_reference_p (node_or_origin))
22284 add_type_attribute (parm_die, TREE_TYPE (type),
22285 TYPE_UNQUALIFIED,
22286 false, context_die);
22287 else
22288 add_type_attribute (parm_die, type,
22289 decl_quals (node_or_origin),
22290 false, context_die);
22291 }
22292 if (origin == NULL && DECL_ARTIFICIAL (node))
22293 add_AT_flag (parm_die, DW_AT_artificial, 1);
22294 add_location:
22295 if (node && node != origin)
22296 equate_decl_number_to_die (node, parm_die);
22297 if (! DECL_ABSTRACT_P (node_or_origin))
22298 add_location_or_const_value_attribute (parm_die, node_or_origin,
22299 node == NULL);
22300
22301 break;
22302
22303 case tcc_type:
22304 /* We were called with some kind of a ..._TYPE node. */
22305 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22306 context_die);
22307 break;
22308
22309 default:
22310 gcc_unreachable ();
22311 }
22312
22313 return parm_die;
22314 }
22315
22316 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22317 child DW_TAG_formal_parameter DIEs representing the arguments of the
22318 parameter pack.
22319
22320 PARM_PACK must be a function parameter pack.
22321 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22322 must point to the subsequent arguments of the function PACK_ARG belongs to.
22323 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22324 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22325 following the last one for which a DIE was generated. */
22326
22327 static dw_die_ref
22328 gen_formal_parameter_pack_die (tree parm_pack,
22329 tree pack_arg,
22330 dw_die_ref subr_die,
22331 tree *next_arg)
22332 {
22333 tree arg;
22334 dw_die_ref parm_pack_die;
22335
22336 gcc_assert (parm_pack
22337 && lang_hooks.function_parameter_pack_p (parm_pack)
22338 && subr_die);
22339
22340 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22341 add_src_coords_attributes (parm_pack_die, parm_pack);
22342
22343 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22344 {
22345 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22346 parm_pack))
22347 break;
22348 gen_formal_parameter_die (arg, NULL,
22349 false /* Don't emit name attribute. */,
22350 parm_pack_die);
22351 }
22352 if (next_arg)
22353 *next_arg = arg;
22354 return parm_pack_die;
22355 }
22356
22357 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22358 at the end of an (ANSI prototyped) formal parameter list. */
22359
22360 static void
22361 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22362 {
22363 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22364 }
22365
22366 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22367 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22368 parameters as specified in some function type specification (except for
22369 those which appear as part of a function *definition*). */
22370
22371 static void
22372 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22373 {
22374 tree link;
22375 tree formal_type = NULL;
22376 tree first_parm_type;
22377 tree arg;
22378
22379 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22380 {
22381 arg = DECL_ARGUMENTS (function_or_method_type);
22382 function_or_method_type = TREE_TYPE (function_or_method_type);
22383 }
22384 else
22385 arg = NULL_TREE;
22386
22387 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22388
22389 /* Make our first pass over the list of formal parameter types and output a
22390 DW_TAG_formal_parameter DIE for each one. */
22391 for (link = first_parm_type; link; )
22392 {
22393 dw_die_ref parm_die;
22394
22395 formal_type = TREE_VALUE (link);
22396 if (formal_type == void_type_node)
22397 break;
22398
22399 /* Output a (nameless) DIE to represent the formal parameter itself. */
22400 if (!POINTER_BOUNDS_TYPE_P (formal_type))
22401 {
22402 parm_die = gen_formal_parameter_die (formal_type, NULL,
22403 true /* Emit name attribute. */,
22404 context_die);
22405 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22406 && link == first_parm_type)
22407 {
22408 add_AT_flag (parm_die, DW_AT_artificial, 1);
22409 if (dwarf_version >= 3 || !dwarf_strict)
22410 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22411 }
22412 else if (arg && DECL_ARTIFICIAL (arg))
22413 add_AT_flag (parm_die, DW_AT_artificial, 1);
22414 }
22415
22416 link = TREE_CHAIN (link);
22417 if (arg)
22418 arg = DECL_CHAIN (arg);
22419 }
22420
22421 /* If this function type has an ellipsis, add a
22422 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22423 if (formal_type != void_type_node)
22424 gen_unspecified_parameters_die (function_or_method_type, context_die);
22425
22426 /* Make our second (and final) pass over the list of formal parameter types
22427 and output DIEs to represent those types (as necessary). */
22428 for (link = TYPE_ARG_TYPES (function_or_method_type);
22429 link && TREE_VALUE (link);
22430 link = TREE_CHAIN (link))
22431 gen_type_die (TREE_VALUE (link), context_die);
22432 }
22433
22434 /* We want to generate the DIE for TYPE so that we can generate the
22435 die for MEMBER, which has been defined; we will need to refer back
22436 to the member declaration nested within TYPE. If we're trying to
22437 generate minimal debug info for TYPE, processing TYPE won't do the
22438 trick; we need to attach the member declaration by hand. */
22439
22440 static void
22441 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22442 {
22443 gen_type_die (type, context_die);
22444
22445 /* If we're trying to avoid duplicate debug info, we may not have
22446 emitted the member decl for this function. Emit it now. */
22447 if (TYPE_STUB_DECL (type)
22448 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22449 && ! lookup_decl_die (member))
22450 {
22451 dw_die_ref type_die;
22452 gcc_assert (!decl_ultimate_origin (member));
22453
22454 push_decl_scope (type);
22455 type_die = lookup_type_die_strip_naming_typedef (type);
22456 if (TREE_CODE (member) == FUNCTION_DECL)
22457 gen_subprogram_die (member, type_die);
22458 else if (TREE_CODE (member) == FIELD_DECL)
22459 {
22460 /* Ignore the nameless fields that are used to skip bits but handle
22461 C++ anonymous unions and structs. */
22462 if (DECL_NAME (member) != NULL_TREE
22463 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22464 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22465 {
22466 struct vlr_context vlr_ctx = {
22467 DECL_CONTEXT (member), /* struct_type */
22468 NULL_TREE /* variant_part_offset */
22469 };
22470 gen_type_die (member_declared_type (member), type_die);
22471 gen_field_die (member, &vlr_ctx, type_die);
22472 }
22473 }
22474 else
22475 gen_variable_die (member, NULL_TREE, type_die);
22476
22477 pop_decl_scope ();
22478 }
22479 }
22480
22481 /* Forward declare this function, because it is mutually recursive
22482 with its set_block_* pairing function. */
22483 static void set_decl_origin_self (tree);
22484
22485 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22486 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22487 that it points to the node itself, thus indicating that the node is its
22488 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22489 the given node is NULL, recursively descend the decl/block tree which
22490 it is the root of, and for each other ..._DECL or BLOCK node contained
22491 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22492 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22493 values to point to themselves. */
22494
22495 static void
22496 set_block_origin_self (tree stmt)
22497 {
22498 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22499 {
22500 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22501
22502 {
22503 tree local_decl;
22504
22505 for (local_decl = BLOCK_VARS (stmt);
22506 local_decl != NULL_TREE;
22507 local_decl = DECL_CHAIN (local_decl))
22508 /* Do not recurse on nested functions since the inlining status
22509 of parent and child can be different as per the DWARF spec. */
22510 if (TREE_CODE (local_decl) != FUNCTION_DECL
22511 && !DECL_EXTERNAL (local_decl))
22512 set_decl_origin_self (local_decl);
22513 }
22514
22515 {
22516 tree subblock;
22517
22518 for (subblock = BLOCK_SUBBLOCKS (stmt);
22519 subblock != NULL_TREE;
22520 subblock = BLOCK_CHAIN (subblock))
22521 set_block_origin_self (subblock); /* Recurse. */
22522 }
22523 }
22524 }
22525
22526 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22527 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22528 node so that it points to the node itself, thus indicating that the
22529 node represents its own (abstract) origin. Additionally, if the
22530 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22531 the decl/block tree of which the given node is the root, and for
22532 each other ..._DECL or BLOCK node contained therein whose
22533 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22534 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22535 point to themselves. */
22536
22537 static void
22538 set_decl_origin_self (tree decl)
22539 {
22540 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22541 {
22542 DECL_ABSTRACT_ORIGIN (decl) = decl;
22543 if (TREE_CODE (decl) == FUNCTION_DECL)
22544 {
22545 tree arg;
22546
22547 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22548 DECL_ABSTRACT_ORIGIN (arg) = arg;
22549 if (DECL_INITIAL (decl) != NULL_TREE
22550 && DECL_INITIAL (decl) != error_mark_node)
22551 set_block_origin_self (DECL_INITIAL (decl));
22552 }
22553 }
22554 }
22555
22556 /* Mark the early DIE for DECL as the abstract instance. */
22557
22558 static void
22559 dwarf2out_abstract_function (tree decl)
22560 {
22561 dw_die_ref old_die;
22562
22563 /* Make sure we have the actual abstract inline, not a clone. */
22564 decl = DECL_ORIGIN (decl);
22565
22566 if (DECL_IGNORED_P (decl))
22567 return;
22568
22569 old_die = lookup_decl_die (decl);
22570 /* With early debug we always have an old DIE unless we are in LTO
22571 and the user did not compile but only link with debug. */
22572 if (in_lto_p && ! old_die)
22573 return;
22574 gcc_assert (old_die != NULL);
22575 if (get_AT (old_die, DW_AT_inline)
22576 || get_AT (old_die, DW_AT_abstract_origin))
22577 /* We've already generated the abstract instance. */
22578 return;
22579
22580 /* Go ahead and put DW_AT_inline on the DIE. */
22581 if (DECL_DECLARED_INLINE_P (decl))
22582 {
22583 if (cgraph_function_possibly_inlined_p (decl))
22584 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22585 else
22586 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22587 }
22588 else
22589 {
22590 if (cgraph_function_possibly_inlined_p (decl))
22591 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22592 else
22593 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22594 }
22595
22596 if (DECL_DECLARED_INLINE_P (decl)
22597 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22598 add_AT_flag (old_die, DW_AT_artificial, 1);
22599
22600 set_decl_origin_self (decl);
22601 }
22602
22603 /* Helper function of premark_used_types() which gets called through
22604 htab_traverse.
22605
22606 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22607 marked as unused by prune_unused_types. */
22608
22609 bool
22610 premark_used_types_helper (tree const &type, void *)
22611 {
22612 dw_die_ref die;
22613
22614 die = lookup_type_die (type);
22615 if (die != NULL)
22616 die->die_perennial_p = 1;
22617 return true;
22618 }
22619
22620 /* Helper function of premark_types_used_by_global_vars which gets called
22621 through htab_traverse.
22622
22623 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22624 marked as unused by prune_unused_types. The DIE of the type is marked
22625 only if the global variable using the type will actually be emitted. */
22626
22627 int
22628 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22629 void *)
22630 {
22631 struct types_used_by_vars_entry *entry;
22632 dw_die_ref die;
22633
22634 entry = (struct types_used_by_vars_entry *) *slot;
22635 gcc_assert (entry->type != NULL
22636 && entry->var_decl != NULL);
22637 die = lookup_type_die (entry->type);
22638 if (die)
22639 {
22640 /* Ask cgraph if the global variable really is to be emitted.
22641 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22642 varpool_node *node = varpool_node::get (entry->var_decl);
22643 if (node && node->definition)
22644 {
22645 die->die_perennial_p = 1;
22646 /* Keep the parent DIEs as well. */
22647 while ((die = die->die_parent) && die->die_perennial_p == 0)
22648 die->die_perennial_p = 1;
22649 }
22650 }
22651 return 1;
22652 }
22653
22654 /* Mark all members of used_types_hash as perennial. */
22655
22656 static void
22657 premark_used_types (struct function *fun)
22658 {
22659 if (fun && fun->used_types_hash)
22660 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22661 }
22662
22663 /* Mark all members of types_used_by_vars_entry as perennial. */
22664
22665 static void
22666 premark_types_used_by_global_vars (void)
22667 {
22668 if (types_used_by_vars_hash)
22669 types_used_by_vars_hash
22670 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22671 }
22672
22673 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22674 for CA_LOC call arg loc node. */
22675
22676 static dw_die_ref
22677 gen_call_site_die (tree decl, dw_die_ref subr_die,
22678 struct call_arg_loc_node *ca_loc)
22679 {
22680 dw_die_ref stmt_die = NULL, die;
22681 tree block = ca_loc->block;
22682
22683 while (block
22684 && block != DECL_INITIAL (decl)
22685 && TREE_CODE (block) == BLOCK)
22686 {
22687 stmt_die = BLOCK_DIE (block);
22688 if (stmt_die)
22689 break;
22690 block = BLOCK_SUPERCONTEXT (block);
22691 }
22692 if (stmt_die == NULL)
22693 stmt_die = subr_die;
22694 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22695 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22696 if (ca_loc->tail_call_p)
22697 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22698 if (ca_loc->symbol_ref)
22699 {
22700 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22701 if (tdie)
22702 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22703 else
22704 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22705 false);
22706 }
22707 return die;
22708 }
22709
22710 /* Generate a DIE to represent a declared function (either file-scope or
22711 block-local). */
22712
22713 static void
22714 gen_subprogram_die (tree decl, dw_die_ref context_die)
22715 {
22716 tree origin = decl_ultimate_origin (decl);
22717 dw_die_ref subr_die;
22718 dw_die_ref old_die = lookup_decl_die (decl);
22719
22720 /* This function gets called multiple times for different stages of
22721 the debug process. For example, for func() in this code:
22722
22723 namespace S
22724 {
22725 void func() { ... }
22726 }
22727
22728 ...we get called 4 times. Twice in early debug and twice in
22729 late debug:
22730
22731 Early debug
22732 -----------
22733
22734 1. Once while generating func() within the namespace. This is
22735 the declaration. The declaration bit below is set, as the
22736 context is the namespace.
22737
22738 A new DIE will be generated with DW_AT_declaration set.
22739
22740 2. Once for func() itself. This is the specification. The
22741 declaration bit below is clear as the context is the CU.
22742
22743 We will use the cached DIE from (1) to create a new DIE with
22744 DW_AT_specification pointing to the declaration in (1).
22745
22746 Late debug via rest_of_handle_final()
22747 -------------------------------------
22748
22749 3. Once generating func() within the namespace. This is also the
22750 declaration, as in (1), but this time we will early exit below
22751 as we have a cached DIE and a declaration needs no additional
22752 annotations (no locations), as the source declaration line
22753 info is enough.
22754
22755 4. Once for func() itself. As in (2), this is the specification,
22756 but this time we will re-use the cached DIE, and just annotate
22757 it with the location information that should now be available.
22758
22759 For something without namespaces, but with abstract instances, we
22760 are also called multiple times:
22761
22762 class Base
22763 {
22764 public:
22765 Base (); // constructor declaration (1)
22766 };
22767
22768 Base::Base () { } // constructor specification (2)
22769
22770 Early debug
22771 -----------
22772
22773 1. Once for the Base() constructor by virtue of it being a
22774 member of the Base class. This is done via
22775 rest_of_type_compilation.
22776
22777 This is a declaration, so a new DIE will be created with
22778 DW_AT_declaration.
22779
22780 2. Once for the Base() constructor definition, but this time
22781 while generating the abstract instance of the base
22782 constructor (__base_ctor) which is being generated via early
22783 debug of reachable functions.
22784
22785 Even though we have a cached version of the declaration (1),
22786 we will create a DW_AT_specification of the declaration DIE
22787 in (1).
22788
22789 3. Once for the __base_ctor itself, but this time, we generate
22790 a DW_AT_abstract_origin version of the DW_AT_specification in
22791 (2).
22792
22793 Late debug via rest_of_handle_final
22794 -----------------------------------
22795
22796 4. One final time for the __base_ctor (which will have a cached
22797 DIE with DW_AT_abstract_origin created in (3)). This time,
22798 we will just annotate the location information now
22799 available.
22800 */
22801 int declaration = (current_function_decl != decl
22802 || class_or_namespace_scope_p (context_die));
22803
22804 /* A declaration that has been previously dumped needs no
22805 additional information. */
22806 if (old_die && declaration)
22807 return;
22808
22809 /* Now that the C++ front end lazily declares artificial member fns, we
22810 might need to retrofit the declaration into its class. */
22811 if (!declaration && !origin && !old_die
22812 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22813 && !class_or_namespace_scope_p (context_die)
22814 && debug_info_level > DINFO_LEVEL_TERSE)
22815 old_die = force_decl_die (decl);
22816
22817 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22818 if (origin != NULL)
22819 {
22820 gcc_assert (!declaration || local_scope_p (context_die));
22821
22822 /* Fixup die_parent for the abstract instance of a nested
22823 inline function. */
22824 if (old_die && old_die->die_parent == NULL)
22825 add_child_die (context_die, old_die);
22826
22827 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22828 {
22829 /* If we have a DW_AT_abstract_origin we have a working
22830 cached version. */
22831 subr_die = old_die;
22832 }
22833 else
22834 {
22835 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22836 add_abstract_origin_attribute (subr_die, origin);
22837 /* This is where the actual code for a cloned function is.
22838 Let's emit linkage name attribute for it. This helps
22839 debuggers to e.g, set breakpoints into
22840 constructors/destructors when the user asks "break
22841 K::K". */
22842 add_linkage_name (subr_die, decl);
22843 }
22844 }
22845 /* A cached copy, possibly from early dwarf generation. Reuse as
22846 much as possible. */
22847 else if (old_die)
22848 {
22849 if (!get_AT_flag (old_die, DW_AT_declaration)
22850 /* We can have a normal definition following an inline one in the
22851 case of redefinition of GNU C extern inlines.
22852 It seems reasonable to use AT_specification in this case. */
22853 && !get_AT (old_die, DW_AT_inline))
22854 {
22855 /* Detect and ignore this case, where we are trying to output
22856 something we have already output. */
22857 if (get_AT (old_die, DW_AT_low_pc)
22858 || get_AT (old_die, DW_AT_ranges))
22859 return;
22860
22861 /* If we have no location information, this must be a
22862 partially generated DIE from early dwarf generation.
22863 Fall through and generate it. */
22864 }
22865
22866 /* If the definition comes from the same place as the declaration,
22867 maybe use the old DIE. We always want the DIE for this function
22868 that has the *_pc attributes to be under comp_unit_die so the
22869 debugger can find it. We also need to do this for abstract
22870 instances of inlines, since the spec requires the out-of-line copy
22871 to have the same parent. For local class methods, this doesn't
22872 apply; we just use the old DIE. */
22873 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22874 struct dwarf_file_data * file_index = lookup_filename (s.file);
22875 if ((is_cu_die (old_die->die_parent)
22876 /* This condition fixes the inconsistency/ICE with the
22877 following Fortran test (or some derivative thereof) while
22878 building libgfortran:
22879
22880 module some_m
22881 contains
22882 logical function funky (FLAG)
22883 funky = .true.
22884 end function
22885 end module
22886 */
22887 || (old_die->die_parent
22888 && old_die->die_parent->die_tag == DW_TAG_module)
22889 || context_die == NULL)
22890 && (DECL_ARTIFICIAL (decl)
22891 /* The location attributes may be in the abstract origin
22892 which in the case of LTO might be not available to
22893 look at. */
22894 || get_AT (old_die, DW_AT_abstract_origin)
22895 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22896 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22897 == (unsigned) s.line)
22898 && (!debug_column_info
22899 || s.column == 0
22900 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22901 == (unsigned) s.column)))))
22902 {
22903 subr_die = old_die;
22904
22905 /* Clear out the declaration attribute, but leave the
22906 parameters so they can be augmented with location
22907 information later. Unless this was a declaration, in
22908 which case, wipe out the nameless parameters and recreate
22909 them further down. */
22910 if (remove_AT (subr_die, DW_AT_declaration))
22911 {
22912
22913 remove_AT (subr_die, DW_AT_object_pointer);
22914 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22915 }
22916 }
22917 /* Make a specification pointing to the previously built
22918 declaration. */
22919 else
22920 {
22921 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22922 add_AT_specification (subr_die, old_die);
22923 add_pubname (decl, subr_die);
22924 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22925 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22926 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22927 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22928 if (debug_column_info
22929 && s.column
22930 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22931 != (unsigned) s.column))
22932 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22933
22934 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22935 emit the real type on the definition die. */
22936 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22937 {
22938 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22939 if (die == auto_die || die == decltype_auto_die)
22940 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22941 TYPE_UNQUALIFIED, false, context_die);
22942 }
22943
22944 /* When we process the method declaration, we haven't seen
22945 the out-of-class defaulted definition yet, so we have to
22946 recheck now. */
22947 if ((dwarf_version >= 5 || ! dwarf_strict)
22948 && !get_AT (subr_die, DW_AT_defaulted))
22949 {
22950 int defaulted
22951 = lang_hooks.decls.decl_dwarf_attribute (decl,
22952 DW_AT_defaulted);
22953 if (defaulted != -1)
22954 {
22955 /* Other values must have been handled before. */
22956 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22957 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22958 }
22959 }
22960 }
22961 }
22962 /* Create a fresh DIE for anything else. */
22963 else
22964 {
22965 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22966
22967 if (TREE_PUBLIC (decl))
22968 add_AT_flag (subr_die, DW_AT_external, 1);
22969
22970 add_name_and_src_coords_attributes (subr_die, decl);
22971 add_pubname (decl, subr_die);
22972 if (debug_info_level > DINFO_LEVEL_TERSE)
22973 {
22974 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22975 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22976 TYPE_UNQUALIFIED, false, context_die);
22977 }
22978
22979 add_pure_or_virtual_attribute (subr_die, decl);
22980 if (DECL_ARTIFICIAL (decl))
22981 add_AT_flag (subr_die, DW_AT_artificial, 1);
22982
22983 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22984 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22985
22986 add_alignment_attribute (subr_die, decl);
22987
22988 add_accessibility_attribute (subr_die, decl);
22989 }
22990
22991 /* Unless we have an existing non-declaration DIE, equate the new
22992 DIE. */
22993 if (!old_die || is_declaration_die (old_die))
22994 equate_decl_number_to_die (decl, subr_die);
22995
22996 if (declaration)
22997 {
22998 if (!old_die || !get_AT (old_die, DW_AT_inline))
22999 {
23000 add_AT_flag (subr_die, DW_AT_declaration, 1);
23001
23002 /* If this is an explicit function declaration then generate
23003 a DW_AT_explicit attribute. */
23004 if ((dwarf_version >= 3 || !dwarf_strict)
23005 && lang_hooks.decls.decl_dwarf_attribute (decl,
23006 DW_AT_explicit) == 1)
23007 add_AT_flag (subr_die, DW_AT_explicit, 1);
23008
23009 /* If this is a C++11 deleted special function member then generate
23010 a DW_AT_deleted attribute. */
23011 if ((dwarf_version >= 5 || !dwarf_strict)
23012 && lang_hooks.decls.decl_dwarf_attribute (decl,
23013 DW_AT_deleted) == 1)
23014 add_AT_flag (subr_die, DW_AT_deleted, 1);
23015
23016 /* If this is a C++11 defaulted special function member then
23017 generate a DW_AT_defaulted attribute. */
23018 if (dwarf_version >= 5 || !dwarf_strict)
23019 {
23020 int defaulted
23021 = lang_hooks.decls.decl_dwarf_attribute (decl,
23022 DW_AT_defaulted);
23023 if (defaulted != -1)
23024 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23025 }
23026
23027 /* If this is a C++11 non-static member function with & ref-qualifier
23028 then generate a DW_AT_reference attribute. */
23029 if ((dwarf_version >= 5 || !dwarf_strict)
23030 && lang_hooks.decls.decl_dwarf_attribute (decl,
23031 DW_AT_reference) == 1)
23032 add_AT_flag (subr_die, DW_AT_reference, 1);
23033
23034 /* If this is a C++11 non-static member function with &&
23035 ref-qualifier then generate a DW_AT_reference attribute. */
23036 if ((dwarf_version >= 5 || !dwarf_strict)
23037 && lang_hooks.decls.decl_dwarf_attribute (decl,
23038 DW_AT_rvalue_reference)
23039 == 1)
23040 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23041 }
23042 }
23043 /* For non-DECL_EXTERNALs, if range information is available, fill
23044 the DIE with it. */
23045 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23046 {
23047 HOST_WIDE_INT cfa_fb_offset;
23048
23049 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23050
23051 if (!crtl->has_bb_partition)
23052 {
23053 dw_fde_ref fde = fun->fde;
23054 if (fde->dw_fde_begin)
23055 {
23056 /* We have already generated the labels. */
23057 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23058 fde->dw_fde_end, false);
23059 }
23060 else
23061 {
23062 /* Create start/end labels and add the range. */
23063 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23064 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23065 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23066 current_function_funcdef_no);
23067 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23068 current_function_funcdef_no);
23069 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23070 false);
23071 }
23072
23073 #if VMS_DEBUGGING_INFO
23074 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23075 Section 2.3 Prologue and Epilogue Attributes:
23076 When a breakpoint is set on entry to a function, it is generally
23077 desirable for execution to be suspended, not on the very first
23078 instruction of the function, but rather at a point after the
23079 function's frame has been set up, after any language defined local
23080 declaration processing has been completed, and before execution of
23081 the first statement of the function begins. Debuggers generally
23082 cannot properly determine where this point is. Similarly for a
23083 breakpoint set on exit from a function. The prologue and epilogue
23084 attributes allow a compiler to communicate the location(s) to use. */
23085
23086 {
23087 if (fde->dw_fde_vms_end_prologue)
23088 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23089 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23090
23091 if (fde->dw_fde_vms_begin_epilogue)
23092 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23093 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23094 }
23095 #endif
23096
23097 }
23098 else
23099 {
23100 /* Generate pubnames entries for the split function code ranges. */
23101 dw_fde_ref fde = fun->fde;
23102
23103 if (fde->dw_fde_second_begin)
23104 {
23105 if (dwarf_version >= 3 || !dwarf_strict)
23106 {
23107 /* We should use ranges for non-contiguous code section
23108 addresses. Use the actual code range for the initial
23109 section, since the HOT/COLD labels might precede an
23110 alignment offset. */
23111 bool range_list_added = false;
23112 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23113 fde->dw_fde_end, &range_list_added,
23114 false);
23115 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23116 fde->dw_fde_second_end,
23117 &range_list_added, false);
23118 if (range_list_added)
23119 add_ranges (NULL);
23120 }
23121 else
23122 {
23123 /* There is no real support in DW2 for this, so we make
23124 a work-around. First, emit the pub name for the segment
23125 containing the function label. Then make and emit a
23126 simplified subprogram DIE for the second segment with the
23127 name prefixed by __second_sect_of_. We use the same
23128 linkage name for the second DIE so that gdb will find both
23129 sections when given "b foo". */
23130 const char *name = NULL;
23131 tree decl_name = DECL_NAME (decl);
23132 dw_die_ref seg_die;
23133
23134 /* Do the 'primary' section. */
23135 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23136 fde->dw_fde_end, false);
23137
23138 /* Build a minimal DIE for the secondary section. */
23139 seg_die = new_die (DW_TAG_subprogram,
23140 subr_die->die_parent, decl);
23141
23142 if (TREE_PUBLIC (decl))
23143 add_AT_flag (seg_die, DW_AT_external, 1);
23144
23145 if (decl_name != NULL
23146 && IDENTIFIER_POINTER (decl_name) != NULL)
23147 {
23148 name = dwarf2_name (decl, 1);
23149 if (! DECL_ARTIFICIAL (decl))
23150 add_src_coords_attributes (seg_die, decl);
23151
23152 add_linkage_name (seg_die, decl);
23153 }
23154 gcc_assert (name != NULL);
23155 add_pure_or_virtual_attribute (seg_die, decl);
23156 if (DECL_ARTIFICIAL (decl))
23157 add_AT_flag (seg_die, DW_AT_artificial, 1);
23158
23159 name = concat ("__second_sect_of_", name, NULL);
23160 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23161 fde->dw_fde_second_end, false);
23162 add_name_attribute (seg_die, name);
23163 if (want_pubnames ())
23164 add_pubname_string (name, seg_die);
23165 }
23166 }
23167 else
23168 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23169 false);
23170 }
23171
23172 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23173
23174 /* We define the "frame base" as the function's CFA. This is more
23175 convenient for several reasons: (1) It's stable across the prologue
23176 and epilogue, which makes it better than just a frame pointer,
23177 (2) With dwarf3, there exists a one-byte encoding that allows us
23178 to reference the .debug_frame data by proxy, but failing that,
23179 (3) We can at least reuse the code inspection and interpretation
23180 code that determines the CFA position at various points in the
23181 function. */
23182 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23183 {
23184 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23185 add_AT_loc (subr_die, DW_AT_frame_base, op);
23186 }
23187 else
23188 {
23189 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23190 if (list->dw_loc_next)
23191 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23192 else
23193 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23194 }
23195
23196 /* Compute a displacement from the "steady-state frame pointer" to
23197 the CFA. The former is what all stack slots and argument slots
23198 will reference in the rtl; the latter is what we've told the
23199 debugger about. We'll need to adjust all frame_base references
23200 by this displacement. */
23201 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23202
23203 if (fun->static_chain_decl)
23204 {
23205 /* DWARF requires here a location expression that computes the
23206 address of the enclosing subprogram's frame base. The machinery
23207 in tree-nested.c is supposed to store this specific address in the
23208 last field of the FRAME record. */
23209 const tree frame_type
23210 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23211 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23212
23213 tree fb_expr
23214 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23215 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23216 fb_expr, fb_decl, NULL_TREE);
23217
23218 add_AT_location_description (subr_die, DW_AT_static_link,
23219 loc_list_from_tree (fb_expr, 0, NULL));
23220 }
23221
23222 resolve_variable_values ();
23223 }
23224
23225 /* Generate child DIEs for template parameters. */
23226 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23227 gen_generic_params_dies (decl);
23228
23229 /* Now output descriptions of the arguments for this function. This gets
23230 (unnecessarily?) complex because the DECL_ARGUMENTS list
23231 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23232 `...' at the end of the formal parameter list. In order to find out if
23233 there was a trailing ellipsis or not, we must instead look at the type
23234 associated with the FUNCTION_DECL. This will be a node of type
23235 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23236 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23237 an ellipsis at the end. */
23238
23239 /* In the case where we are describing a mere function declaration, all we
23240 need to do here (and all we *can* do here) is to describe the *types* of
23241 its formal parameters. */
23242 if (debug_info_level <= DINFO_LEVEL_TERSE)
23243 ;
23244 else if (declaration)
23245 gen_formal_types_die (decl, subr_die);
23246 else
23247 {
23248 /* Generate DIEs to represent all known formal parameters. */
23249 tree parm = DECL_ARGUMENTS (decl);
23250 tree generic_decl = early_dwarf
23251 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23252 tree generic_decl_parm = generic_decl
23253 ? DECL_ARGUMENTS (generic_decl)
23254 : NULL;
23255
23256 /* Now we want to walk the list of parameters of the function and
23257 emit their relevant DIEs.
23258
23259 We consider the case of DECL being an instance of a generic function
23260 as well as it being a normal function.
23261
23262 If DECL is an instance of a generic function we walk the
23263 parameters of the generic function declaration _and_ the parameters of
23264 DECL itself. This is useful because we want to emit specific DIEs for
23265 function parameter packs and those are declared as part of the
23266 generic function declaration. In that particular case,
23267 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23268 That DIE has children DIEs representing the set of arguments
23269 of the pack. Note that the set of pack arguments can be empty.
23270 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23271 children DIE.
23272
23273 Otherwise, we just consider the parameters of DECL. */
23274 while (generic_decl_parm || parm)
23275 {
23276 if (generic_decl_parm
23277 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23278 gen_formal_parameter_pack_die (generic_decl_parm,
23279 parm, subr_die,
23280 &parm);
23281 else if (parm && !POINTER_BOUNDS_P (parm))
23282 {
23283 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23284
23285 if (early_dwarf
23286 && parm == DECL_ARGUMENTS (decl)
23287 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23288 && parm_die
23289 && (dwarf_version >= 3 || !dwarf_strict))
23290 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23291
23292 parm = DECL_CHAIN (parm);
23293 }
23294 else if (parm)
23295 parm = DECL_CHAIN (parm);
23296
23297 if (generic_decl_parm)
23298 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23299 }
23300
23301 /* Decide whether we need an unspecified_parameters DIE at the end.
23302 There are 2 more cases to do this for: 1) the ANSI ... declaration,
23303 which is detectable when the end of the arg list is not a
23304 void_type_node; 2) an unprototyped function declaration (not a
23305 definition). This just means that we have no info about the
23306 parameters at all. */
23307 if (early_dwarf)
23308 {
23309 if (prototype_p (TREE_TYPE (decl)))
23310 {
23311 /* This is the prototyped case, check for.... */
23312 if (stdarg_p (TREE_TYPE (decl)))
23313 gen_unspecified_parameters_die (decl, subr_die);
23314 }
23315 else if (DECL_INITIAL (decl) == NULL_TREE)
23316 gen_unspecified_parameters_die (decl, subr_die);
23317 }
23318 }
23319
23320 if (subr_die != old_die)
23321 /* Add the calling convention attribute if requested. */
23322 add_calling_convention_attribute (subr_die, decl);
23323
23324 /* Output Dwarf info for all of the stuff within the body of the function
23325 (if it has one - it may be just a declaration).
23326
23327 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23328 a function. This BLOCK actually represents the outermost binding contour
23329 for the function, i.e. the contour in which the function's formal
23330 parameters and labels get declared. Curiously, it appears that the front
23331 end doesn't actually put the PARM_DECL nodes for the current function onto
23332 the BLOCK_VARS list for this outer scope; instead, they are strung off of
23333 the DECL_ARGUMENTS list for the function.
23334
23335 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23336 the LABEL_DECL nodes for the function however, and we output DWARF info
23337 for those in decls_for_scope. Just within the `outer_scope' there will be
23338 a BLOCK node representing the function's outermost pair of curly braces,
23339 and any blocks used for the base and member initializers of a C++
23340 constructor function. */
23341 tree outer_scope = DECL_INITIAL (decl);
23342 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23343 {
23344 int call_site_note_count = 0;
23345 int tail_call_site_note_count = 0;
23346
23347 /* Emit a DW_TAG_variable DIE for a named return value. */
23348 if (DECL_NAME (DECL_RESULT (decl)))
23349 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23350
23351 /* The first time through decls_for_scope we will generate the
23352 DIEs for the locals. The second time, we fill in the
23353 location info. */
23354 decls_for_scope (outer_scope, subr_die);
23355
23356 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23357 {
23358 struct call_arg_loc_node *ca_loc;
23359 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23360 {
23361 dw_die_ref die = NULL;
23362 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23363 rtx arg, next_arg;
23364
23365 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23366 ? XEXP (ca_loc->call_arg_loc_note, 0)
23367 : NULL_RTX);
23368 arg; arg = next_arg)
23369 {
23370 dw_loc_descr_ref reg, val;
23371 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23372 dw_die_ref cdie, tdie = NULL;
23373
23374 next_arg = XEXP (arg, 1);
23375 if (REG_P (XEXP (XEXP (arg, 0), 0))
23376 && next_arg
23377 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23378 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23379 && REGNO (XEXP (XEXP (arg, 0), 0))
23380 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23381 next_arg = XEXP (next_arg, 1);
23382 if (mode == VOIDmode)
23383 {
23384 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23385 if (mode == VOIDmode)
23386 mode = GET_MODE (XEXP (arg, 0));
23387 }
23388 if (mode == VOIDmode || mode == BLKmode)
23389 continue;
23390 /* Get dynamic information about call target only if we
23391 have no static information: we cannot generate both
23392 DW_AT_call_origin and DW_AT_call_target
23393 attributes. */
23394 if (ca_loc->symbol_ref == NULL_RTX)
23395 {
23396 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23397 {
23398 tloc = XEXP (XEXP (arg, 0), 1);
23399 continue;
23400 }
23401 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23402 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23403 {
23404 tlocc = XEXP (XEXP (arg, 0), 1);
23405 continue;
23406 }
23407 }
23408 reg = NULL;
23409 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23410 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23411 VAR_INIT_STATUS_INITIALIZED);
23412 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23413 {
23414 rtx mem = XEXP (XEXP (arg, 0), 0);
23415 reg = mem_loc_descriptor (XEXP (mem, 0),
23416 get_address_mode (mem),
23417 GET_MODE (mem),
23418 VAR_INIT_STATUS_INITIALIZED);
23419 }
23420 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23421 == DEBUG_PARAMETER_REF)
23422 {
23423 tree tdecl
23424 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23425 tdie = lookup_decl_die (tdecl);
23426 if (tdie == NULL)
23427 continue;
23428 }
23429 else
23430 continue;
23431 if (reg == NULL
23432 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23433 != DEBUG_PARAMETER_REF)
23434 continue;
23435 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23436 VOIDmode,
23437 VAR_INIT_STATUS_INITIALIZED);
23438 if (val == NULL)
23439 continue;
23440 if (die == NULL)
23441 die = gen_call_site_die (decl, subr_die, ca_loc);
23442 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23443 NULL_TREE);
23444 if (reg != NULL)
23445 add_AT_loc (cdie, DW_AT_location, reg);
23446 else if (tdie != NULL)
23447 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23448 tdie);
23449 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23450 if (next_arg != XEXP (arg, 1))
23451 {
23452 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23453 if (mode == VOIDmode)
23454 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23455 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23456 0), 1),
23457 mode, VOIDmode,
23458 VAR_INIT_STATUS_INITIALIZED);
23459 if (val != NULL)
23460 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23461 val);
23462 }
23463 }
23464 if (die == NULL
23465 && (ca_loc->symbol_ref || tloc))
23466 die = gen_call_site_die (decl, subr_die, ca_loc);
23467 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23468 {
23469 dw_loc_descr_ref tval = NULL;
23470
23471 if (tloc != NULL_RTX)
23472 tval = mem_loc_descriptor (tloc,
23473 GET_MODE (tloc) == VOIDmode
23474 ? Pmode : GET_MODE (tloc),
23475 VOIDmode,
23476 VAR_INIT_STATUS_INITIALIZED);
23477 if (tval)
23478 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23479 else if (tlocc != NULL_RTX)
23480 {
23481 tval = mem_loc_descriptor (tlocc,
23482 GET_MODE (tlocc) == VOIDmode
23483 ? Pmode : GET_MODE (tlocc),
23484 VOIDmode,
23485 VAR_INIT_STATUS_INITIALIZED);
23486 if (tval)
23487 add_AT_loc (die,
23488 dwarf_AT (DW_AT_call_target_clobbered),
23489 tval);
23490 }
23491 }
23492 if (die != NULL)
23493 {
23494 call_site_note_count++;
23495 if (ca_loc->tail_call_p)
23496 tail_call_site_note_count++;
23497 }
23498 }
23499 }
23500 call_arg_locations = NULL;
23501 call_arg_loc_last = NULL;
23502 if (tail_call_site_count >= 0
23503 && tail_call_site_count == tail_call_site_note_count
23504 && (!dwarf_strict || dwarf_version >= 5))
23505 {
23506 if (call_site_count >= 0
23507 && call_site_count == call_site_note_count)
23508 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23509 else
23510 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23511 }
23512 call_site_count = -1;
23513 tail_call_site_count = -1;
23514 }
23515
23516 /* Mark used types after we have created DIEs for the function's scopes. */
23517 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23518 }
23519
23520 /* Returns a hash value for X (which really is a die_struct). */
23521
23522 hashval_t
23523 block_die_hasher::hash (die_struct *d)
23524 {
23525 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23526 }
23527
23528 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23529 as the decl_id and die_parent of die_struct Y. */
23530
23531 bool
23532 block_die_hasher::equal (die_struct *x, die_struct *y)
23533 {
23534 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23535 }
23536
23537 /* Hold information about markers for inlined entry points. */
23538 struct GTY ((for_user)) inline_entry_data
23539 {
23540 /* The block that's the inlined_function_outer_scope for an inlined
23541 function. */
23542 tree block;
23543
23544 /* The label at the inlined entry point. */
23545 const char *label_pfx;
23546 unsigned int label_num;
23547
23548 /* The view number to be used as the inlined entry point. */
23549 var_loc_view view;
23550 };
23551
23552 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23553 {
23554 typedef tree compare_type;
23555 static inline hashval_t hash (const inline_entry_data *);
23556 static inline bool equal (const inline_entry_data *, const_tree);
23557 };
23558
23559 /* Hash table routines for inline_entry_data. */
23560
23561 inline hashval_t
23562 inline_entry_data_hasher::hash (const inline_entry_data *data)
23563 {
23564 return htab_hash_pointer (data->block);
23565 }
23566
23567 inline bool
23568 inline_entry_data_hasher::equal (const inline_entry_data *data,
23569 const_tree block)
23570 {
23571 return data->block == block;
23572 }
23573
23574 /* Inlined entry points pending DIE creation in this compilation unit. */
23575
23576 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23577
23578
23579 /* Return TRUE if DECL, which may have been previously generated as
23580 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23581 true if decl (or its origin) is either an extern declaration or a
23582 class/namespace scoped declaration.
23583
23584 The declare_in_namespace support causes us to get two DIEs for one
23585 variable, both of which are declarations. We want to avoid
23586 considering one to be a specification, so we must test for
23587 DECLARATION and DW_AT_declaration. */
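/* A rough example, with hypothetical source: for

     namespace N { extern int v; }    <- declaration DIE, DW_AT_declaration
     int N::v = 1;                    <- definition

   the definition is emitted as a separate DIE carrying a DW_AT_specification
   that refers back to the declaration DIE instead of repeating the name and
   context; the declare_in_namespace duplicate declaration mentioned above
   must not be treated this way.  */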
23588 static inline bool
23589 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23590 {
23591 return (old_die && TREE_STATIC (decl) && !declaration
23592 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23593 }
23594
23595 /* Return true if DECL is a local static. */
23596
23597 static inline bool
23598 local_function_static (tree decl)
23599 {
23600 gcc_assert (VAR_P (decl));
23601 return TREE_STATIC (decl)
23602 && DECL_CONTEXT (decl)
23603 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23604 }
23605
23606 /* Generate a DIE to represent a declared data object.
23607 Either DECL or ORIGIN must be non-null. */
23608
23609 static void
23610 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23611 {
23612 HOST_WIDE_INT off = 0;
23613 tree com_decl;
23614 tree decl_or_origin = decl ? decl : origin;
23615 tree ultimate_origin;
23616 dw_die_ref var_die;
23617 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23618 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23619 || class_or_namespace_scope_p (context_die));
23620 bool specialization_p = false;
23621 bool no_linkage_name = false;
23622
23623 /* While C++ inline static data members have definitions inside of the
23624 class, force the first DIE to be a declaration, then let gen_member_die
23625 reparent it to the class context and call gen_variable_die again
23626 to create the outside of the class DIE for the definition. */
23627 if (!declaration
23628 && old_die == NULL
23629 && decl
23630 && DECL_CONTEXT (decl)
23631 && TYPE_P (DECL_CONTEXT (decl))
23632 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23633 {
23634 declaration = true;
23635 if (dwarf_version < 5)
23636 no_linkage_name = true;
23637 }
23638
23639 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23640 if (decl || ultimate_origin)
23641 origin = ultimate_origin;
23642 com_decl = fortran_common (decl_or_origin, &off);
23643
23644 /* A symbol in a Fortran common block gets emitted as a child of the common
23645 block DIE, in the form of a data member. */
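/* For example, with hypothetical Fortran source:

     COMMON /blk/ x, y

   yields, roughly, one DW_TAG_common_block DIE named "blk" with a
   DW_AT_location giving the block's address, plus DW_TAG_variable children
   for `x' and `y' whose locations are the block address adjusted by each
   symbol's offset (the OFF computed below).  */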
23646 if (com_decl)
23647 {
23648 dw_die_ref com_die;
23649 dw_loc_list_ref loc = NULL;
23650 die_node com_die_arg;
23651
23652 var_die = lookup_decl_die (decl_or_origin);
23653 if (var_die)
23654 {
23655 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23656 {
23657 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23658 if (loc)
23659 {
23660 if (off)
23661 {
23662 /* Optimize the common case. */
23663 if (single_element_loc_list_p (loc)
23664 && loc->expr->dw_loc_opc == DW_OP_addr
23665 && loc->expr->dw_loc_next == NULL
23666 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23667 == SYMBOL_REF)
23668 {
23669 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23670 loc->expr->dw_loc_oprnd1.v.val_addr
23671 = plus_constant (GET_MODE (x), x, off);
23672 }
23673 else
23674 loc_list_plus_const (loc, off);
23675 }
23676 add_AT_location_description (var_die, DW_AT_location, loc);
23677 remove_AT (var_die, DW_AT_declaration);
23678 }
23679 }
23680 return;
23681 }
23682
23683 if (common_block_die_table == NULL)
23684 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23685
23686 com_die_arg.decl_id = DECL_UID (com_decl);
23687 com_die_arg.die_parent = context_die;
23688 com_die = common_block_die_table->find (&com_die_arg);
23689 if (! early_dwarf)
23690 loc = loc_list_from_tree (com_decl, 2, NULL);
23691 if (com_die == NULL)
23692 {
23693 const char *cnam
23694 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23695 die_node **slot;
23696
23697 com_die = new_die (DW_TAG_common_block, context_die, decl);
23698 add_name_and_src_coords_attributes (com_die, com_decl);
23699 if (loc)
23700 {
23701 add_AT_location_description (com_die, DW_AT_location, loc);
23702 /* Avoid sharing the same loc descriptor between
23703 DW_TAG_common_block and DW_TAG_variable. */
23704 loc = loc_list_from_tree (com_decl, 2, NULL);
23705 }
23706 else if (DECL_EXTERNAL (decl_or_origin))
23707 add_AT_flag (com_die, DW_AT_declaration, 1);
23708 if (want_pubnames ())
23709 add_pubname_string (cnam, com_die); /* ??? needed? */
23710 com_die->decl_id = DECL_UID (com_decl);
23711 slot = common_block_die_table->find_slot (com_die, INSERT);
23712 *slot = com_die;
23713 }
23714 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23715 {
23716 add_AT_location_description (com_die, DW_AT_location, loc);
23717 loc = loc_list_from_tree (com_decl, 2, NULL);
23718 remove_AT (com_die, DW_AT_declaration);
23719 }
23720 var_die = new_die (DW_TAG_variable, com_die, decl);
23721 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23722 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23723 decl_quals (decl_or_origin), false,
23724 context_die);
23725 add_alignment_attribute (var_die, decl);
23726 add_AT_flag (var_die, DW_AT_external, 1);
23727 if (loc)
23728 {
23729 if (off)
23730 {
23731 /* Optimize the common case. */
23732 if (single_element_loc_list_p (loc)
23733 && loc->expr->dw_loc_opc == DW_OP_addr
23734 && loc->expr->dw_loc_next == NULL
23735 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23736 {
23737 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23738 loc->expr->dw_loc_oprnd1.v.val_addr
23739 = plus_constant (GET_MODE (x), x, off);
23740 }
23741 else
23742 loc_list_plus_const (loc, off);
23743 }
23744 add_AT_location_description (var_die, DW_AT_location, loc);
23745 }
23746 else if (DECL_EXTERNAL (decl_or_origin))
23747 add_AT_flag (var_die, DW_AT_declaration, 1);
23748 if (decl)
23749 equate_decl_number_to_die (decl, var_die);
23750 return;
23751 }
23752
23753 if (old_die)
23754 {
23755 if (declaration)
23756 {
23757 /* A declaration that has been previously dumped needs no
23758 further annotation, since it doesn't need location info on
23759 the second pass. */
23760 return;
23761 }
23762 else if (decl_will_get_specification_p (old_die, decl, declaration)
23763 && !get_AT (old_die, DW_AT_specification))
23764 {
23765 /* Fall-thru so we can make a new variable die along with a
23766 DW_AT_specification. */
23767 }
23768 else if (origin && old_die->die_parent != context_die)
23769 {
23770 /* If we will be creating an inlined instance, we need a
23771 new DIE that will get annotated with
23772 DW_AT_abstract_origin. */
23773 gcc_assert (!DECL_ABSTRACT_P (decl));
23774 }
23775 else
23776 {
23777 /* If a DIE was dumped early, it still needs location info.
23778 Skip to where we fill the location bits. */
23779 var_die = old_die;
23780
23781 /* ??? In LTRANS we cannot annotate early created variably
23782 modified type DIEs without copying them and adjusting all
23783 references to them. Thus we dump them again. Also add a
23784 reference to them, but beware of a -g0 compile and -g link,
23785 in which case the reference will already be present. */
23786 tree type = TREE_TYPE (decl_or_origin);
23787 if (in_lto_p
23788 && ! get_AT (var_die, DW_AT_type)
23789 && variably_modified_type_p
23790 (type, decl_function_context (decl_or_origin)))
23791 {
23792 if (decl_by_reference_p (decl_or_origin))
23793 add_type_attribute (var_die, TREE_TYPE (type),
23794 TYPE_UNQUALIFIED, false, context_die);
23795 else
23796 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23797 false, context_die);
23798 }
23799
23800 goto gen_variable_die_location;
23801 }
23802 }
23803
23804 /* For static data members, the declaration in the class is supposed
23805 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23806 also in DWARF2; the specification should still be DW_TAG_variable
23807 referencing the DW_TAG_member DIE. */
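/* A rough sketch, with hypothetical source: for

     struct S { static int x; };
     int S::x;

   DWARF 2-4 get a DW_TAG_member declaration DIE inside the DIE for S, and
   the definition becomes a DW_TAG_variable at namespace scope whose
   DW_AT_specification points back at that DW_TAG_member.  */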
23808 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23809 var_die = new_die (DW_TAG_member, context_die, decl);
23810 else
23811 var_die = new_die (DW_TAG_variable, context_die, decl);
23812
23813 if (origin != NULL)
23814 add_abstract_origin_attribute (var_die, origin);
23815
23816 /* Loop unrolling can create multiple blocks that refer to the same
23817 static variable, so we must test for the DW_AT_declaration flag.
23818
23819 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23820 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23821 sharing them.
23822
23823 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23824 else if (decl_will_get_specification_p (old_die, decl, declaration))
23825 {
23826 /* This is a definition of a C++ class level static. */
23827 add_AT_specification (var_die, old_die);
23828 specialization_p = true;
23829 if (DECL_NAME (decl))
23830 {
23831 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23832 struct dwarf_file_data * file_index = lookup_filename (s.file);
23833
23834 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23835 add_AT_file (var_die, DW_AT_decl_file, file_index);
23836
23837 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23838 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23839
23840 if (debug_column_info
23841 && s.column
23842 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23843 != (unsigned) s.column))
23844 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23845
23846 if (old_die->die_tag == DW_TAG_member)
23847 add_linkage_name (var_die, decl);
23848 }
23849 }
23850 else
23851 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23852
23853 if ((origin == NULL && !specialization_p)
23854 || (origin != NULL
23855 && !DECL_ABSTRACT_P (decl_or_origin)
23856 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23857 decl_function_context
23858 (decl_or_origin))))
23859 {
23860 tree type = TREE_TYPE (decl_or_origin);
23861
23862 if (decl_by_reference_p (decl_or_origin))
23863 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23864 context_die);
23865 else
23866 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23867 context_die);
23868 }
23869
23870 if (origin == NULL && !specialization_p)
23871 {
23872 if (TREE_PUBLIC (decl))
23873 add_AT_flag (var_die, DW_AT_external, 1);
23874
23875 if (DECL_ARTIFICIAL (decl))
23876 add_AT_flag (var_die, DW_AT_artificial, 1);
23877
23878 add_alignment_attribute (var_die, decl);
23879
23880 add_accessibility_attribute (var_die, decl);
23881 }
23882
23883 if (declaration)
23884 add_AT_flag (var_die, DW_AT_declaration, 1);
23885
23886 if (decl && (DECL_ABSTRACT_P (decl)
23887 || !old_die || is_declaration_die (old_die)))
23888 equate_decl_number_to_die (decl, var_die);
23889
23890 gen_variable_die_location:
23891 if (! declaration
23892 && (! DECL_ABSTRACT_P (decl_or_origin)
23893 /* Local static vars are shared between all clones/inlines,
23894 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23895 already set. */
23896 || (VAR_P (decl_or_origin)
23897 && TREE_STATIC (decl_or_origin)
23898 && DECL_RTL_SET_P (decl_or_origin))))
23899 {
23900 if (early_dwarf)
23901 add_pubname (decl_or_origin, var_die);
23902 else
23903 add_location_or_const_value_attribute (var_die, decl_or_origin,
23904 decl == NULL);
23905 }
23906 else
23907 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23908
23909 if ((dwarf_version >= 4 || !dwarf_strict)
23910 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23911 DW_AT_const_expr) == 1
23912 && !get_AT (var_die, DW_AT_const_expr)
23913 && !specialization_p)
23914 add_AT_flag (var_die, DW_AT_const_expr, 1);
23915
23916 if (!dwarf_strict)
23917 {
23918 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23919 DW_AT_inline);
23920 if (inl != -1
23921 && !get_AT (var_die, DW_AT_inline)
23922 && !specialization_p)
23923 add_AT_unsigned (var_die, DW_AT_inline, inl);
23924 }
23925 }
23926
23927 /* Generate a DIE to represent a named constant. */
23928
23929 static void
23930 gen_const_die (tree decl, dw_die_ref context_die)
23931 {
23932 dw_die_ref const_die;
23933 tree type = TREE_TYPE (decl);
23934
23935 const_die = lookup_decl_die (decl);
23936 if (const_die)
23937 return;
23938
23939 const_die = new_die (DW_TAG_constant, context_die, decl);
23940 equate_decl_number_to_die (decl, const_die);
23941 add_name_and_src_coords_attributes (const_die, decl);
23942 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23943 if (TREE_PUBLIC (decl))
23944 add_AT_flag (const_die, DW_AT_external, 1);
23945 if (DECL_ARTIFICIAL (decl))
23946 add_AT_flag (const_die, DW_AT_artificial, 1);
23947 tree_add_const_value_attribute_for_decl (const_die, decl);
23948 }
23949
23950 /* Generate a DIE to represent a label identifier. */
23951
23952 static void
23953 gen_label_die (tree decl, dw_die_ref context_die)
23954 {
23955 tree origin = decl_ultimate_origin (decl);
23956 dw_die_ref lbl_die = lookup_decl_die (decl);
23957 rtx insn;
23958 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23959
23960 if (!lbl_die)
23961 {
23962 lbl_die = new_die (DW_TAG_label, context_die, decl);
23963 equate_decl_number_to_die (decl, lbl_die);
23964
23965 if (origin != NULL)
23966 add_abstract_origin_attribute (lbl_die, origin);
23967 else
23968 add_name_and_src_coords_attributes (lbl_die, decl);
23969 }
23970
23971 if (DECL_ABSTRACT_P (decl))
23972 equate_decl_number_to_die (decl, lbl_die);
23973 else if (! early_dwarf)
23974 {
23975 insn = DECL_RTL_IF_SET (decl);
23976
23977 /* Deleted labels are programmer-specified labels which have been
23978 eliminated because of various optimizations. We still emit them
23979 here so that it is possible to put breakpoints on them. */
23980 if (insn
23981 && (LABEL_P (insn)
23982 || ((NOTE_P (insn)
23983 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23984 {
23985 /* When optimization is enabled (via -O) some parts of the compiler
23986 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23987 represent source-level labels which were explicitly declared by
23988 the user. This really shouldn't be happening though, so catch
23989 it if it ever does happen. */
23990 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23991
23992 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23993 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23994 }
23995 else if (insn
23996 && NOTE_P (insn)
23997 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23998 && CODE_LABEL_NUMBER (insn) != -1)
23999 {
24000 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24001 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24002 }
24003 }
24004 }
24005
24006 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24007 attributes to the DIE for a block STMT, to describe where the inlined
24008 function was called from. This is similar to add_src_coords_attributes. */
24009
24010 static inline void
24011 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24012 {
24013 /* We can end up with BUILTINS_LOCATION here. */
24014 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24015 return;
24016
24017 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24018
24019 if (dwarf_version >= 3 || !dwarf_strict)
24020 {
24021 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24022 add_AT_unsigned (die, DW_AT_call_line, s.line);
24023 if (debug_column_info && s.column)
24024 add_AT_unsigned (die, DW_AT_call_column, s.column);
24025 }
24026 }
24027
24028
24029 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24030 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24031
24032 static inline void
24033 add_high_low_attributes (tree stmt, dw_die_ref die)
24034 {
24035 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24036
24037 if (inline_entry_data **iedp
24038 = !inline_entry_data_table ? NULL
24039 : inline_entry_data_table->find_slot_with_hash (stmt,
24040 htab_hash_pointer (stmt),
24041 NO_INSERT))
24042 {
24043 inline_entry_data *ied = *iedp;
24044 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24045 gcc_assert (debug_inline_points);
24046 gcc_assert (inlined_function_outer_scope_p (stmt));
24047
24048 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24049 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24050
24051 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24052 && !dwarf_strict)
24053 {
24054 if (!output_asm_line_debug_info ())
24055 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24056 else
24057 {
24058 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24059 /* FIXME: this will resolve to a small number. Could we
24060 possibly emit smaller data? Ideally we'd emit a
24061 uleb128, but that would make the size of DIEs
24062 impossible for the compiler to compute, since it's
24063 the assembler that computes the value of the view
24064 label in this case. Ideally, we'd have a single form
24065 encompassing both the address and the view, and
24066 indirecting them through a table might make things
24067 easier, but even that would be more wasteful,
24068 space-wise, than what we have now. */
24069 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24070 }
24071 }
24072
24073 inline_entry_data_table->clear_slot (iedp);
24074 }
24075
24076 if (BLOCK_FRAGMENT_CHAIN (stmt)
24077 && (dwarf_version >= 3 || !dwarf_strict))
24078 {
24079 tree chain, superblock = NULL_TREE;
24080 dw_die_ref pdie;
24081 dw_attr_node *attr = NULL;
24082
24083 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24084 {
24085 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24086 BLOCK_NUMBER (stmt));
24087 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24088 }
24089
24090 /* Optimize duplicate .debug_ranges lists, or even tails of
24091 lists. If this BLOCK has the same ranges as its supercontext,
24092 look up the DW_AT_ranges attribute in the supercontext (and
24093 recursively so), verify that the ranges_table contains the
24094 right values and use it instead of adding a new .debug_ranges entry. */
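/* For example, as an illustrative sketch: if the supercontext's fragment
   chain covers ranges R1 R2 R3 and this BLOCK's chain is just the tail
   R2 R3, the DW_AT_ranges attribute can point into the existing list at
   offset + (supercnt - thiscnt) instead of emitting R2 R3 again.  */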
24095 for (chain = stmt, pdie = die;
24096 BLOCK_SAME_RANGE (chain);
24097 chain = BLOCK_SUPERCONTEXT (chain))
24098 {
24099 dw_attr_node *new_attr;
24100
24101 pdie = pdie->die_parent;
24102 if (pdie == NULL)
24103 break;
24104 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24105 break;
24106 new_attr = get_AT (pdie, DW_AT_ranges);
24107 if (new_attr == NULL
24108 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24109 break;
24110 attr = new_attr;
24111 superblock = BLOCK_SUPERCONTEXT (chain);
24112 }
24113 if (attr != NULL
24114 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24115 == BLOCK_NUMBER (superblock))
24116 && BLOCK_FRAGMENT_CHAIN (superblock))
24117 {
24118 unsigned long off = attr->dw_attr_val.v.val_offset;
24119 unsigned long supercnt = 0, thiscnt = 0;
24120 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24121 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24122 {
24123 ++supercnt;
24124 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24125 == BLOCK_NUMBER (chain));
24126 }
24127 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24128 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24129 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24130 ++thiscnt;
24131 gcc_assert (supercnt >= thiscnt);
24132 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24133 false);
24134 note_rnglist_head (off + supercnt - thiscnt);
24135 return;
24136 }
24137
24138 unsigned int offset = add_ranges (stmt, true);
24139 add_AT_range_list (die, DW_AT_ranges, offset, false);
24140 note_rnglist_head (offset);
24141
24142 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24143 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24144 do
24145 {
24146 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24147 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24148 chain = BLOCK_FRAGMENT_CHAIN (chain);
24149 }
24150 while (chain);
24151 add_ranges (NULL);
24152 }
24153 else
24154 {
24155 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24156 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24157 BLOCK_NUMBER (stmt));
24158 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24159 BLOCK_NUMBER (stmt));
24160 add_AT_low_high_pc (die, label, label_high, false);
24161 }
24162 }
24163
24164 /* Generate a DIE for a lexical block. */
24165
24166 static void
24167 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24168 {
24169 dw_die_ref old_die = BLOCK_DIE (stmt);
24170 dw_die_ref stmt_die = NULL;
24171 if (!old_die)
24172 {
24173 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24174 BLOCK_DIE (stmt) = stmt_die;
24175 }
24176
24177 if (BLOCK_ABSTRACT (stmt))
24178 {
24179 if (old_die)
24180 {
24181 /* This must have been generated early and it won't even
24182 need location information since it's a DW_AT_inline
24183 function. */
24184 if (flag_checking)
24185 for (dw_die_ref c = context_die; c; c = c->die_parent)
24186 if (c->die_tag == DW_TAG_inlined_subroutine
24187 || c->die_tag == DW_TAG_subprogram)
24188 {
24189 gcc_assert (get_AT (c, DW_AT_inline));
24190 break;
24191 }
24192 return;
24193 }
24194 }
24195 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24196 {
24197 /* If this is an inlined instance, create a new lexical die for
24198 anything below to attach DW_AT_abstract_origin to. */
24199 if (old_die)
24200 {
24201 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24202 BLOCK_DIE (stmt) = stmt_die;
24203 old_die = NULL;
24204 }
24205
24206 tree origin = block_ultimate_origin (stmt);
24207 if (origin != NULL_TREE && origin != stmt)
24208 add_abstract_origin_attribute (stmt_die, origin);
24209 }
24210
24211 if (old_die)
24212 stmt_die = old_die;
24213
24214 /* A non-abstract block whose blocks have already been reordered
24215 should have the instruction range for this block. If so, set the
24216 high/low attributes. */
24217 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24218 {
24219 gcc_assert (stmt_die);
24220 add_high_low_attributes (stmt, stmt_die);
24221 }
24222
24223 decls_for_scope (stmt, stmt_die);
24224 }
24225
24226 /* Generate a DIE for an inlined subprogram. */
24227
24228 static void
24229 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24230 {
24231 tree decl;
24232
24233 /* The instance of function that is effectively being inlined shall not
24234 be abstract. */
24235 gcc_assert (! BLOCK_ABSTRACT (stmt));
24236
24237 decl = block_ultimate_origin (stmt);
24238
24239 /* Make sure any inlined functions are known to be inlineable. */
24240 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24241 || cgraph_function_possibly_inlined_p (decl));
24242
24243 if (! BLOCK_ABSTRACT (stmt))
24244 {
24245 dw_die_ref subr_die
24246 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24247
24248 if (call_arg_locations || debug_inline_points)
24249 BLOCK_DIE (stmt) = subr_die;
24250 add_abstract_origin_attribute (subr_die, decl);
24251 if (TREE_ASM_WRITTEN (stmt))
24252 add_high_low_attributes (stmt, subr_die);
24253 add_call_src_coords_attributes (stmt, subr_die);
24254
24255 decls_for_scope (stmt, subr_die);
24256 }
24257 }
24258
24259 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24260 the comment for VLR_CONTEXT. */
24261
24262 static void
24263 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24264 {
24265 dw_die_ref decl_die;
24266
24267 if (TREE_TYPE (decl) == error_mark_node)
24268 return;
24269
24270 decl_die = new_die (DW_TAG_member, context_die, decl);
24271 add_name_and_src_coords_attributes (decl_die, decl);
24272 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24273 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24274 context_die);
24275
24276 if (DECL_BIT_FIELD_TYPE (decl))
24277 {
24278 add_byte_size_attribute (decl_die, decl);
24279 add_bit_size_attribute (decl_die, decl);
24280 add_bit_offset_attribute (decl_die, decl, ctx);
24281 }
24282
24283 add_alignment_attribute (decl_die, decl);
24284
24285 /* If we have a variant part offset, then we are supposed to process a member
24286 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24287 trees. */
24288 gcc_assert (ctx->variant_part_offset == NULL_TREE
24289 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24290 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24291 add_data_member_location_attribute (decl_die, decl, ctx);
24292
24293 if (DECL_ARTIFICIAL (decl))
24294 add_AT_flag (decl_die, DW_AT_artificial, 1);
24295
24296 add_accessibility_attribute (decl_die, decl);
24297
24298 /* Equate decl number to die, so that we can look up this decl later on. */
24299 equate_decl_number_to_die (decl, decl_die);
24300 }
24301
24302 /* Generate a DIE for a pointer to a member type. TYPE can be an
24303 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24304 pointer to member function. */
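/* A rough example, with hypothetical source: for

     struct S { int i; };
     int S::*pd;

   the type of `pd' is an OFFSET_TYPE and produces roughly

     DW_TAG_ptr_to_member_type
       DW_AT_containing_type -> DIE for S
       DW_AT_type            -> DIE for int

   A pointer to member function is represented by a RECORD_TYPE instead and
   takes the same tag, but without the DW_AT_use_location added below for the
   data-member case.  */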
24305
24306 static void
24307 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24308 {
24309 if (lookup_type_die (type))
24310 return;
24311
24312 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24313 scope_die_for (type, context_die), type);
24314
24315 equate_type_number_to_die (type, ptr_die);
24316 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24317 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24318 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24319 context_die);
24320 add_alignment_attribute (ptr_die, type);
24321
24322 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24323 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24324 {
24325 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24326 add_AT_loc (ptr_die, DW_AT_use_location, op);
24327 }
24328 }
24329
24330 static char *producer_string;
24331
24332 /* Return a heap-allocated producer string, including the command-line
24333 options if -grecord-gcc-switches is in effect. */
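/* With -grecord-gcc-switches, the resulting DW_AT_producer might look
   roughly like (illustrative only; the exact contents depend on the
   command line):

     "GNU C17 8.2.0 -mtune=generic -march=x86-64 -g -O2"

   i.e. "<language> <version>" followed by the recorded options, minus
   those filtered out below (paths, dump controls, -M/-i/-W options and
   anything marked CL_NO_DWARF_RECORD).  */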
24334
24335 static char *
24336 gen_producer_string (void)
24337 {
24338 size_t j;
24339 auto_vec<const char *> switches;
24340 const char *language_string = lang_hooks.name;
24341 char *producer, *tail;
24342 const char *p;
24343 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24344 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24345
24346 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24347 switch (save_decoded_options[j].opt_index)
24348 {
24349 case OPT_o:
24350 case OPT_d:
24351 case OPT_dumpbase:
24352 case OPT_dumpdir:
24353 case OPT_auxbase:
24354 case OPT_auxbase_strip:
24355 case OPT_quiet:
24356 case OPT_version:
24357 case OPT_v:
24358 case OPT_w:
24359 case OPT_L:
24360 case OPT_D:
24361 case OPT_I:
24362 case OPT_U:
24363 case OPT_SPECIAL_unknown:
24364 case OPT_SPECIAL_ignore:
24365 case OPT_SPECIAL_program_name:
24366 case OPT_SPECIAL_input_file:
24367 case OPT_grecord_gcc_switches:
24368 case OPT__output_pch_:
24369 case OPT_fdiagnostics_show_location_:
24370 case OPT_fdiagnostics_show_option:
24371 case OPT_fdiagnostics_show_caret:
24372 case OPT_fdiagnostics_color_:
24373 case OPT_fverbose_asm:
24374 case OPT____:
24375 case OPT__sysroot_:
24376 case OPT_nostdinc:
24377 case OPT_nostdinc__:
24378 case OPT_fpreprocessed:
24379 case OPT_fltrans_output_list_:
24380 case OPT_fresolution_:
24381 case OPT_fdebug_prefix_map_:
24382 case OPT_fmacro_prefix_map_:
24383 case OPT_ffile_prefix_map_:
24384 case OPT_fcompare_debug:
24385 /* Ignore these. */
24386 continue;
24387 default:
24388 if (cl_options[save_decoded_options[j].opt_index].flags
24389 & CL_NO_DWARF_RECORD)
24390 continue;
24391 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24392 == '-');
24393 switch (save_decoded_options[j].canonical_option[0][1])
24394 {
24395 case 'M':
24396 case 'i':
24397 case 'W':
24398 continue;
24399 case 'f':
24400 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24401 "dump", 4) == 0)
24402 continue;
24403 break;
24404 default:
24405 break;
24406 }
24407 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24408 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24409 break;
24410 }
24411
24412 producer = XNEWVEC (char, plen + 1 + len + 1);
24413 tail = producer;
24414 sprintf (tail, "%s %s", language_string, version_string);
24415 tail += plen;
24416
24417 FOR_EACH_VEC_ELT (switches, j, p)
24418 {
24419 len = strlen (p);
24420 *tail = ' ';
24421 memcpy (tail + 1, p, len);
24422 tail += len + 1;
24423 }
24424
24425 *tail = '\0';
24426 return producer;
24427 }
24428
24429 /* Given two C and/or C++ language/version strings, return the "highest" one.
24430 C++ is assumed to be "higher" than C in this case. Used for merging
24431 LTO translation unit languages. */
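/* For example, merging a "GNU C11" unit with a "GNU C++14" unit yields
   "GNU C++14", and merging "GNU C89" with "GNU C99" yields "GNU C99".  */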
24432 static const char *
24433 highest_c_language (const char *lang1, const char *lang2)
24434 {
24435 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24436 return "GNU C++17";
24437 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24438 return "GNU C++14";
24439 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24440 return "GNU C++11";
24441 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24442 return "GNU C++98";
24443
24444 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24445 return "GNU C17";
24446 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24447 return "GNU C11";
24448 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24449 return "GNU C99";
24450 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24451 return "GNU C89";
24452
24453 gcc_unreachable ();
24454 }
24455
24456
24457 /* Generate the DIE for the compilation unit. */
24458
24459 static dw_die_ref
24460 gen_compile_unit_die (const char *filename)
24461 {
24462 dw_die_ref die;
24463 const char *language_string = lang_hooks.name;
24464 int language;
24465
24466 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24467
24468 if (filename)
24469 {
24470 add_name_attribute (die, filename);
24471 /* Don't add cwd for <built-in>. */
24472 if (filename[0] != '<')
24473 add_comp_dir_attribute (die);
24474 }
24475
24476 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24477
24478 /* If our producer is LTO try to figure out a common language to use
24479 from the global list of translation units. */
24480 if (strcmp (language_string, "GNU GIMPLE") == 0)
24481 {
24482 unsigned i;
24483 tree t;
24484 const char *common_lang = NULL;
24485
24486 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24487 {
24488 if (!TRANSLATION_UNIT_LANGUAGE (t))
24489 continue;
24490 if (!common_lang)
24491 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24492 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24493 ;
24494 else if (strncmp (common_lang, "GNU C", 5) == 0
24495 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24496 /* Mixing C and C++ is ok, use C++ in that case. */
24497 common_lang = highest_c_language (common_lang,
24498 TRANSLATION_UNIT_LANGUAGE (t));
24499 else
24500 {
24501 /* Fall back to C. */
24502 common_lang = NULL;
24503 break;
24504 }
24505 }
24506
24507 if (common_lang)
24508 language_string = common_lang;
24509 }
24510
24511 language = DW_LANG_C;
24512 if (strncmp (language_string, "GNU C", 5) == 0
24513 && ISDIGIT (language_string[5]))
24514 {
24515 language = DW_LANG_C89;
24516 if (dwarf_version >= 3 || !dwarf_strict)
24517 {
24518 if (strcmp (language_string, "GNU C89") != 0)
24519 language = DW_LANG_C99;
24520
24521 if (dwarf_version >= 5 /* || !dwarf_strict */)
24522 if (strcmp (language_string, "GNU C11") == 0
24523 || strcmp (language_string, "GNU C17") == 0)
24524 language = DW_LANG_C11;
24525 }
24526 }
24527 else if (strncmp (language_string, "GNU C++", 7) == 0)
24528 {
24529 language = DW_LANG_C_plus_plus;
24530 if (dwarf_version >= 5 /* || !dwarf_strict */)
24531 {
24532 if (strcmp (language_string, "GNU C++11") == 0)
24533 language = DW_LANG_C_plus_plus_11;
24534 else if (strcmp (language_string, "GNU C++14") == 0)
24535 language = DW_LANG_C_plus_plus_14;
24536 else if (strcmp (language_string, "GNU C++17") == 0)
24537 /* For now. */
24538 language = DW_LANG_C_plus_plus_14;
24539 }
24540 }
24541 else if (strcmp (language_string, "GNU F77") == 0)
24542 language = DW_LANG_Fortran77;
24543 else if (dwarf_version >= 3 || !dwarf_strict)
24544 {
24545 if (strcmp (language_string, "GNU Ada") == 0)
24546 language = DW_LANG_Ada95;
24547 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24548 {
24549 language = DW_LANG_Fortran95;
24550 if (dwarf_version >= 5 /* || !dwarf_strict */)
24551 {
24552 if (strcmp (language_string, "GNU Fortran2003") == 0)
24553 language = DW_LANG_Fortran03;
24554 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24555 language = DW_LANG_Fortran08;
24556 }
24557 }
24558 else if (strcmp (language_string, "GNU Objective-C") == 0)
24559 language = DW_LANG_ObjC;
24560 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24561 language = DW_LANG_ObjC_plus_plus;
24562 else if (dwarf_version >= 5 || !dwarf_strict)
24563 {
24564 if (strcmp (language_string, "GNU Go") == 0)
24565 language = DW_LANG_Go;
24566 }
24567 }
24568 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24569 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24570 language = DW_LANG_Fortran90;
24571
24572 add_AT_unsigned (die, DW_AT_language, language);
24573
24574 switch (language)
24575 {
24576 case DW_LANG_Fortran77:
24577 case DW_LANG_Fortran90:
24578 case DW_LANG_Fortran95:
24579 case DW_LANG_Fortran03:
24580 case DW_LANG_Fortran08:
24581 /* Fortran has case-insensitive identifiers and the front-end
24582 lowercases everything. */
24583 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24584 break;
24585 default:
24586 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24587 break;
24588 }
24589 return die;
24590 }
24591
24592 /* Generate the DIE for a base class. */
24593
24594 static void
24595 gen_inheritance_die (tree binfo, tree access, tree type,
24596 dw_die_ref context_die)
24597 {
24598 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24599 struct vlr_context ctx = { type, NULL };
24600
24601 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24602 context_die);
24603 add_data_member_location_attribute (die, binfo, &ctx);
24604
24605 if (BINFO_VIRTUAL_P (binfo))
24606 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24607
24608 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24609 children, otherwise the default is DW_ACCESS_public. In DWARF2
24610 the default has always been DW_ACCESS_private. */
24611 if (access == access_public_node)
24612 {
24613 if (dwarf_version == 2
24614 || context_die->die_tag == DW_TAG_class_type)
24615 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24616 }
24617 else if (access == access_protected_node)
24618 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24619 else if (dwarf_version > 2
24620 && context_die->die_tag != DW_TAG_class_type)
24621 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24622 }
24623
24624 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24625 structure. */
24626 static bool
24627 is_variant_part (tree decl)
24628 {
24629 return (TREE_CODE (decl) == FIELD_DECL
24630 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24631 }
24632
24633 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24634 return the FIELD_DECL. Return NULL_TREE otherwise. */
24635
24636 static tree
24637 analyze_discr_in_predicate (tree operand, tree struct_type)
24638 {
24639 bool continue_stripping = true;
24640 while (continue_stripping)
24641 switch (TREE_CODE (operand))
24642 {
24643 CASE_CONVERT:
24644 operand = TREE_OPERAND (operand, 0);
24645 break;
24646 default:
24647 continue_stripping = false;
24648 break;
24649 }
24650
24651 /* Match field access to members of struct_type only. */
24652 if (TREE_CODE (operand) == COMPONENT_REF
24653 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24654 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24655 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24656 return TREE_OPERAND (operand, 1);
24657 else
24658 return NULL_TREE;
24659 }
24660
24661 /* Check that SRC is a constant integer that can be represented as a native
24662 integer constant (either signed or unsigned). If so, store it into DEST and
24663 return true. Return false otherwise. */
24664
24665 static bool
24666 get_discr_value (tree src, dw_discr_value *dest)
24667 {
24668 tree discr_type = TREE_TYPE (src);
24669
24670 if (lang_hooks.types.get_debug_type)
24671 {
24672 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24673 if (debug_type != NULL)
24674 discr_type = debug_type;
24675 }
24676
24677 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24678 return false;
24679
24680 /* Signedness can vary between the original type and the debug type. This
24681 can happen for character types in Ada for instance: the character type
24682 used for code generation can be signed, to be compatible with the C one,
24683 but from a debugger point of view, it must be unsigned. */
24684 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24685 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24686
24687 if (is_orig_unsigned != is_debug_unsigned)
24688 src = fold_convert (discr_type, src);
24689
24690 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24691 return false;
24692
24693 dest->pos = is_debug_unsigned;
24694 if (is_debug_unsigned)
24695 dest->v.uval = tree_to_uhwi (src);
24696 else
24697 dest->v.sval = tree_to_shwi (src);
24698
24699 return true;
24700 }
24701
24702 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24703 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24704 store NULL_TREE in DISCR_DECL. Otherwise:
24705
24706 - store the discriminant field in STRUCT_TYPE that controls the variant
24707 part to *DISCR_DECL
24708
24709 - put in *DISCR_LISTS_P an array where for each variant, the item
24710 represents the corresponding matching list of discriminant values.
24711
24712 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24713 the above array.
24714
24715 Note that when the array is allocated (i.e. when the analysis is
24716 successful), it is up to the caller to free the array. */
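/* A rough Ada-flavoured illustration, with hypothetical source:

     type Rec (Kind : Integer := 0) is record
        case Kind is
           when 1 | 3 .. 5 =>
              I : Integer;
           when others =>
              null;
        end case;
     end record;

   The first variant's DECL_QUALIFIER is roughly
   `Kind == 1 || (Kind >= 3 && Kind <= 5)', which the analysis below turns
   into the discriminant list {1, 3..5} with Kind as *DISCR_DECL; the
   `others' variant has boolean_true_node as its qualifier and is left
   without a matching list (the default variant).  */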
24717
24718 static void
24719 analyze_variants_discr (tree variant_part_decl,
24720 tree struct_type,
24721 tree *discr_decl,
24722 dw_discr_list_ref **discr_lists_p,
24723 unsigned *discr_lists_length)
24724 {
24725 tree variant_part_type = TREE_TYPE (variant_part_decl);
24726 tree variant;
24727 dw_discr_list_ref *discr_lists;
24728 unsigned i;
24729
24730 /* Compute how many variants there are in this variant part. */
24731 *discr_lists_length = 0;
24732 for (variant = TYPE_FIELDS (variant_part_type);
24733 variant != NULL_TREE;
24734 variant = DECL_CHAIN (variant))
24735 ++*discr_lists_length;
24736
24737 *discr_decl = NULL_TREE;
24738 *discr_lists_p
24739 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24740 sizeof (**discr_lists_p));
24741 discr_lists = *discr_lists_p;
24742
24743 /* And then analyze all variants to extract discriminant information for all
24744 of them. This analysis is conservative: as soon as we detect something we
24745 do not support, abort everything and pretend we found nothing. */
24746 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24747 variant != NULL_TREE;
24748 variant = DECL_CHAIN (variant), ++i)
24749 {
24750 tree match_expr = DECL_QUALIFIER (variant);
24751
24752 /* Now, try to analyze the predicate and deduce a discriminant for
24753 it. */
24754 if (match_expr == boolean_true_node)
24755 /* Typically happens for the default variant: it matches all cases that
24756 previous variants rejected. Don't output any matching value for
24757 this one. */
24758 continue;
24759
24760 /* The following loop tries to iterate over each discriminant
24761 possibility: single values or ranges. */
24762 while (match_expr != NULL_TREE)
24763 {
24764 tree next_round_match_expr;
24765 tree candidate_discr = NULL_TREE;
24766 dw_discr_list_ref new_node = NULL;
24767
24768 /* Possibilities are matched one after the other by nested
24769 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24770 continue with the rest at next iteration. */
24771 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24772 {
24773 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24774 match_expr = TREE_OPERAND (match_expr, 1);
24775 }
24776 else
24777 next_round_match_expr = NULL_TREE;
24778
24779 if (match_expr == boolean_false_node)
24780 /* This sub-expression matches nothing: just wait for the next
24781 one. */
24782 ;
24783
24784 else if (TREE_CODE (match_expr) == EQ_EXPR)
24785 {
24786 /* We are matching: <discr_field> == <integer_cst>
24787 This sub-expression matches a single value. */
24788 tree integer_cst = TREE_OPERAND (match_expr, 1);
24789
24790 candidate_discr
24791 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24792 struct_type);
24793
24794 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24795 if (!get_discr_value (integer_cst,
24796 &new_node->dw_discr_lower_bound))
24797 goto abort;
24798 new_node->dw_discr_range = false;
24799 }
24800
24801 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24802 {
24803 /* We are matching:
24804 <discr_field> > <integer_cst>
24805 && <discr_field> < <integer_cst>.
24806 This sub-expression matches the range of values between the
24807 two matched integer constants. Note that comparisons can be
24808 inclusive or exclusive. */
24809 tree candidate_discr_1, candidate_discr_2;
24810 tree lower_cst, upper_cst;
24811 bool lower_cst_included, upper_cst_included;
24812 tree lower_op = TREE_OPERAND (match_expr, 0);
24813 tree upper_op = TREE_OPERAND (match_expr, 1);
24814
24815 /* When the comparison is exclusive, the integer constant is not
24816 the discriminant range bound we are looking for: we will have
24817 to increment or decrement it. */
24818 if (TREE_CODE (lower_op) == GE_EXPR)
24819 lower_cst_included = true;
24820 else if (TREE_CODE (lower_op) == GT_EXPR)
24821 lower_cst_included = false;
24822 else
24823 goto abort;
24824
24825 if (TREE_CODE (upper_op) == LE_EXPR)
24826 upper_cst_included = true;
24827 else if (TREE_CODE (upper_op) == LT_EXPR)
24828 upper_cst_included = false;
24829 else
24830 goto abort;
24831
24832 /* Extract the discriminant from the first operand and check it
24833 is consistent with the same analysis in the second
24834 operand. */
24835 candidate_discr_1
24836 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24837 struct_type);
24838 candidate_discr_2
24839 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24840 struct_type);
24841 if (candidate_discr_1 == candidate_discr_2)
24842 candidate_discr = candidate_discr_1;
24843 else
24844 goto abort;
24845
24846 /* Extract bounds from both. */
24847 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24848 lower_cst = TREE_OPERAND (lower_op, 1);
24849 upper_cst = TREE_OPERAND (upper_op, 1);
24850
24851 if (!lower_cst_included)
24852 lower_cst
24853 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24854 build_int_cst (TREE_TYPE (lower_cst), 1));
24855 if (!upper_cst_included)
24856 upper_cst
24857 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24858 build_int_cst (TREE_TYPE (upper_cst), 1));
24859
24860 if (!get_discr_value (lower_cst,
24861 &new_node->dw_discr_lower_bound)
24862 || !get_discr_value (upper_cst,
24863 &new_node->dw_discr_upper_bound))
24864 goto abort;
24865
24866 new_node->dw_discr_range = true;
24867 }
24868
24869 else
24870 /* Unsupported sub-expression: we cannot determine the set of
24871 matching discriminant values. Abort everything. */
24872 goto abort;
24873
24874 /* If the discriminant info is not consistent with what we saw so
24875 far, consider the analysis failed and abort everything. */
24876 if (candidate_discr == NULL_TREE
24877 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24878 goto abort;
24879 else
24880 *discr_decl = candidate_discr;
24881
24882 if (new_node != NULL)
24883 {
24884 new_node->dw_discr_next = discr_lists[i];
24885 discr_lists[i] = new_node;
24886 }
24887 match_expr = next_round_match_expr;
24888 }
24889 }
24890
24891 /* If we reach this point, we could match everything we were interested
24892 in. */
24893 return;
24894
24895 abort:
24896 /* Clean all data structure and return no result. */
24897 free (*discr_lists_p);
24898 *discr_lists_p = NULL;
24899 *discr_decl = NULL_TREE;
24900 }
24901
24902 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24903 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24904 under CONTEXT_DIE.
24905
24906 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24907 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24908 this type, which are record types, represent the available variants and each
24909 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24910 values are inferred from these attributes.
24911
24912 In trees, the offsets for the fields inside these sub-records are relative
24913 to the variant part itself, whereas the corresponding DIEs should have
24914 offset attributes that are relative to the embedding record base address.
24915 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24916 must be an expression that computes the offset of the variant part to
24917 describe in DWARF. */
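/* Sketch of the resulting DIE shape (illustrative only):

     DW_TAG_structure_type
       DW_TAG_member "kind"                 <- the discriminant
       DW_TAG_variant_part
         DW_AT_discr -> DIE of "kind"
         DW_TAG_variant
           DW_AT_discr_value (or DW_AT_discr_list)
           DW_TAG_member ...                <- fields of this variant
         DW_TAG_variant                     <- default variant, no
           DW_TAG_member ...                   discr value/list

   with each member's data member location made relative to the embedding
   record, per VARIANT_PART_OFFSET.  */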
24918
24919 static void
24920 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24921 dw_die_ref context_die)
24922 {
24923 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24924 tree variant_part_offset = vlr_ctx->variant_part_offset;
24925 struct loc_descr_context ctx = {
24926 vlr_ctx->struct_type, /* context_type */
24927 NULL_TREE, /* base_decl */
24928 NULL, /* dpi */
24929 false, /* placeholder_arg */
24930 false /* placeholder_seen */
24931 };
24932
24933 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24934 NULL_TREE if there is no such field. */
24935 tree discr_decl = NULL_TREE;
24936 dw_discr_list_ref *discr_lists;
24937 unsigned discr_lists_length = 0;
24938 unsigned i;
24939
24940 dw_die_ref dwarf_proc_die = NULL;
24941 dw_die_ref variant_part_die
24942 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24943
24944 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24945
24946 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24947 &discr_decl, &discr_lists, &discr_lists_length);
24948
24949 if (discr_decl != NULL_TREE)
24950 {
24951 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24952
24953 if (discr_die)
24954 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24955 else
24956 /* We have no DIE for the discriminant, so just discard all
24957 discriminant information in the output. */
24958 discr_decl = NULL_TREE;
24959 }
24960
24961 /* If the offset for this variant part is more complex than a constant,
24962 create a DWARF procedure for it so that we will not have to generate DWARF
24963 expressions for it for each member. */
24964 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24965 && (dwarf_version >= 3 || !dwarf_strict))
24966 {
24967 const tree dwarf_proc_fndecl
24968 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24969 build_function_type (TREE_TYPE (variant_part_offset),
24970 NULL_TREE));
24971 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24972 const dw_loc_descr_ref dwarf_proc_body
24973 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24974
24975 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24976 dwarf_proc_fndecl, context_die);
24977 if (dwarf_proc_die != NULL)
24978 variant_part_offset = dwarf_proc_call;
24979 }
24980
24981 /* Output DIEs for all variants. */
24982 i = 0;
24983 for (tree variant = TYPE_FIELDS (variant_part_type);
24984 variant != NULL_TREE;
24985 variant = DECL_CHAIN (variant), ++i)
24986 {
24987 tree variant_type = TREE_TYPE (variant);
24988 dw_die_ref variant_die;
24989
24990 /* All variants (i.e. members of a variant part) are supposed to be
24991 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24992 under these records. */
24993 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24994
24995 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24996 equate_decl_number_to_die (variant, variant_die);
24997
24998 /* Output discriminant values this variant matches, if any. */
24999 if (discr_decl == NULL || discr_lists[i] == NULL)
25000 /* If we have no discriminant information at all, or no value list
25001 for this variant, this is probably the default variant: as the
25002 standard says, don't output any discriminant value/list attribute. */
25003 ;
25004 else if (discr_lists[i]->dw_discr_next == NULL
25005 && !discr_lists[i]->dw_discr_range)
25006 /* If there is only one accepted value, don't bother outputting a
25007 list. */
25008 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25009 else
25010 add_discr_list (variant_die, discr_lists[i]);
25011
25012 for (tree member = TYPE_FIELDS (variant_type);
25013 member != NULL_TREE;
25014 member = DECL_CHAIN (member))
25015 {
25016 struct vlr_context vlr_sub_ctx = {
25017 vlr_ctx->struct_type, /* struct_type */
25018 NULL /* variant_part_offset */
25019 };
25020 if (is_variant_part (member))
25021 {
25022 /* All offsets for fields inside variant parts are relative to
25023 the top-level embedding RECORD_TYPE's base address. On the
25024 other hand, offsets in GCC's types are relative to the
25025 nested-most variant part. So we have to sum offsets each time
25026 we recurse. */
25027
25028 vlr_sub_ctx.variant_part_offset
25029 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25030 variant_part_offset, byte_position (member));
25031 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25032 }
25033 else
25034 {
25035 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25036 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25037 }
25038 }
25039 }
25040
25041 free (discr_lists);
25042 }
25043
25044 /* Generate a DIE for a class member. */
25045
25046 static void
25047 gen_member_die (tree type, dw_die_ref context_die)
25048 {
25049 tree member;
25050 tree binfo = TYPE_BINFO (type);
25051
25052 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25053
25054 /* If this is not an incomplete type, output descriptions of each of its
25055 members. Note that as we output the DIEs necessary to represent the
25056 members of this record or union type, we will also be trying to output
25057 DIEs to represent the *types* of those members. However the `type'
25058 function (above) will specifically avoid generating type DIEs for member
25059 types *within* the list of member DIEs for this (containing) type except
25060 for those types (of members) which are explicitly marked as also being
25061 members of this (containing) type themselves. The g++ front end can
25062 force any given type to be treated as a member of some other (containing)
25063 type by setting the TYPE_CONTEXT of the given (member) type to point to
25064 the TREE node representing the appropriate (containing) type. */
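/* For instance (illustrative only, not from the original comment), with

     struct Outer { struct Inner { int i; } member; };

   the C++ front end sets TYPE_CONTEXT (Inner) to Outer, so the DIE for
   Inner is emitted as a child of the DIE for Outer rather than at
   compilation-unit scope.  */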
25065
25066 /* First output info about the base classes. */
25067 if (binfo && early_dwarf)
25068 {
25069 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25070 int i;
25071 tree base;
25072
25073 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25074 gen_inheritance_die (base,
25075 (accesses ? (*accesses)[i] : access_public_node),
25076 type,
25077 context_die);
25078 }
25079
25080 /* Now output info about the data members and type members. */
25081 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25082 {
25083 struct vlr_context vlr_ctx = { type, NULL_TREE };
25084 bool static_inline_p
25085 = (TREE_STATIC (member)
25086 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25087 != -1));
25088
25089 /* Ignore clones. */
25090 if (DECL_ABSTRACT_ORIGIN (member))
25091 continue;
25092
25093 /* If we thought we were generating minimal debug info for TYPE
25094 and then changed our minds, some of the member declarations
25095 may have already been defined. Don't define them again, but
25096 do put them in the right order. */
25097
25098 if (dw_die_ref child = lookup_decl_die (member))
25099 {
25100 /* Handle inline static data members, which only have in-class
25101 declarations. */
25102 dw_die_ref ref = NULL;
25103 if (child->die_tag == DW_TAG_variable
25104 && child->die_parent == comp_unit_die ())
25105 {
25106 ref = get_AT_ref (child, DW_AT_specification);
25107 /* For C++17 inline static data members followed by redundant
25108 out of class redeclaration, we might get here with
25109 child being the DIE created for the out of class
25110 redeclaration and with its DW_AT_specification being
25111 the DIE created for in-class definition. We want to
25112 reparent the latter, and don't want to create another
25113 DIE with DW_AT_specification in that case, because
25114 we already have one. */
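/* A concrete example (illustrative only, added for exposition):

     struct S { static inline int i = 0; };  // in-class definition
     int S::i;                                // redundant redeclaration

   Here CHILD may be the DIE created for the redeclaration and REF the
   DIE created for the in-class definition.  */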
25115 if (ref
25116 && static_inline_p
25117 && ref->die_tag == DW_TAG_variable
25118 && ref->die_parent == comp_unit_die ()
25119 && get_AT (ref, DW_AT_specification) == NULL)
25120 {
25121 child = ref;
25122 ref = NULL;
25123 static_inline_p = false;
25124 }
25125 }
25126
25127 if (child->die_tag == DW_TAG_variable
25128 && child->die_parent == comp_unit_die ()
25129 && ref == NULL)
25130 {
25131 reparent_child (child, context_die);
25132 if (dwarf_version < 5)
25133 child->die_tag = DW_TAG_member;
25134 }
25135 else
25136 splice_child_die (context_die, child);
25137 }
25138
25139 /* Do not generate standard DWARF for variant parts if we are generating
25140 the corresponding GNAT encodings: DIEs generated for both would
25141 conflict in our mappings. */
25142 else if (is_variant_part (member)
25143 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25144 {
25145 vlr_ctx.variant_part_offset = byte_position (member);
25146 gen_variant_part (member, &vlr_ctx, context_die);
25147 }
25148 else
25149 {
25150 vlr_ctx.variant_part_offset = NULL_TREE;
25151 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25152 }
25153
25154 /* For C++ inline static data members emit immediately a DW_TAG_variable
25155 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25156 DW_AT_specification. */
25157 if (static_inline_p)
25158 {
25159 int old_extern = DECL_EXTERNAL (member);
25160 DECL_EXTERNAL (member) = 0;
25161 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25162 DECL_EXTERNAL (member) = old_extern;
25163 }
25164 }
25165 }
25166
25167 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25168 is set, we pretend that the type was never defined, so we only get the
25169 member DIEs needed by later specification DIEs. */
25170
25171 static void
25172 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25173 enum debug_info_usage usage)
25174 {
25175 if (TREE_ASM_WRITTEN (type))
25176 {
25177 /* Fill in the bound of variable-length fields in late dwarf if
25178 still incomplete. */
25179 if (!early_dwarf && variably_modified_type_p (type, NULL))
25180 for (tree member = TYPE_FIELDS (type);
25181 member;
25182 member = DECL_CHAIN (member))
25183 fill_variable_array_bounds (TREE_TYPE (member));
25184 return;
25185 }
25186
25187 dw_die_ref type_die = lookup_type_die (type);
25188 dw_die_ref scope_die = 0;
25189 int nested = 0;
25190 int complete = (TYPE_SIZE (type)
25191 && (! TYPE_STUB_DECL (type)
25192 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25193 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25194 complete = complete && should_emit_struct_debug (type, usage);
25195
25196 if (type_die && ! complete)
25197 return;
25198
25199 if (TYPE_CONTEXT (type) != NULL_TREE
25200 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25201 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25202 nested = 1;
25203
25204 scope_die = scope_die_for (type, context_die);
25205
25206 /* Generate child DIEs for template parameters. */
25207 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25208 schedule_generic_params_dies_gen (type);
25209
25210 if (! type_die || (nested && is_cu_die (scope_die)))
25211 /* First occurrence of type or toplevel definition of nested class. */
25212 {
25213 dw_die_ref old_die = type_die;
25214
25215 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25216 ? record_type_tag (type) : DW_TAG_union_type,
25217 scope_die, type);
25218 equate_type_number_to_die (type, type_die);
25219 if (old_die)
25220 add_AT_specification (type_die, old_die);
25221 else
25222 add_name_attribute (type_die, type_tag (type));
25223 }
25224 else
25225 remove_AT (type_die, DW_AT_declaration);
25226
25227 /* If this type has been completed, then give it a byte_size attribute and
25228 then give a list of members. */
25229 if (complete && !ns_decl)
25230 {
25231 /* Prevent infinite recursion in cases where the type of some member of
25232 this type is expressed in terms of this type itself. */
25233 TREE_ASM_WRITTEN (type) = 1;
25234 add_byte_size_attribute (type_die, type);
25235 add_alignment_attribute (type_die, type);
25236 if (TYPE_STUB_DECL (type) != NULL_TREE)
25237 {
25238 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25239 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25240 }
25241
25242 /* If the first reference to this type was as the return type of an
25243 inline function, then it may not have a parent. Fix this now. */
25244 if (type_die->die_parent == NULL)
25245 add_child_die (scope_die, type_die);
25246
25247 push_decl_scope (type);
25248 gen_member_die (type, type_die);
25249 pop_decl_scope ();
25250
25251 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25252 if (TYPE_ARTIFICIAL (type))
25253 add_AT_flag (type_die, DW_AT_artificial, 1);
25254
25255 /* GNU extension: Record what type our vtable lives in. */
25256 if (TYPE_VFIELD (type))
25257 {
25258 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25259
25260 gen_type_die (vtype, context_die);
25261 add_AT_die_ref (type_die, DW_AT_containing_type,
25262 lookup_type_die (vtype));
25263 }
25264 }
25265 else
25266 {
25267 add_AT_flag (type_die, DW_AT_declaration, 1);
25268
25269 /* We don't need to do this for function-local types. */
25270 if (TYPE_STUB_DECL (type)
25271 && ! decl_function_context (TYPE_STUB_DECL (type)))
25272 vec_safe_push (incomplete_types, type);
25273 }
25274
25275 if (get_AT (type_die, DW_AT_name))
25276 add_pubtype (type, type_die);
25277 }
25278
25279 /* Generate a DIE for a subroutine _type_. */
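/* Hedged illustration (added, not from the original comment): for
   ref-qualified C++ member function types such as

     struct S { void f () &; void g () &&; };

   the code below asks the language hook whether to add DW_AT_reference
   (for the '&' qualifier) or DW_AT_rvalue_reference (for '&&'); these are
   DWARF 5 attributes, also emitted as an extension when not in strict
   DWARF mode.  */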
25280
25281 static void
25282 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25283 {
25284 tree return_type = TREE_TYPE (type);
25285 dw_die_ref subr_die
25286 = new_die (DW_TAG_subroutine_type,
25287 scope_die_for (type, context_die), type);
25288
25289 equate_type_number_to_die (type, subr_die);
25290 add_prototyped_attribute (subr_die, type);
25291 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25292 context_die);
25293 add_alignment_attribute (subr_die, type);
25294 gen_formal_types_die (type, subr_die);
25295
25296 if (get_AT (subr_die, DW_AT_name))
25297 add_pubtype (type, subr_die);
25298 if ((dwarf_version >= 5 || !dwarf_strict)
25299 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25300 add_AT_flag (subr_die, DW_AT_reference, 1);
25301 if ((dwarf_version >= 5 || !dwarf_strict)
25302 && lang_hooks.types.type_dwarf_attribute (type,
25303 DW_AT_rvalue_reference) != -1)
25304 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25305 }
25306
25307 /* Generate a DIE for a type definition. */
25308
25309 static void
25310 gen_typedef_die (tree decl, dw_die_ref context_die)
25311 {
25312 dw_die_ref type_die;
25313 tree type;
25314
25315 if (TREE_ASM_WRITTEN (decl))
25316 {
25317 if (DECL_ORIGINAL_TYPE (decl))
25318 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25319 return;
25320 }
25321
25322 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25323 checks in process_scope_var and modified_type_die), this should be called
25324 only for original types. */
25325 gcc_assert (decl_ultimate_origin (decl) == NULL
25326 || decl_ultimate_origin (decl) == decl);
25327
25328 TREE_ASM_WRITTEN (decl) = 1;
25329 type_die = new_die (DW_TAG_typedef, context_die, decl);
25330
25331 add_name_and_src_coords_attributes (type_die, decl);
25332 if (DECL_ORIGINAL_TYPE (decl))
25333 {
25334 type = DECL_ORIGINAL_TYPE (decl);
25335 if (type == error_mark_node)
25336 return;
25337
25338 gcc_assert (type != TREE_TYPE (decl));
25339 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25340 }
25341 else
25342 {
25343 type = TREE_TYPE (decl);
25344 if (type == error_mark_node)
25345 return;
25346
25347 if (is_naming_typedef_decl (TYPE_NAME (type)))
25348 {
25349 /* Here, we are in the case of decl being a typedef naming
25350 an anonymous type, e.g.:
25351 typedef struct {...} foo;
25352 In that case TREE_TYPE (decl) is not a typedef variant
25353 type and TYPE_NAME of the anonymous type is set to the
25354 TYPE_DECL of the typedef. This construct is emitted by
25355 the C++ FE.
25356
25357 TYPE is the anonymous struct named by the typedef
25358 DECL. As we need the DW_AT_type attribute of the
25359 DW_TAG_typedef to point to the DIE of TYPE, let's
25360 generate that DIE right away. add_type_attribute
25361 called below will then pick (via lookup_type_die) that
25362 anonymous struct DIE. */
25363 if (!TREE_ASM_WRITTEN (type))
25364 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25365
25366 /* This is a GNU Extension. We are adding a
25367 DW_AT_linkage_name attribute to the DIE of the
25368 anonymous struct TYPE. The value of that attribute
25369 is the name of the typedef decl naming the anonymous
25370 struct. This greatly eases the work of consumers of
25371 this debug info. */
25372 add_linkage_name_raw (lookup_type_die (type), decl);
25373 }
25374 }
25375
25376 add_type_attribute (type_die, type, decl_quals (decl), false,
25377 context_die);
25378
25379 if (is_naming_typedef_decl (decl))
25380 /* We want that all subsequent calls to lookup_type_die with
25381 TYPE in argument yield the DW_TAG_typedef we have just
25382 created. */
25383 equate_type_number_to_die (type, type_die);
25384
25385 add_alignment_attribute (type_die, TREE_TYPE (decl));
25386
25387 add_accessibility_attribute (type_die, decl);
25388
25389 if (DECL_ABSTRACT_P (decl))
25390 equate_decl_number_to_die (decl, type_die);
25391
25392 if (get_AT (type_die, DW_AT_name))
25393 add_pubtype (decl, type_die);
25394 }
25395
25396 /* Generate a DIE for a struct, class, enum or union type. */
25397
25398 static void
25399 gen_tagged_type_die (tree type,
25400 dw_die_ref context_die,
25401 enum debug_info_usage usage)
25402 {
25403 int need_pop;
25404
25405 if (type == NULL_TREE
25406 || !is_tagged_type (type))
25407 return;
25408
25409 if (TREE_ASM_WRITTEN (type))
25410 need_pop = 0;
25411 /* If this is a nested type whose containing class hasn't been written
25412 out yet, writing it out will cover this one, too. This does not apply
25413 to instantiations of member class templates; they need to be added to
25414 the containing class as they are generated. FIXME: This hurts the
25415 idea of combining type decls from multiple TUs, since we can't predict
25416 what set of template instantiations we'll get. */
25417 else if (TYPE_CONTEXT (type)
25418 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25419 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25420 {
25421 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25422
25423 if (TREE_ASM_WRITTEN (type))
25424 return;
25425
25426 /* If that failed, attach ourselves to the stub. */
25427 push_decl_scope (TYPE_CONTEXT (type));
25428 context_die = lookup_type_die (TYPE_CONTEXT (type));
25429 need_pop = 1;
25430 }
25431 else if (TYPE_CONTEXT (type) != NULL_TREE
25432 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25433 {
25434 /* If this type is local to a function that hasn't been written
25435 out yet, use a NULL context for now; it will be fixed up in
25436 decls_for_scope. */
25437 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25438 /* A declaration DIE doesn't count; nested types need to go in the
25439 specification. */
25440 if (context_die && is_declaration_die (context_die))
25441 context_die = NULL;
25442 need_pop = 0;
25443 }
25444 else
25445 {
25446 context_die = declare_in_namespace (type, context_die);
25447 need_pop = 0;
25448 }
25449
25450 if (TREE_CODE (type) == ENUMERAL_TYPE)
25451 {
25452 /* This might have been written out by the call to
25453 declare_in_namespace. */
25454 if (!TREE_ASM_WRITTEN (type))
25455 gen_enumeration_type_die (type, context_die);
25456 }
25457 else
25458 gen_struct_or_union_type_die (type, context_die, usage);
25459
25460 if (need_pop)
25461 pop_decl_scope ();
25462
25463 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25464 it up if it is ever completed. gen_*_type_die will set it for us
25465 when appropriate. */
25466 }
25467
25468 /* Generate a type description DIE. */
25469
25470 static void
25471 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25472 enum debug_info_usage usage)
25473 {
25474 struct array_descr_info info;
25475
25476 if (type == NULL_TREE || type == error_mark_node)
25477 return;
25478
25479 if (flag_checking && type)
25480 verify_type (type);
25481
25482 if (TYPE_NAME (type) != NULL_TREE
25483 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25484 && is_redundant_typedef (TYPE_NAME (type))
25485 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25486 /* The DECL of this type is a typedef we don't want to emit debug
25487 info for but we want debug info for its underlying typedef.
25488 This can happen for e.g, the injected-class-name of a C++
25489 type. */
25490 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25491
25492 /* If TYPE is a typedef type variant, let's generate debug info
25493 for the parent typedef which TYPE is a type of. */
25494 if (typedef_variant_p (type))
25495 {
25496 if (TREE_ASM_WRITTEN (type))
25497 return;
25498
25499 tree name = TYPE_NAME (type);
25500 tree origin = decl_ultimate_origin (name);
25501 if (origin != NULL && origin != name)
25502 {
25503 gen_decl_die (origin, NULL, NULL, context_die);
25504 return;
25505 }
25506
25507 /* Prevent broken recursion; we can't hand off to the same type. */
25508 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25509
25510 /* Give typedefs the right scope. */
25511 context_die = scope_die_for (type, context_die);
25512
25513 TREE_ASM_WRITTEN (type) = 1;
25514
25515 gen_decl_die (name, NULL, NULL, context_die);
25516 return;
25517 }
25518
25519 /* If type is an anonymous tagged type named by a typedef, let's
25520 generate debug info for the typedef. */
25521 if (is_naming_typedef_decl (TYPE_NAME (type)))
25522 {
25523 /* Give typedefs the right scope. */
25524 context_die = scope_die_for (type, context_die);
25525
25526 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25527 return;
25528 }
25529
25530 if (lang_hooks.types.get_debug_type)
25531 {
25532 tree debug_type = lang_hooks.types.get_debug_type (type);
25533
25534 if (debug_type != NULL_TREE && debug_type != type)
25535 {
25536 gen_type_die_with_usage (debug_type, context_die, usage);
25537 return;
25538 }
25539 }
25540
25541 /* We are going to output a DIE to represent the unqualified version
25542 of this type (i.e. without any const or volatile qualifiers) so
25543 get the main variant (i.e. the unqualified version) of this type
25544 now. (Vectors and arrays are special because the debugging info is in the
25545 cloned type itself. Similarly function/method types can contain extra
25546 ref-qualification). */
25547 if (TREE_CODE (type) == FUNCTION_TYPE
25548 || TREE_CODE (type) == METHOD_TYPE)
25549 {
25550 /* For function/method types, can't use type_main_variant here,
25551 because that can have different ref-qualifiers for C++,
25552 but try to canonicalize. */
25553 tree main = TYPE_MAIN_VARIANT (type);
25554 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25555 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25556 && check_base_type (t, main)
25557 && check_lang_type (t, type))
25558 {
25559 type = t;
25560 break;
25561 }
25562 }
25563 else if (TREE_CODE (type) != VECTOR_TYPE
25564 && TREE_CODE (type) != ARRAY_TYPE)
25565 type = type_main_variant (type);
25566
25567 /* If this is an array type with hidden descriptor, handle it first. */
25568 if (!TREE_ASM_WRITTEN (type)
25569 && lang_hooks.types.get_array_descr_info)
25570 {
25571 memset (&info, 0, sizeof (info));
25572 if (lang_hooks.types.get_array_descr_info (type, &info))
25573 {
25574 /* Fortran sometimes emits array types with no dimension. */
25575 gcc_assert (info.ndimensions >= 0
25576 && (info.ndimensions
25577 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25578 gen_descr_array_type_die (type, &info, context_die);
25579 TREE_ASM_WRITTEN (type) = 1;
25580 return;
25581 }
25582 }
25583
25584 if (TREE_ASM_WRITTEN (type))
25585 {
25586 /* Variable-length types may be incomplete even if
25587 TREE_ASM_WRITTEN. For such types, fall through to
25588 gen_array_type_die() and possibly fill in
25589 DW_AT_{upper,lower}_bound attributes. */
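/* For example (illustrative only, added for exposition), a variably
   modified type such as the array type of A in

     void f (int n) { int a[n]; ... }

   can have its DIE created early with unknown bounds; we fall through so
   the later pass can fill in DW_AT_upper_bound once the bound's location
   is known.  */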
25590 if ((TREE_CODE (type) != ARRAY_TYPE
25591 && TREE_CODE (type) != RECORD_TYPE
25592 && TREE_CODE (type) != UNION_TYPE
25593 && TREE_CODE (type) != QUAL_UNION_TYPE)
25594 || !variably_modified_type_p (type, NULL))
25595 return;
25596 }
25597
25598 switch (TREE_CODE (type))
25599 {
25600 case ERROR_MARK:
25601 break;
25602
25603 case POINTER_TYPE:
25604 case REFERENCE_TYPE:
25605 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25606 ensures that the gen_type_die recursion will terminate even if the
25607 type is recursive. Recursive types are possible in Ada. */
25608 /* ??? We could perhaps do this for all types before the switch
25609 statement. */
25610 TREE_ASM_WRITTEN (type) = 1;
25611
25612 /* For these types, all that is required is that we output a DIE (or a
25613 set of DIEs) to represent the "basis" type. */
25614 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25615 DINFO_USAGE_IND_USE);
25616 break;
25617
25618 case OFFSET_TYPE:
25619 /* This code is used for C++ pointer-to-data-member types.
25620 Output a description of the relevant class type. */
25621 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25622 DINFO_USAGE_IND_USE);
25623
25624 /* Output a description of the type of the object pointed to. */
25625 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25626 DINFO_USAGE_IND_USE);
25627
25628 /* Now output a DIE to represent this pointer-to-data-member type
25629 itself. */
25630 gen_ptr_to_mbr_type_die (type, context_die);
25631 break;
25632
25633 case FUNCTION_TYPE:
25634 /* Force out return type (in case it wasn't forced out already). */
25635 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25636 DINFO_USAGE_DIR_USE);
25637 gen_subroutine_type_die (type, context_die);
25638 break;
25639
25640 case METHOD_TYPE:
25641 /* Force out return type (in case it wasn't forced out already). */
25642 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25643 DINFO_USAGE_DIR_USE);
25644 gen_subroutine_type_die (type, context_die);
25645 break;
25646
25647 case ARRAY_TYPE:
25648 case VECTOR_TYPE:
25649 gen_array_type_die (type, context_die);
25650 break;
25651
25652 case ENUMERAL_TYPE:
25653 case RECORD_TYPE:
25654 case UNION_TYPE:
25655 case QUAL_UNION_TYPE:
25656 gen_tagged_type_die (type, context_die, usage);
25657 return;
25658
25659 case VOID_TYPE:
25660 case INTEGER_TYPE:
25661 case REAL_TYPE:
25662 case FIXED_POINT_TYPE:
25663 case COMPLEX_TYPE:
25664 case BOOLEAN_TYPE:
25665 case POINTER_BOUNDS_TYPE:
25666 /* No DIEs needed for fundamental types. */
25667 break;
25668
25669 case NULLPTR_TYPE:
25670 case LANG_TYPE:
25671 /* Just use DW_TAG_unspecified_type. */
25672 {
25673 dw_die_ref type_die = lookup_type_die (type);
25674 if (type_die == NULL)
25675 {
25676 tree name = TYPE_IDENTIFIER (type);
25677 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25678 type);
25679 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25680 equate_type_number_to_die (type, type_die);
25681 }
25682 }
25683 break;
25684
25685 default:
25686 if (is_cxx_auto (type))
25687 {
25688 tree name = TYPE_IDENTIFIER (type);
25689 dw_die_ref *die = (name == get_identifier ("auto")
25690 ? &auto_die : &decltype_auto_die);
25691 if (!*die)
25692 {
25693 *die = new_die (DW_TAG_unspecified_type,
25694 comp_unit_die (), NULL_TREE);
25695 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25696 }
25697 equate_type_number_to_die (type, *die);
25698 break;
25699 }
25700 gcc_unreachable ();
25701 }
25702
25703 TREE_ASM_WRITTEN (type) = 1;
25704 }
25705
25706 static void
25707 gen_type_die (tree type, dw_die_ref context_die)
25708 {
25709 if (type != error_mark_node)
25710 {
25711 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25712 if (flag_checking)
25713 {
25714 dw_die_ref die = lookup_type_die (type);
25715 if (die)
25716 check_die (die);
25717 }
25718 }
25719 }
25720
25721 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25722 things which are local to the given block. */
25723
25724 static void
25725 gen_block_die (tree stmt, dw_die_ref context_die)
25726 {
25727 int must_output_die = 0;
25728 bool inlined_func;
25729
25730 /* Ignore blocks that are NULL. */
25731 if (stmt == NULL_TREE)
25732 return;
25733
25734 inlined_func = inlined_function_outer_scope_p (stmt);
25735
25736 /* If the block is one fragment of a non-contiguous block, do not
25737 process the variables, since they will have been done by the
25738 origin block. Do process subblocks. */
25739 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25740 {
25741 tree sub;
25742
25743 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25744 gen_block_die (sub, context_die);
25745
25746 return;
25747 }
25748
25749 /* Determine if we need to output any Dwarf DIEs at all to represent this
25750 block. */
25751 if (inlined_func)
25752 /* The outer scopes for inlinings *must* always be represented. We
25753 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25754 must_output_die = 1;
25755 else
25756 {
25757 /* Determine if this block directly contains any "significant"
25758 local declarations which we will need to output DIEs for. */
25759 if (debug_info_level > DINFO_LEVEL_TERSE)
25760 /* We are not in terse mode so *any* local declaration counts
25761 as being a "significant" one. */
25762 must_output_die = ((BLOCK_VARS (stmt) != NULL
25763 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25764 && (TREE_USED (stmt)
25765 || TREE_ASM_WRITTEN (stmt)
25766 || BLOCK_ABSTRACT (stmt)));
25767 else if ((TREE_USED (stmt)
25768 || TREE_ASM_WRITTEN (stmt)
25769 || BLOCK_ABSTRACT (stmt))
25770 && !dwarf2out_ignore_block (stmt))
25771 must_output_die = 1;
25772 }
25773
25774 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25775 DIE for any block which contains no significant local declarations at
25776 all. Rather, in such cases we just call `decls_for_scope' so that any
25777 needed Dwarf info for any sub-blocks will get properly generated. Note
25778 that in terse mode, our definition of what constitutes a "significant"
25779 local declaration gets restricted to include only inlined function
25780 instances and local (nested) function definitions. */
25781 if (must_output_die)
25782 {
25783 if (inlined_func)
25784 {
25785 /* If STMT block is abstract, that means we have been called
25786 indirectly from dwarf2out_abstract_function.
25787 That function rightfully marks the descendant blocks (of
25788 the abstract function it is dealing with) as being abstract,
25789 precisely to prevent us from emitting any
25790 DW_TAG_inlined_subroutine DIE as a descendant
25791 of an abstract function instance. So in that case, we should
25792 not call gen_inlined_subroutine_die.
25793
25794 Later though, when cgraph asks dwarf2out to emit info
25795 for the concrete instance of the function decl into which
25796 the concrete instance of STMT got inlined, the latter will lead
25797 to the generation of a DW_TAG_inlined_subroutine DIE. */
25798 if (! BLOCK_ABSTRACT (stmt))
25799 gen_inlined_subroutine_die (stmt, context_die);
25800 }
25801 else
25802 gen_lexical_block_die (stmt, context_die);
25803 }
25804 else
25805 decls_for_scope (stmt, context_die);
25806 }
25807
25808 /* Process variable DECL (or variable with origin ORIGIN) within
25809 block STMT and add it to CONTEXT_DIE. */
25810 static void
25811 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25812 {
25813 dw_die_ref die;
25814 tree decl_or_origin = decl ? decl : origin;
25815
25816 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25817 die = lookup_decl_die (decl_or_origin);
25818 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25819 {
25820 if (TYPE_DECL_IS_STUB (decl_or_origin))
25821 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25822 else
25823 die = lookup_decl_die (decl_or_origin);
25824 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25825 if (! die && ! early_dwarf)
25826 return;
25827 }
25828 else
25829 die = NULL;
25830
25831 /* Avoid creating DIEs for local typedefs and concrete static variables that
25832 will only be pruned later. */
25833 if ((origin || decl_ultimate_origin (decl))
25834 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25835 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25836 {
25837 origin = decl_ultimate_origin (decl_or_origin);
25838 if (decl && VAR_P (decl) && die != NULL)
25839 {
25840 die = lookup_decl_die (origin);
25841 if (die != NULL)
25842 equate_decl_number_to_die (decl, die);
25843 }
25844 return;
25845 }
25846
25847 if (die != NULL && die->die_parent == NULL)
25848 add_child_die (context_die, die);
25849 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25850 {
25851 if (early_dwarf)
25852 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25853 stmt, context_die);
25854 }
25855 else
25856 {
25857 if (decl && DECL_P (decl))
25858 {
25859 die = lookup_decl_die (decl);
25860
25861 /* Early created DIEs do not have a parent as the decls refer
25862 to the function as DECL_CONTEXT rather than the BLOCK. */
25863 if (die && die->die_parent == NULL)
25864 {
25865 gcc_assert (in_lto_p);
25866 add_child_die (context_die, die);
25867 }
25868 }
25869
25870 gen_decl_die (decl, origin, NULL, context_die);
25871 }
25872 }
25873
25874 /* Generate all of the decls declared within a given scope and (recursively)
25875 all of its sub-blocks. */
25876
25877 static void
25878 decls_for_scope (tree stmt, dw_die_ref context_die)
25879 {
25880 tree decl;
25881 unsigned int i;
25882 tree subblocks;
25883
25884 /* Ignore NULL blocks. */
25885 if (stmt == NULL_TREE)
25886 return;
25887
25888 /* Output the DIEs to represent all of the data objects and typedefs
25889 declared directly within this block but not within any nested
25890 sub-blocks. Also, nested function and tag DIEs have been
25891 generated with a parent of NULL; fix that up now. We don't
25892 have to do this if we're at -g1. */
25893 if (debug_info_level > DINFO_LEVEL_TERSE)
25894 {
25895 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25896 process_scope_var (stmt, decl, NULL_TREE, context_die);
25897 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25898 origin - avoid doing this twice as we have no good way to see
25899 if we've done it once already. */
25900 if (! early_dwarf)
25901 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25902 {
25903 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25904 if (decl == current_function_decl)
25905 /* Ignore declarations of the current function: although they
25906 are declarations, gen_subprogram_die would treat them
25907 as definitions again (because they are equal to
25908 current_function_decl) and endlessly recurse. */;
25909 else if (TREE_CODE (decl) == FUNCTION_DECL)
25910 process_scope_var (stmt, decl, NULL_TREE, context_die);
25911 else
25912 process_scope_var (stmt, NULL_TREE, decl, context_die);
25913 }
25914 }
25915
25916 /* Even if we're at -g1, we need to process the subblocks in order to get
25917 inlined call information. */
25918
25919 /* Output the DIEs to represent all sub-blocks (and the items declared
25920 therein) of this block. */
25921 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25922 subblocks != NULL;
25923 subblocks = BLOCK_CHAIN (subblocks))
25924 gen_block_die (subblocks, context_die);
25925 }
25926
25927 /* Is this a typedef we can avoid emitting? */
25928
25929 bool
25930 is_redundant_typedef (const_tree decl)
25931 {
25932 if (TYPE_DECL_IS_STUB (decl))
25933 return true;
25934
25935 if (DECL_ARTIFICIAL (decl)
25936 && DECL_CONTEXT (decl)
25937 && is_tagged_type (DECL_CONTEXT (decl))
25938 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25939 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25940 /* Also ignore the artificial member typedef for the class name. */
25941 return true;
25942
25943 return false;
25944 }
25945
25946 /* Return TRUE if TYPE is a typedef that names a type for linkage
25947 purposes. This kind of typedefs is produced by the C++ FE for
25948 constructs like:
25949
25950 typedef struct {...} foo;
25951
25952 In that case, there is no typedef variant type produced for foo.
25953 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25954 struct type. */
25955
25956 static bool
25957 is_naming_typedef_decl (const_tree decl)
25958 {
25959 if (decl == NULL_TREE
25960 || TREE_CODE (decl) != TYPE_DECL
25961 || DECL_NAMELESS (decl)
25962 || !is_tagged_type (TREE_TYPE (decl))
25963 || DECL_IS_BUILTIN (decl)
25964 || is_redundant_typedef (decl)
25965 /* It looks like Ada produces TYPE_DECLs that are very similar
25966 to C++ naming typedefs but that have different
25967 semantics. Let's be specific to C++ for now. */
25968 || !is_cxx (decl))
25969 return FALSE;
25970
25971 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25972 && TYPE_NAME (TREE_TYPE (decl)) == decl
25973 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25974 != TYPE_NAME (TREE_TYPE (decl))));
25975 }
25976
25977 /* Looks up the DIE for a context. */
25978
25979 static inline dw_die_ref
25980 lookup_context_die (tree context)
25981 {
25982 if (context)
25983 {
25984 /* Find die that represents this context. */
25985 if (TYPE_P (context))
25986 {
25987 context = TYPE_MAIN_VARIANT (context);
25988 dw_die_ref ctx = lookup_type_die (context);
25989 if (!ctx)
25990 return NULL;
25991 return strip_naming_typedef (context, ctx);
25992 }
25993 else
25994 return lookup_decl_die (context);
25995 }
25996 return comp_unit_die ();
25997 }
25998
25999 /* Returns the DIE for a context. */
26000
26001 static inline dw_die_ref
26002 get_context_die (tree context)
26003 {
26004 if (context)
26005 {
26006 /* Find die that represents this context. */
26007 if (TYPE_P (context))
26008 {
26009 context = TYPE_MAIN_VARIANT (context);
26010 return strip_naming_typedef (context, force_type_die (context));
26011 }
26012 else
26013 return force_decl_die (context);
26014 }
26015 return comp_unit_die ();
26016 }
26017
26018 /* Returns the DIE for decl. A DIE will always be returned. */
26019
26020 static dw_die_ref
26021 force_decl_die (tree decl)
26022 {
26023 dw_die_ref decl_die;
26024 unsigned saved_external_flag;
26025 tree save_fn = NULL_TREE;
26026 decl_die = lookup_decl_die (decl);
26027 if (!decl_die)
26028 {
26029 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26030
26031 decl_die = lookup_decl_die (decl);
26032 if (decl_die)
26033 return decl_die;
26034
26035 switch (TREE_CODE (decl))
26036 {
26037 case FUNCTION_DECL:
26038 /* Clear current_function_decl, so that gen_subprogram_die thinks
26039 that this is a declaration. At this point, we just want to force
26040 declaration die. */
26041 save_fn = current_function_decl;
26042 current_function_decl = NULL_TREE;
26043 gen_subprogram_die (decl, context_die);
26044 current_function_decl = save_fn;
26045 break;
26046
26047 case VAR_DECL:
26048 /* Set external flag to force declaration die. Restore it after
26049 gen_decl_die() call. */
26050 saved_external_flag = DECL_EXTERNAL (decl);
26051 DECL_EXTERNAL (decl) = 1;
26052 gen_decl_die (decl, NULL, NULL, context_die);
26053 DECL_EXTERNAL (decl) = saved_external_flag;
26054 break;
26055
26056 case NAMESPACE_DECL:
26057 if (dwarf_version >= 3 || !dwarf_strict)
26058 dwarf2out_decl (decl);
26059 else
26060 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
26061 decl_die = comp_unit_die ();
26062 break;
26063
26064 case TRANSLATION_UNIT_DECL:
26065 decl_die = comp_unit_die ();
26066 break;
26067
26068 default:
26069 gcc_unreachable ();
26070 }
26071
26072 /* We should be able to find the DIE now. */
26073 if (!decl_die)
26074 decl_die = lookup_decl_die (decl);
26075 gcc_assert (decl_die);
26076 }
26077
26078 return decl_die;
26079 }
26080
26081 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
26082 always returned. */
26083
26084 static dw_die_ref
26085 force_type_die (tree type)
26086 {
26087 dw_die_ref type_die;
26088
26089 type_die = lookup_type_die (type);
26090 if (!type_die)
26091 {
26092 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26093
26094 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26095 false, context_die);
26096 gcc_assert (type_die);
26097 }
26098 return type_die;
26099 }
26100
26101 /* Force out any required namespaces to be able to output DECL,
26102 and return the new context_die for it, if it's changed. */
26103
26104 static dw_die_ref
26105 setup_namespace_context (tree thing, dw_die_ref context_die)
26106 {
26107 tree context = (DECL_P (thing)
26108 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26109 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26110 /* Force out the namespace. */
26111 context_die = force_decl_die (context);
26112
26113 return context_die;
26114 }
26115
26116 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26117 type) within its namespace, if appropriate.
26118
26119 For compatibility with older debuggers, namespace DIEs only contain
26120 declarations; all definitions are emitted at CU scope, with
26121 DW_AT_specification pointing to the declaration (like with class
26122 members). */
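/* Rough sketch of the resulting layout (illustrative addition): for

     namespace N { int i = 0; }

   the namespace DIE only carries a declaration, and the definition lives
   at CU scope:

     DW_TAG_namespace "N"
       DW_TAG_variable "i"          <- declaration
     DW_TAG_variable                <- definition, child of the CU
       DW_AT_specification -> the declaration above  */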
26123
26124 static dw_die_ref
26125 declare_in_namespace (tree thing, dw_die_ref context_die)
26126 {
26127 dw_die_ref ns_context;
26128
26129 if (debug_info_level <= DINFO_LEVEL_TERSE)
26130 return context_die;
26131
26132 /* External declarations in the local scope only need to be emitted
26133 once, not once in the namespace and once in the scope.
26134
26135 This avoids declaring the `extern' below in the
26136 namespace DIE as well as in the innermost scope:
26137
26138 namespace S
26139 {
26140 int i=5;
26141 int foo()
26142 {
26143 int i=8;
26144 extern int i;
26145 return i;
26146 }
26147 }
26148 */
26149 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26150 return context_die;
26151
26152 /* If this decl is from an inlined function, then don't try to emit it in its
26153 namespace, as we will get confused. It would have already been emitted
26154 when the abstract instance of the inline function was emitted anyway. */
26155 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26156 return context_die;
26157
26158 ns_context = setup_namespace_context (thing, context_die);
26159
26160 if (ns_context != context_die)
26161 {
26162 if (is_fortran ())
26163 return ns_context;
26164 if (DECL_P (thing))
26165 gen_decl_die (thing, NULL, NULL, ns_context);
26166 else
26167 gen_type_die (thing, ns_context);
26168 }
26169 return context_die;
26170 }
26171
26172 /* Generate a DIE for a namespace or namespace alias. */
26173
26174 static void
26175 gen_namespace_die (tree decl, dw_die_ref context_die)
26176 {
26177 dw_die_ref namespace_die;
26178
26179 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26180 they are an alias of. */
26181 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26182 {
26183 /* Output a real namespace or module. */
26184 context_die = setup_namespace_context (decl, comp_unit_die ());
26185 namespace_die = new_die (is_fortran ()
26186 ? DW_TAG_module : DW_TAG_namespace,
26187 context_die, decl);
26188 /* For Fortran modules defined in a different CU, don't add src coords. */
26189 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26190 {
26191 const char *name = dwarf2_name (decl, 0);
26192 if (name)
26193 add_name_attribute (namespace_die, name);
26194 }
26195 else
26196 add_name_and_src_coords_attributes (namespace_die, decl);
26197 if (DECL_EXTERNAL (decl))
26198 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26199 equate_decl_number_to_die (decl, namespace_die);
26200 }
26201 else
26202 {
26203 /* Output a namespace alias. */
26204
26205 /* Force out the namespace we are an alias of, if necessary. */
26206 dw_die_ref origin_die
26207 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26208
26209 if (DECL_FILE_SCOPE_P (decl)
26210 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26211 context_die = setup_namespace_context (decl, comp_unit_die ());
26212 /* Now create the namespace alias DIE. */
26213 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26214 add_name_and_src_coords_attributes (namespace_die, decl);
26215 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26216 equate_decl_number_to_die (decl, namespace_die);
26217 }
26218 if ((dwarf_version >= 5 || !dwarf_strict)
26219 && lang_hooks.decls.decl_dwarf_attribute (decl,
26220 DW_AT_export_symbols) == 1)
26221 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26222
26223 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26224 if (want_pubnames ())
26225 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26226 }
26227
26228 /* Generate Dwarf debug information for a decl described by DECL.
26229 The return value is currently only meaningful for PARM_DECLs,
26230 for all other decls it returns NULL.
26231
26232 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26233 It can be NULL otherwise. */
26234
26235 static dw_die_ref
26236 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26237 dw_die_ref context_die)
26238 {
26239 tree decl_or_origin = decl ? decl : origin;
26240 tree class_origin = NULL, ultimate_origin;
26241
26242 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26243 return NULL;
26244
26245 /* Ignore pointer bounds decls. */
26246 if (DECL_P (decl_or_origin)
26247 && TREE_TYPE (decl_or_origin)
26248 && POINTER_BOUNDS_P (decl_or_origin))
26249 return NULL;
26250
26251 switch (TREE_CODE (decl_or_origin))
26252 {
26253 case ERROR_MARK:
26254 break;
26255
26256 case CONST_DECL:
26257 if (!is_fortran () && !is_ada ())
26258 {
26259 /* The individual enumerators of an enum type get output when we output
26260 the Dwarf representation of the relevant enum type itself. */
26261 break;
26262 }
26263
26264 /* Emit its type. */
26265 gen_type_die (TREE_TYPE (decl), context_die);
26266
26267 /* And its containing namespace. */
26268 context_die = declare_in_namespace (decl, context_die);
26269
26270 gen_const_die (decl, context_die);
26271 break;
26272
26273 case FUNCTION_DECL:
26274 #if 0
26275 /* FIXME */
26276 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26277 on local redeclarations of global functions. That seems broken. */
26278 if (current_function_decl != decl)
26279 /* This is only a declaration. */;
26280 #endif
26281
26282 /* We should have abstract copies already and should not generate
26283 stray type DIEs in late LTO dumping. */
26284 if (! early_dwarf)
26285 ;
26286
26287 /* If we're emitting a clone, emit info for the abstract instance. */
26288 else if (origin || DECL_ORIGIN (decl) != decl)
26289 dwarf2out_abstract_function (origin
26290 ? DECL_ORIGIN (origin)
26291 : DECL_ABSTRACT_ORIGIN (decl));
26292
26293 /* If we're emitting a possibly inlined function emit it as
26294 abstract instance. */
26295 else if (cgraph_function_possibly_inlined_p (decl)
26296 && ! DECL_ABSTRACT_P (decl)
26297 && ! class_or_namespace_scope_p (context_die)
26298 /* dwarf2out_abstract_function won't emit a die if this is just
26299 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26300 that case, because that works only if we have a die. */
26301 && DECL_INITIAL (decl) != NULL_TREE)
26302 dwarf2out_abstract_function (decl);
26303
26304 /* Otherwise we're emitting the primary DIE for this decl. */
26305 else if (debug_info_level > DINFO_LEVEL_TERSE)
26306 {
26307 /* Before we describe the FUNCTION_DECL itself, make sure that we
26308 have its containing type. */
26309 if (!origin)
26310 origin = decl_class_context (decl);
26311 if (origin != NULL_TREE)
26312 gen_type_die (origin, context_die);
26313
26314 /* And its return type. */
26315 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26316
26317 /* And its virtual context. */
26318 if (DECL_VINDEX (decl) != NULL_TREE)
26319 gen_type_die (DECL_CONTEXT (decl), context_die);
26320
26321 /* Make sure we have a member DIE for decl. */
26322 if (origin != NULL_TREE)
26323 gen_type_die_for_member (origin, decl, context_die);
26324
26325 /* And its containing namespace. */
26326 context_die = declare_in_namespace (decl, context_die);
26327 }
26328
26329 /* Now output a DIE to represent the function itself. */
26330 if (decl)
26331 gen_subprogram_die (decl, context_die);
26332 break;
26333
26334 case TYPE_DECL:
26335 /* If we are in terse mode, don't generate any DIEs to represent any
26336 actual typedefs. */
26337 if (debug_info_level <= DINFO_LEVEL_TERSE)
26338 break;
26339
26340 /* In the special case of a TYPE_DECL node representing the declaration
26341 of some type tag, if the given TYPE_DECL is marked as having been
26342 instantiated from some other (original) TYPE_DECL node (e.g. one which
26343 was generated within the original definition of an inline function) we
26344 used to generate a special (abbreviated) DW_TAG_structure_type,
26345 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26346 should be actually referencing those DIEs, as variable DIEs with that
26347 type would be emitted already in the abstract origin, so it was always
26348 removed during unused type pruning. Don't add anything in this
26349 case. */
26350 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26351 break;
26352
26353 if (is_redundant_typedef (decl))
26354 gen_type_die (TREE_TYPE (decl), context_die);
26355 else
26356 /* Output a DIE to represent the typedef itself. */
26357 gen_typedef_die (decl, context_die);
26358 break;
26359
26360 case LABEL_DECL:
26361 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26362 gen_label_die (decl, context_die);
26363 break;
26364
26365 case VAR_DECL:
26366 case RESULT_DECL:
26367 /* If we are in terse mode, don't generate any DIEs to represent any
26368 variable declarations or definitions. */
26369 if (debug_info_level <= DINFO_LEVEL_TERSE)
26370 break;
26371
26372 /* Avoid generating stray type DIEs during late dwarf dumping.
26373 All types have been dumped early. */
26374 if (early_dwarf
26375 /* ??? But in LTRANS we cannot annotate early created variably
26376 modified type DIEs without copying them and adjusting all
26377 references to them. Dump them again as happens for inlining
26378 which copies both the decl and the types. */
26379 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26380 in VLA bound information for example. */
26381 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26382 current_function_decl)))
26383 {
26384 /* Output any DIEs that are needed to specify the type of this data
26385 object. */
26386 if (decl_by_reference_p (decl_or_origin))
26387 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26388 else
26389 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26390 }
26391
26392 if (early_dwarf)
26393 {
26394 /* And its containing type. */
26395 class_origin = decl_class_context (decl_or_origin);
26396 if (class_origin != NULL_TREE)
26397 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26398
26399 /* And its containing namespace. */
26400 context_die = declare_in_namespace (decl_or_origin, context_die);
26401 }
26402
26403 /* Now output the DIE to represent the data object itself. This gets
26404 complicated because of the possibility that the VAR_DECL really
26405 represents an inlined instance of a formal parameter for an inline
26406 function. */
26407 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26408 if (ultimate_origin != NULL_TREE
26409 && TREE_CODE (ultimate_origin) == PARM_DECL)
26410 gen_formal_parameter_die (decl, origin,
26411 true /* Emit name attribute. */,
26412 context_die);
26413 else
26414 gen_variable_die (decl, origin, context_die);
26415 break;
26416
26417 case FIELD_DECL:
26418 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26419 /* Ignore the nameless fields that are used to skip bits but handle C++
26420 anonymous unions and structs. */
26421 if (DECL_NAME (decl) != NULL_TREE
26422 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26423 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26424 {
26425 gen_type_die (member_declared_type (decl), context_die);
26426 gen_field_die (decl, ctx, context_die);
26427 }
26428 break;
26429
26430 case PARM_DECL:
26431 /* Avoid generating stray type DIEs during late dwarf dumping.
26432 All types have been dumped early. */
26433 if (early_dwarf
26434 /* ??? But in LTRANS we cannot annotate early created variably
26435 modified type DIEs without copying them and adjusting all
26436 references to them. Dump them again as happens for inlining
26437 which copies both the decl and the types. */
26438 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26439 in VLA bound information for example. */
26440 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26441 current_function_decl)))
26442 {
26443 if (DECL_BY_REFERENCE (decl_or_origin))
26444 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26445 else
26446 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26447 }
26448 return gen_formal_parameter_die (decl, origin,
26449 true /* Emit name attribute. */,
26450 context_die);
26451
26452 case NAMESPACE_DECL:
26453 if (dwarf_version >= 3 || !dwarf_strict)
26454 gen_namespace_die (decl, context_die);
26455 break;
26456
26457 case IMPORTED_DECL:
26458 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26459 DECL_CONTEXT (decl), context_die);
26460 break;
26461
26462 case NAMELIST_DECL:
26463 gen_namelist_decl (DECL_NAME (decl), context_die,
26464 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26465 break;
26466
26467 default:
26468 /* Probably some frontend-internal decl. Assume we don't care. */
26469 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26470 break;
26471 }
26472
26473 return NULL;
26474 }
26475
26476 /* Output initial debug information for global DECL. Called at the
26477 end of the parsing process.
26478
26479 This is the initial debug generation process. As such, the DIEs
26480 generated may be incomplete. A later debug generation pass
26481 (dwarf2out_late_global_decl) will augment the information generated
26482 in this pass (e.g., with complete location info). */
26483
26484 static void
26485 dwarf2out_early_global_decl (tree decl)
26486 {
26487 set_early_dwarf s;
26488
26489 /* gen_decl_die() will set DECL_ABSTRACT because
26490 cgraph_function_possibly_inlined_p() returns true. This is in
26491 turn will cause DW_AT_inline attributes to be set.
26492
26493 This happens because at early dwarf generation, there is no
26494 cgraph information, causing cgraph_function_possibly_inlined_p()
26495 to return true. Trick cgraph_function_possibly_inlined_p()
26496 while we generate dwarf early. */
26497 bool save = symtab->global_info_ready;
26498 symtab->global_info_ready = true;
26499
26500 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26501 other DECLs and they can point to template types or other things
26502 that dwarf2out can't handle when done via dwarf2out_decl. */
26503 if (TREE_CODE (decl) != TYPE_DECL
26504 && TREE_CODE (decl) != PARM_DECL)
26505 {
26506 if (TREE_CODE (decl) == FUNCTION_DECL)
26507 {
26508 tree save_fndecl = current_function_decl;
26509
26510 /* For nested functions, make sure we have DIEs for the parents first
26511 so that all nested DIEs are generated at the proper scope in the
26512 first shot. */
26513 tree context = decl_function_context (decl);
26514 if (context != NULL)
26515 {
26516 dw_die_ref context_die = lookup_decl_die (context);
26517 current_function_decl = context;
26518
26519 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26520 enough so that it lands in its own context. This avoids type
26521 pruning issues later on. */
26522 if (context_die == NULL || is_declaration_die (context_die))
26523 dwarf2out_early_global_decl (context);
26524 }
26525
26526 /* Emit an abstract origin of a function first. This happens
26527 with C++ constructor clones for example and makes
26528 dwarf2out_abstract_function happy which requires the early
26529 DIE of the abstract instance to be present. */
26530 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26531 dw_die_ref origin_die;
26532 if (origin != NULL
26533 /* Do not emit the DIE multiple times but make sure to
26534 process it fully here in case we just saw a declaration. */
26535 && ((origin_die = lookup_decl_die (origin)) == NULL
26536 || is_declaration_die (origin_die)))
26537 {
26538 current_function_decl = origin;
26539 dwarf2out_decl (origin);
26540 }
26541
26542 /* Emit the DIE for decl but avoid doing that multiple times. */
26543 dw_die_ref old_die;
26544 if ((old_die = lookup_decl_die (decl)) == NULL
26545 || is_declaration_die (old_die))
26546 {
26547 current_function_decl = decl;
26548 dwarf2out_decl (decl);
26549 }
26550
26551 current_function_decl = save_fndecl;
26552 }
26553 else
26554 dwarf2out_decl (decl);
26555 }
26556 symtab->global_info_ready = save;
26557 }
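
/* A sketch of the nested-function ordering handled above (illustrative,
not from the original sources): if this is reached for a GNU C nested
function such as INNER in
void outer (void) { void inner (void) { } inner (); }
the code first recurses into OUTER via decl_function_context, so that
inner's DW_TAG_subprogram DIE is created as a child of outer's DIE
rather than directly under the compilation unit. */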
26558
26559 /* Return whether EXPR is an expression with the following pattern:
26560 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26561
26562 static bool
26563 is_trivial_indirect_ref (tree expr)
26564 {
26565 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26566 return false;
26567
26568 tree nop = TREE_OPERAND (expr, 0);
26569 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26570 return false;
26571
26572 tree int_cst = TREE_OPERAND (nop, 0);
26573 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26574 }
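
/* The only tree shape accepted above is INDIRECT_REF (NOP_EXPR
(INTEGER_CST)), i.e. (sketching an assumed example) the DECL_VALUE_EXPR
of a variable placed at a literal address, roughly *(int *) 0x1234.
Anything more complex is rejected so that dwarf2out_late_global_decl
below falls back to a constant value rather than a location that could
require a relocation against a text symbol in LTO object files. */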
26575
26576 /* Output debug information for global decl DECL. Called from
26577 toplev.c after compilation proper has finished. */
26578
26579 static void
26580 dwarf2out_late_global_decl (tree decl)
26581 {
26582 /* Fill-in any location information we were unable to determine
26583 on the first pass. */
26584 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
26585 {
26586 dw_die_ref die = lookup_decl_die (decl);
26587
26588 /* We may have to generate early debug late for LTO in case debug
26589 was not enabled at compile-time or the target doesn't support
26590 the LTO early debug scheme. */
26591 if (! die && in_lto_p)
26592 {
26593 dwarf2out_decl (decl);
26594 die = lookup_decl_die (decl);
26595 }
26596
26597 if (die)
26598 {
26599 /* We get called via the symtab code invoking late_global_decl
26600 for symbols that are optimized out.
26601
26602 Do not add locations for those, except if they have a
26603 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26604 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26605 INDIRECT_REF expression, as this could generate relocations to
26606 text symbols in LTO object files, which is invalid. */
26607 varpool_node *node = varpool_node::get (decl);
26608 if ((! node || ! node->definition)
26609 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26610 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26611 tree_add_const_value_attribute_for_decl (die, decl);
26612 else
26613 add_location_or_const_value_attribute (die, decl, false);
26614 }
26615 }
26616 }
26617
26618 /* Output debug information for type decl DECL. Called from toplev.c
26619 and from language front ends (to record built-in types). */
26620 static void
26621 dwarf2out_type_decl (tree decl, int local)
26622 {
26623 if (!local)
26624 {
26625 set_early_dwarf s;
26626 dwarf2out_decl (decl);
26627 }
26628 }
26629
26630 /* Output debug information for imported module or decl DECL.
26631 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26632 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26633 that DECL belongs to.
26634 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26635 static void
26636 dwarf2out_imported_module_or_decl_1 (tree decl,
26637 tree name,
26638 tree lexical_block,
26639 dw_die_ref lexical_block_die)
26640 {
26641 expanded_location xloc;
26642 dw_die_ref imported_die = NULL;
26643 dw_die_ref at_import_die;
26644
26645 if (TREE_CODE (decl) == IMPORTED_DECL)
26646 {
26647 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26648 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26649 gcc_assert (decl);
26650 }
26651 else
26652 xloc = expand_location (input_location);
26653
26654 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26655 {
26656 at_import_die = force_type_die (TREE_TYPE (decl));
26657 /* For namespace N { typedef void T; } using N::T; base_type_die
26658 returns NULL, but DW_TAG_imported_declaration requires
26659 a DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26660 if (!at_import_die)
26661 {
26662 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26663 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26664 at_import_die = lookup_type_die (TREE_TYPE (decl));
26665 gcc_assert (at_import_die);
26666 }
26667 }
26668 else
26669 {
26670 at_import_die = lookup_decl_die (decl);
26671 if (!at_import_die)
26672 {
26673 /* If we're trying to avoid duplicate debug info, we may not have
26674 emitted the member decl for this field. Emit it now. */
26675 if (TREE_CODE (decl) == FIELD_DECL)
26676 {
26677 tree type = DECL_CONTEXT (decl);
26678
26679 if (TYPE_CONTEXT (type)
26680 && TYPE_P (TYPE_CONTEXT (type))
26681 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26682 DINFO_USAGE_DIR_USE))
26683 return;
26684 gen_type_die_for_member (type, decl,
26685 get_context_die (TYPE_CONTEXT (type)));
26686 }
26687 if (TREE_CODE (decl) == NAMELIST_DECL)
26688 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26689 get_context_die (DECL_CONTEXT (decl)),
26690 NULL_TREE);
26691 else
26692 at_import_die = force_decl_die (decl);
26693 }
26694 }
26695
26696 if (TREE_CODE (decl) == NAMESPACE_DECL)
26697 {
26698 if (dwarf_version >= 3 || !dwarf_strict)
26699 imported_die = new_die (DW_TAG_imported_module,
26700 lexical_block_die,
26701 lexical_block);
26702 else
26703 return;
26704 }
26705 else
26706 imported_die = new_die (DW_TAG_imported_declaration,
26707 lexical_block_die,
26708 lexical_block);
26709
26710 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26711 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26712 if (debug_column_info && xloc.column)
26713 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26714 if (name)
26715 add_AT_string (imported_die, DW_AT_name,
26716 IDENTIFIER_POINTER (name));
26717 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26718 }
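
/* Rough shape of the output for a C++ "using namespace N;" at file scope
(a sketch; the exact attribute set depends on flags and DWARF version):
DW_TAG_imported_module
DW_AT_decl_file  <file of the using-directive>
DW_AT_decl_line  <line of the using-directive>
DW_AT_import     <reference to the DW_TAG_namespace DIE for N>
A "using N::foo;" declaration gets DW_TAG_imported_declaration instead,
and a renaming (e.g. a namespace alias) additionally gets DW_AT_name. */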
26719
26720 /* Output debug information for imported module or decl DECL.
26721 NAME is the non-NULL name in the context if the decl has been renamed.
26722 CHILD is true if decl is one of the decls renamed as part of
26723 importing a whole module.
26724 IMPLICIT is set if this hook is called for an implicit import
26725 such as inline namespace. */
26726
26727 static void
26728 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26729 bool child, bool implicit)
26730 {
26731 /* dw_die_ref at_import_die; */
26732 dw_die_ref scope_die;
26733
26734 if (debug_info_level <= DINFO_LEVEL_TERSE)
26735 return;
26736
26737 gcc_assert (decl);
26738
26739 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26740 should be enough; for DWARF4 and older, even if we emit
26741 DW_AT_export_symbols as an extension, add the implicit
26742 DW_TAG_imported_module anyway for consumers unaware of it. */
26743 if (implicit
26744 && dwarf_version >= 5
26745 && lang_hooks.decls.decl_dwarf_attribute (decl,
26746 DW_AT_export_symbols) == 1)
26747 return;
26748
26749 set_early_dwarf s;
26750
26751 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need two
26752 DIEs: the decl DIE for the reference and the scope DIE. First, get the DIE
26753 for the decl itself. */
26754
26755 /* Get the scope die for decl context. Use comp_unit_die for global module
26756 or decl. If die is not found for non globals, force new die. */
26757 if (context
26758 && TYPE_P (context)
26759 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26760 return;
26761
26762 scope_die = get_context_die (context);
26763
26764 if (child)
26765 {
26766 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26767 there is nothing we can do here. */
26768 if (dwarf_version < 3 && dwarf_strict)
26769 return;
26770
26771 gcc_assert (scope_die->die_child);
26772 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26773 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26774 scope_die = scope_die->die_child;
26775 }
26776
26777 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26778 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26779 }
26780
26781 /* Output debug information for namelists. */
26782
26783 static dw_die_ref
26784 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26785 {
26786 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26787 tree value;
26788 unsigned i;
26789
26790 if (debug_info_level <= DINFO_LEVEL_TERSE)
26791 return NULL;
26792
26793 gcc_assert (scope_die != NULL);
26794 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26795 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26796
26797 /* If there are no item_decls, we have a nondefining namelist, e.g.
26798 with USE association; hence, set DW_AT_declaration. */
26799 if (item_decls == NULL_TREE)
26800 {
26801 add_AT_flag (nml_die, DW_AT_declaration, 1);
26802 return nml_die;
26803 }
26804
26805 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26806 {
26807 nml_item_ref_die = lookup_decl_die (value);
26808 if (!nml_item_ref_die)
26809 nml_item_ref_die = force_decl_die (value);
26810
26811 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26812 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26813 }
26814 return nml_die;
26815 }
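
/* For a Fortran namelist such as "NAMELIST /NML/ A, B" this produces,
roughly (a sketch, assuming A and B already have DIEs):
DW_TAG_namelist  DW_AT_name "nml"
DW_TAG_namelist_item  DW_AT_namelist_items -> DIE of A
DW_TAG_namelist_item  DW_AT_namelist_items -> DIE of B
A use-associated namelist with no item decls gets only a
DW_TAG_namelist DIE with DW_AT_declaration set. */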
26816
26817
26818 /* Write the debugging output for DECL and generate its DIE. */
26819
26820 static void
26821 dwarf2out_decl (tree decl)
26822 {
26823 dw_die_ref context_die = comp_unit_die ();
26824
26825 switch (TREE_CODE (decl))
26826 {
26827 case ERROR_MARK:
26828 return;
26829
26830 case FUNCTION_DECL:
26831 /* If we're a nested function, initially use a parent of NULL; if we're
26832 a plain function, this will be fixed up in decls_for_scope. If
26833 we're a method, it will be ignored, since we already have a DIE. */
26834 if (decl_function_context (decl)
26835 /* But if we're in terse mode, we don't care about scope. */
26836 && debug_info_level > DINFO_LEVEL_TERSE)
26837 context_die = NULL;
26838 break;
26839
26840 case VAR_DECL:
26841 /* For local statics, look up the proper context DIE. */
26842 if (local_function_static (decl))
26843 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26844
26845 /* If we are in terse mode, don't generate any DIEs to represent any
26846 variable declarations or definitions. */
26847 if (debug_info_level <= DINFO_LEVEL_TERSE)
26848 return;
26849 break;
26850
26851 case CONST_DECL:
26852 if (debug_info_level <= DINFO_LEVEL_TERSE)
26853 return;
26854 if (!is_fortran () && !is_ada ())
26855 return;
26856 if (TREE_STATIC (decl) && decl_function_context (decl))
26857 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26858 break;
26859
26860 case NAMESPACE_DECL:
26861 case IMPORTED_DECL:
26862 if (debug_info_level <= DINFO_LEVEL_TERSE)
26863 return;
26864 if (lookup_decl_die (decl) != NULL)
26865 return;
26866 break;
26867
26868 case TYPE_DECL:
26869 /* Don't emit stubs for types unless they are needed by other DIEs. */
26870 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26871 return;
26872
26873 /* Don't bother trying to generate any DIEs to represent any of the
26874 normal built-in types for the language we are compiling. */
26875 if (DECL_IS_BUILTIN (decl))
26876 return;
26877
26878 /* If we are in terse mode, don't generate any DIEs for types. */
26879 if (debug_info_level <= DINFO_LEVEL_TERSE)
26880 return;
26881
26882 /* If we're a function-scope tag, initially use a parent of NULL;
26883 this will be fixed up in decls_for_scope. */
26884 if (decl_function_context (decl))
26885 context_die = NULL;
26886
26887 break;
26888
26889 case NAMELIST_DECL:
26890 break;
26891
26892 default:
26893 return;
26894 }
26895
26896 gen_decl_die (decl, NULL, NULL, context_die);
26897
26898 if (flag_checking)
26899 {
26900 dw_die_ref die = lookup_decl_die (decl);
26901 if (die)
26902 check_die (die);
26903 }
26904 }
26905
26906 /* Write the debugging output for DECL. */
26907
26908 static void
26909 dwarf2out_function_decl (tree decl)
26910 {
26911 dwarf2out_decl (decl);
26912 call_arg_locations = NULL;
26913 call_arg_loc_last = NULL;
26914 call_site_count = -1;
26915 tail_call_site_count = -1;
26916 decl_loc_table->empty ();
26917 cached_dw_loc_list_table->empty ();
26918 }
26919
26920 /* Output a marker (i.e. a label) for the beginning of the generated code for
26921 a lexical block. */
26922
26923 static void
26924 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26925 unsigned int blocknum)
26926 {
26927 switch_to_section (current_function_section ());
26928 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26929 }
26930
26931 /* Output a marker (i.e. a label) for the end of the generated code for a
26932 lexical block. */
26933
26934 static void
26935 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26936 {
26937 switch_to_section (current_function_section ());
26938 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26939 }
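
/* Together these two hooks bracket the generated code of each interesting
lexical block with a pair of labels; e.g. for block number 3 the assembly
would contain, roughly (label prefixes are target dependent, this is only
a sketch):
.LBB3:
... code of the block ...
.LBE3:
The labels are later used to describe the address range of the
corresponding DW_TAG_lexical_block DIE. */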
26940
26941 /* Returns nonzero if it is appropriate not to emit any debugging
26942 information for BLOCK, because it doesn't contain any instructions.
26943
26944 Don't allow this for blocks with nested functions or local classes
26945 as we would end up with orphans, and in the presence of scheduling
26946 we may end up calling them anyway. */
26947
26948 static bool
26949 dwarf2out_ignore_block (const_tree block)
26950 {
26951 tree decl;
26952 unsigned int i;
26953
26954 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26955 if (TREE_CODE (decl) == FUNCTION_DECL
26956 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26957 return 0;
26958 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26959 {
26960 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26961 if (TREE_CODE (decl) == FUNCTION_DECL
26962 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26963 return 0;
26964 }
26965
26966 return 1;
26967 }
26968
26969 /* Hash table routines for file_hash. */
26970
26971 bool
26972 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26973 {
26974 return filename_cmp (p1->filename, p2) == 0;
26975 }
26976
26977 hashval_t
26978 dwarf_file_hasher::hash (dwarf_file_data *p)
26979 {
26980 return htab_hash_string (p->filename);
26981 }
26982
26983 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26984 dwarf2out.c) and return its "index". The index of each (known) filename is
26985 just a unique number which is associated with only that one filename. We
26986 need such numbers for the sake of generating labels (in the .debug_sfnames
26987 section) and references to those files numbers (in the .debug_srcinfo
26988 and .debug_macinfo sections). If the filename given as an argument is not
26989 found in our current list, add it to the list and assign it the next
26990 available unique index number. */
26991
26992 static struct dwarf_file_data *
26993 lookup_filename (const char *file_name)
26994 {
26995 struct dwarf_file_data * created;
26996
26997 if (!file_name)
26998 return NULL;
26999
27000 dwarf_file_data **slot
27001 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27002 INSERT);
27003 if (*slot)
27004 return *slot;
27005
27006 created = ggc_alloc<dwarf_file_data> ();
27007 created->filename = file_name;
27008 created->emitted_number = 0;
27009 *slot = created;
27010 return created;
27011 }
27012
27013 /* If the assembler will construct the file table, then translate the compiler
27014 internal file table number into the assembler file table number, and emit
27015 a .file directive if we haven't already emitted one yet. The file table
27016 numbers are different because we prune debug info for unused variables and
27017 types, which may include filenames. */
27018
27019 static int
27020 maybe_emit_file (struct dwarf_file_data * fd)
27021 {
27022 if (! fd->emitted_number)
27023 {
27024 if (last_emitted_file)
27025 fd->emitted_number = last_emitted_file->emitted_number + 1;
27026 else
27027 fd->emitted_number = 1;
27028 last_emitted_file = fd;
27029
27030 if (output_asm_line_debug_info ())
27031 {
27032 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27033 output_quoted_string (asm_out_file,
27034 remap_debug_filename (fd->filename));
27035 fputc ('\n', asm_out_file);
27036 }
27037 }
27038
27039 return fd->emitted_number;
27040 }
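
/* For example, the first time a previously unseen filename is emitted
while the assembler is producing the line table, this prints something
like (a sketch; the number only reflects emission order and the name is
made up):
.file 2 "lib/util.c"
with the name possibly rewritten by remap_debug_filename according to
-fdebug-prefix-map/-ffile-prefix-map. */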
27041
27042 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27043 That generation should happen after function debug info has been
27044 generated. The value of the attribute is the constant value of ARG. */
27045
27046 static void
27047 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27048 {
27049 die_arg_entry entry;
27050
27051 if (!die || !arg)
27052 return;
27053
27054 gcc_assert (early_dwarf);
27055
27056 if (!tmpl_value_parm_die_table)
27057 vec_alloc (tmpl_value_parm_die_table, 32);
27058
27059 entry.die = die;
27060 entry.arg = arg;
27061 vec_safe_push (tmpl_value_parm_die_table, entry);
27062 }
27063
27064 /* Return TRUE if T is an instance of a generic type, FALSE
27065 otherwise. */
27066
27067 static bool
27068 generic_type_p (tree t)
27069 {
27070 if (t == NULL_TREE || !TYPE_P (t))
27071 return false;
27072 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27073 }
27074
27075 /* Schedule the generation of the generic parameter dies for the
27076 instance of generic type T. The proper generation itself is later
27077 done by gen_scheduled_generic_parms_dies. */
27078
27079 static void
27080 schedule_generic_params_dies_gen (tree t)
27081 {
27082 if (!generic_type_p (t))
27083 return;
27084
27085 gcc_assert (early_dwarf);
27086
27087 if (!generic_type_instances)
27088 vec_alloc (generic_type_instances, 256);
27089
27090 vec_safe_push (generic_type_instances, t);
27091 }
27092
27093 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27094 by append_entry_to_tmpl_value_parm_die_table. This function must
27095 be called after function DIEs have been generated. */
27096
27097 static void
27098 gen_remaining_tmpl_value_param_die_attribute (void)
27099 {
27100 if (tmpl_value_parm_die_table)
27101 {
27102 unsigned i, j;
27103 die_arg_entry *e;
27104
27105 /* We do this in two phases - first get the cases we can
27106 handle during early-finish, preserving those we cannot
27107 (containing symbolic constants where we don't yet know
27108 whether we are going to output the referenced symbols).
27109 For those we try again at late-finish. */
27110 j = 0;
27111 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27112 {
27113 if (!e->die->removed
27114 && !tree_add_const_value_attribute (e->die, e->arg))
27115 {
27116 dw_loc_descr_ref loc = NULL;
27117 if (! early_dwarf
27118 && (dwarf_version >= 5 || !dwarf_strict))
27119 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27120 if (loc)
27121 add_AT_loc (e->die, DW_AT_location, loc);
27122 else
27123 (*tmpl_value_parm_die_table)[j++] = *e;
27124 }
27125 }
27126 tmpl_value_parm_die_table->truncate (j);
27127 }
27128 }
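
/* As an illustration (a sketch, not from the sources): for a C++
instantiation A<3> of "template <int N> struct A;", the DIE scheduled
earlier for the value parameter N ends up with DW_AT_const_value 3.
Entries whose value is still symbolic at early finish are retried here
at late finish, possibly as a DW_AT_location expression when not in
strict pre-DWARF 5 mode. */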
27129
27130 /* Generate generic parameters DIEs for instances of generic types
27131 that have been previously scheduled by
27132 schedule_generic_params_dies_gen. This function must be called
27133 after all the types of the CU have been laid out. */
27134
27135 static void
27136 gen_scheduled_generic_parms_dies (void)
27137 {
27138 unsigned i;
27139 tree t;
27140
27141 if (!generic_type_instances)
27142 return;
27143
27144 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27145 if (COMPLETE_TYPE_P (t))
27146 gen_generic_params_dies (t);
27147
27148 generic_type_instances = NULL;
27149 }
27150
27151
27152 /* Replace DW_AT_name for DECL with NAME. */
27153
27154 static void
27155 dwarf2out_set_name (tree decl, tree name)
27156 {
27157 dw_die_ref die;
27158 dw_attr_node *attr;
27159 const char *dname;
27160
27161 die = TYPE_SYMTAB_DIE (decl);
27162 if (!die)
27163 return;
27164
27165 dname = dwarf2_name (name, 0);
27166 if (!dname)
27167 return;
27168
27169 attr = get_AT (die, DW_AT_name);
27170 if (attr)
27171 {
27172 struct indirect_string_node *node;
27173
27174 node = find_AT_string (dname);
27175 /* replace the string. */
27176 attr->dw_attr_val.v.val_str = node;
27177 }
27178
27179 else
27180 add_name_attribute (die, dname);
27181 }
27182
27183 /* True if before or during processing of the first function being emitted. */
27184 static bool in_first_function_p = true;
27185 /* True if a loc_note seen during a dwarf2out_var_location call might still
27186 be before the first real instruction, at an address equal to .Ltext0. */
27187 static bool maybe_at_text_label_p = true;
27188 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27189 static unsigned int first_loclabel_num_not_at_text_label;
27190
27191 /* Look ahead for a real insn, or for a begin stmt marker. */
27192
27193 static rtx_insn *
27194 dwarf2out_next_real_insn (rtx_insn *loc_note)
27195 {
27196 rtx_insn *next_real = NEXT_INSN (loc_note);
27197
27198 while (next_real)
27199 if (INSN_P (next_real))
27200 break;
27201 else
27202 next_real = NEXT_INSN (next_real);
27203
27204 return next_real;
27205 }
27206
27207 /* Called by the final INSN scan whenever we see a var location. We
27208 use it to drop labels in the right places, and throw the location in
27209 our lookup table. */
27210
27211 static void
27212 dwarf2out_var_location (rtx_insn *loc_note)
27213 {
27214 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27215 struct var_loc_node *newloc;
27216 rtx_insn *next_real, *next_note;
27217 rtx_insn *call_insn = NULL;
27218 static const char *last_label;
27219 static const char *last_postcall_label;
27220 static bool last_in_cold_section_p;
27221 static rtx_insn *expected_next_loc_note;
27222 tree decl;
27223 bool var_loc_p;
27224 var_loc_view view = 0;
27225
27226 if (!NOTE_P (loc_note))
27227 {
27228 if (CALL_P (loc_note))
27229 {
27230 maybe_reset_location_view (loc_note, cur_line_info_table);
27231 call_site_count++;
27232 if (SIBLING_CALL_P (loc_note))
27233 tail_call_site_count++;
27234 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27235 {
27236 call_insn = loc_note;
27237 loc_note = NULL;
27238 var_loc_p = false;
27239
27240 next_real = dwarf2out_next_real_insn (call_insn);
27241 next_note = NULL;
27242 cached_next_real_insn = NULL;
27243 goto create_label;
27244 }
27245 if (optimize == 0 && !flag_var_tracking)
27246 {
27247 /* When the var-tracking pass is not running, there is no note
27248 for indirect calls whose target is compile-time known. In this
27249 case, process such calls specifically so that we generate call
27250 sites for them anyway. */
27251 rtx x = PATTERN (loc_note);
27252 if (GET_CODE (x) == PARALLEL)
27253 x = XVECEXP (x, 0, 0);
27254 if (GET_CODE (x) == SET)
27255 x = SET_SRC (x);
27256 if (GET_CODE (x) == CALL)
27257 x = XEXP (x, 0);
27258 if (!MEM_P (x)
27259 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27260 || !SYMBOL_REF_DECL (XEXP (x, 0))
27261 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27262 != FUNCTION_DECL))
27263 {
27264 call_insn = loc_note;
27265 loc_note = NULL;
27266 var_loc_p = false;
27267
27268 next_real = dwarf2out_next_real_insn (call_insn);
27269 next_note = NULL;
27270 cached_next_real_insn = NULL;
27271 goto create_label;
27272 }
27273 }
27274 }
27275 else if (!debug_variable_location_views)
27276 gcc_unreachable ();
27277 else
27278 maybe_reset_location_view (loc_note, cur_line_info_table);
27279
27280 return;
27281 }
27282
27283 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27284 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27285 return;
27286
27287 /* Optimize processing a large consecutive sequence of location
27288 notes so we don't spend too much time in next_real_insn. If the
27289 next insn is another location note, remember the next_real_insn
27290 calculation for next time. */
27291 next_real = cached_next_real_insn;
27292 if (next_real)
27293 {
27294 if (expected_next_loc_note != loc_note)
27295 next_real = NULL;
27296 }
27297
27298 next_note = NEXT_INSN (loc_note);
27299 if (! next_note
27300 || next_note->deleted ()
27301 || ! NOTE_P (next_note)
27302 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27303 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27304 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27305 next_note = NULL;
27306
27307 if (! next_real)
27308 next_real = dwarf2out_next_real_insn (loc_note);
27309
27310 if (next_note)
27311 {
27312 expected_next_loc_note = next_note;
27313 cached_next_real_insn = next_real;
27314 }
27315 else
27316 cached_next_real_insn = NULL;
27317
27318 /* If there are no instructions which would be affected by this note,
27319 don't do anything. */
27320 if (var_loc_p
27321 && next_real == NULL_RTX
27322 && !NOTE_DURING_CALL_P (loc_note))
27323 return;
27324
27325 create_label:
27326
27327 if (next_real == NULL_RTX)
27328 next_real = get_last_insn ();
27329
27330 /* If there were any real insns between the note we processed last
27331 time and this note (or if it is the first note), clear
27332 last_{,postcall_}label so that they are not reused this time. */
27333 if (last_var_location_insn == NULL_RTX
27334 || last_var_location_insn != next_real
27335 || last_in_cold_section_p != in_cold_section_p)
27336 {
27337 last_label = NULL;
27338 last_postcall_label = NULL;
27339 }
27340
27341 if (var_loc_p)
27342 {
27343 const char *label
27344 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27345 view = cur_line_info_table->view;
27346 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27347 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27348 if (newloc == NULL)
27349 return;
27350 }
27351 else
27352 {
27353 decl = NULL_TREE;
27354 newloc = NULL;
27355 }
27356
27357 /* If there were no real insns between the note we processed last
27358 time and this note, use the label we emitted last time. Otherwise
27359 create a new label and emit it. */
27360 if (last_label == NULL)
27361 {
27362 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27363 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27364 loclabel_num++;
27365 last_label = ggc_strdup (loclabel);
27366 /* See if loclabel might be equal to .Ltext0. If yes,
27367 bump first_loclabel_num_not_at_text_label. */
27368 if (!have_multiple_function_sections
27369 && in_first_function_p
27370 && maybe_at_text_label_p)
27371 {
27372 static rtx_insn *last_start;
27373 rtx_insn *insn;
27374 for (insn = loc_note; insn; insn = previous_insn (insn))
27375 if (insn == last_start)
27376 break;
27377 else if (!NONDEBUG_INSN_P (insn))
27378 continue;
27379 else
27380 {
27381 rtx body = PATTERN (insn);
27382 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27383 continue;
27384 /* Inline asm could occupy zero bytes. */
27385 else if (GET_CODE (body) == ASM_INPUT
27386 || asm_noperands (body) >= 0)
27387 continue;
27388 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27389 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27390 continue;
27391 #endif
27392 else
27393 {
27394 /* Assume insn has non-zero length. */
27395 maybe_at_text_label_p = false;
27396 break;
27397 }
27398 }
27399 if (maybe_at_text_label_p)
27400 {
27401 last_start = loc_note;
27402 first_loclabel_num_not_at_text_label = loclabel_num;
27403 }
27404 }
27405 }
27406
27407 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27408 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27409
27410 if (!var_loc_p)
27411 {
27412 struct call_arg_loc_node *ca_loc
27413 = ggc_cleared_alloc<call_arg_loc_node> ();
27414 rtx_insn *prev = call_insn;
27415
27416 ca_loc->call_arg_loc_note
27417 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27418 ca_loc->next = NULL;
27419 ca_loc->label = last_label;
27420 gcc_assert (prev
27421 && (CALL_P (prev)
27422 || (NONJUMP_INSN_P (prev)
27423 && GET_CODE (PATTERN (prev)) == SEQUENCE
27424 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27425 if (!CALL_P (prev))
27426 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27427 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27428
27429 /* Look for a SYMBOL_REF in the "prev" instruction. */
27430 rtx x = get_call_rtx_from (PATTERN (prev));
27431 if (x)
27432 {
27433 /* Try to get the call symbol, if any. */
27434 if (MEM_P (XEXP (x, 0)))
27435 x = XEXP (x, 0);
27436 /* First, look for a memory access to a symbol_ref. */
27437 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27438 && SYMBOL_REF_DECL (XEXP (x, 0))
27439 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27440 ca_loc->symbol_ref = XEXP (x, 0);
27441 /* Otherwise, look at a compile-time known user-level function
27442 declaration. */
27443 else if (MEM_P (x)
27444 && MEM_EXPR (x)
27445 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27446 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27447 }
27448
27449 ca_loc->block = insn_scope (prev);
27450 if (call_arg_locations)
27451 call_arg_loc_last->next = ca_loc;
27452 else
27453 call_arg_locations = ca_loc;
27454 call_arg_loc_last = ca_loc;
27455 }
27456 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27457 {
27458 newloc->label = last_label;
27459 newloc->view = view;
27460 }
27461 else
27462 {
27463 if (!last_postcall_label)
27464 {
27465 sprintf (loclabel, "%s-1", last_label);
27466 last_postcall_label = ggc_strdup (loclabel);
27467 }
27468 newloc->label = last_postcall_label;
27469 /* ??? This view is at last_label, not last_label-1, but we
27470 could only assume view at last_label-1 is zero if we could
27471 assume calls always have length greater than one. This is
27472 probably true in general, though there might be a rare
27473 exception to this rule, e.g. if a call insn is optimized out
27474 by target magic. Then, even the -1 in the label will be
27475 wrong, which might invalidate the range. Anyway, using view,
27476 though technically possibly incorrect, will work as far as
27477 ranges go: since L-1 is in the middle of the call insn,
27478 (L-1).0 and (L-1).V shouldn't make any difference, and having
27479 the loclist entry refer to the .loc entry might be useful, so
27480 leave it like this. */
27481 newloc->view = view;
27482 }
27483
27484 if (var_loc_p && flag_debug_asm)
27485 {
27486 const char *name, *sep, *patstr;
27487 if (decl && DECL_NAME (decl))
27488 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27489 else
27490 name = "";
27491 if (NOTE_VAR_LOCATION_LOC (loc_note))
27492 {
27493 sep = " => ";
27494 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27495 }
27496 else
27497 {
27498 sep = " ";
27499 patstr = "RESET";
27500 }
27501 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27502 name, sep, patstr);
27503 }
27504
27505 last_var_location_insn = next_real;
27506 last_in_cold_section_p = in_cold_section_p;
27507 }
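
/* With flag_debug_asm (-dA), the var-location path above annotates the
assembly next to each .LVL label, e.g. (a sketch with made-up operands):
.LVL5:
# DEBUG i => (reg:SI 0 ax)
or "# DEBUG i RESET" when the variable's location becomes unknown. */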
27508
27509 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27510 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27511 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27512 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27513 BLOCK_FRAGMENT_ORIGIN links. */
27514 static bool
27515 block_within_block_p (tree block, tree outer, bool bothways)
27516 {
27517 if (block == outer)
27518 return true;
27519
27520 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27521 for (tree context = BLOCK_SUPERCONTEXT (block);
27522 context != outer;
27523 context = BLOCK_SUPERCONTEXT (context))
27524 if (!context || TREE_CODE (context) != BLOCK)
27525 return false;
27526
27527 if (!bothways)
27528 return true;
27529
27530 /* Now check that each block is actually referenced by its
27531 parent. */
27532 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27533 context = BLOCK_SUPERCONTEXT (context))
27534 {
27535 if (BLOCK_FRAGMENT_ORIGIN (context))
27536 {
27537 gcc_assert (!BLOCK_SUBBLOCKS (context));
27538 context = BLOCK_FRAGMENT_ORIGIN (context);
27539 }
27540 for (tree sub = BLOCK_SUBBLOCKS (context);
27541 sub != block;
27542 sub = BLOCK_CHAIN (sub))
27543 if (!sub)
27544 return false;
27545 if (context == outer)
27546 return true;
27547 else
27548 block = context;
27549 }
27550 }
27551
27552 /* Called during final while assembling the marker of the entry point
27553 for an inlined function. */
27554
27555 static void
27556 dwarf2out_inline_entry (tree block)
27557 {
27558 gcc_assert (debug_inline_points);
27559
27560 /* If we can't represent it, don't bother. */
27561 if (!(dwarf_version >= 3 || !dwarf_strict))
27562 return;
27563
27564 gcc_assert (DECL_P (block_ultimate_origin (block)));
27565
27566 /* Sanity check the block tree. This would catch a case in which
27567 BLOCK got removed from the tree reachable from the outermost
27568 lexical block, but got retained in markers. It would still link
27569 back to its parents, but some ancestor would be missing a link
27570 down the path to the sub BLOCK. If the block got removed, its
27571 BLOCK_NUMBER will not be a usable value. */
27572 if (flag_checking)
27573 gcc_assert (block_within_block_p (block,
27574 DECL_INITIAL (current_function_decl),
27575 true));
27576
27577 gcc_assert (inlined_function_outer_scope_p (block));
27578 gcc_assert (!BLOCK_DIE (block));
27579
27580 if (BLOCK_FRAGMENT_ORIGIN (block))
27581 block = BLOCK_FRAGMENT_ORIGIN (block);
27582 /* Can the entry point ever not be at the beginning of an
27583 unfragmented lexical block? */
27584 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27585 || (cur_line_info_table
27586 && !ZERO_VIEW_P (cur_line_info_table->view))))
27587 return;
27588
27589 if (!inline_entry_data_table)
27590 inline_entry_data_table
27591 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27592
27593
27594 inline_entry_data **iedp
27595 = inline_entry_data_table->find_slot_with_hash (block,
27596 htab_hash_pointer (block),
27597 INSERT);
27598 if (*iedp)
27599 /* ??? Ideally, we'd record all entry points for the same inlined
27600 function (some may have been duplicated by e.g. unrolling), but
27601 we have no way to represent that ATM. */
27602 return;
27603
27604 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27605 ied->block = block;
27606 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27607 ied->label_num = BLOCK_NUMBER (block);
27608 if (cur_line_info_table)
27609 ied->view = cur_line_info_table->view;
27610
27611 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27612
27613 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27614 BLOCK_NUMBER (block));
27615 ASM_OUTPUT_LABEL (asm_out_file, label);
27616 }
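
/* The label emitted above marks the address where the inlined body starts;
it is recorded (together with the block number and current view) in
inline_entry_data_table and picked up later when the corresponding
DW_TAG_inlined_subroutine DIE is generated, typically to describe its
entry point. The label text is target dependent; with the usual ELF
conventions it would look roughly like ".LBI<block-number>:". */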
27617
27618 /* Called from finalize_size_functions for size functions so that their body
27619 can be encoded in the debug info to describe the layout of variable-length
27620 structures. */
27621
27622 static void
27623 dwarf2out_size_function (tree decl)
27624 {
27625 function_to_dwarf_procedure (decl);
27626 }
27627
27628 /* Note in one location list that text section has changed. */
27629
27630 int
27631 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27632 {
27633 var_loc_list *list = *slot;
27634 if (list->first)
27635 list->last_before_switch
27636 = list->last->next ? list->last->next : list->last;
27637 return 1;
27638 }
27639
27640 /* Note in all location lists that text section has changed. */
27641
27642 static void
27643 var_location_switch_text_section (void)
27644 {
27645 if (decl_loc_table == NULL)
27646 return;
27647
27648 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27649 }
27650
27651 /* Create a new line number table. */
27652
27653 static dw_line_info_table *
27654 new_line_info_table (void)
27655 {
27656 dw_line_info_table *table;
27657
27658 table = ggc_cleared_alloc<dw_line_info_table> ();
27659 table->file_num = 1;
27660 table->line_num = 1;
27661 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27662 FORCE_RESET_NEXT_VIEW (table->view);
27663 table->symviews_since_reset = 0;
27664
27665 return table;
27666 }
27667
27668 /* Look up the "current" table into which we emit line info, so
27669 that we don't have to do it for every source line. */
27670
27671 static void
27672 set_cur_line_info_table (section *sec)
27673 {
27674 dw_line_info_table *table;
27675
27676 if (sec == text_section)
27677 table = text_section_line_info;
27678 else if (sec == cold_text_section)
27679 {
27680 table = cold_text_section_line_info;
27681 if (!table)
27682 {
27683 cold_text_section_line_info = table = new_line_info_table ();
27684 table->end_label = cold_end_label;
27685 }
27686 }
27687 else
27688 {
27689 const char *end_label;
27690
27691 if (crtl->has_bb_partition)
27692 {
27693 if (in_cold_section_p)
27694 end_label = crtl->subsections.cold_section_end_label;
27695 else
27696 end_label = crtl->subsections.hot_section_end_label;
27697 }
27698 else
27699 {
27700 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27701 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27702 current_function_funcdef_no);
27703 end_label = ggc_strdup (label);
27704 }
27705
27706 table = new_line_info_table ();
27707 table->end_label = end_label;
27708
27709 vec_safe_push (separate_line_info, table);
27710 }
27711
27712 if (output_asm_line_debug_info ())
27713 table->is_stmt = (cur_line_info_table
27714 ? cur_line_info_table->is_stmt
27715 : DWARF_LINE_DEFAULT_IS_STMT_START);
27716 cur_line_info_table = table;
27717 }
27718
27719
27720 /* We need to reset the locations at the beginning of each
27721 function. We can't do this in the end_function hook, because the
27722 declarations that use the locations won't have been output when
27723 that hook is called. Also compute have_multiple_function_sections here. */
27724
27725 static void
27726 dwarf2out_begin_function (tree fun)
27727 {
27728 section *sec = function_section (fun);
27729
27730 if (sec != text_section)
27731 have_multiple_function_sections = true;
27732
27733 if (crtl->has_bb_partition && !cold_text_section)
27734 {
27735 gcc_assert (current_function_decl == fun);
27736 cold_text_section = unlikely_text_section ();
27737 switch_to_section (cold_text_section);
27738 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27739 switch_to_section (sec);
27740 }
27741
27742 dwarf2out_note_section_used ();
27743 call_site_count = 0;
27744 tail_call_site_count = 0;
27745
27746 set_cur_line_info_table (sec);
27747 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27748 }
27749
27750 /* Helper function of dwarf2out_end_function, called only after emitting
27751 the very first function into assembly. Check if some .debug_loc range
27752 might end with a .LVL* label that could be equal to .Ltext0.
27753 In that case we must force using absolute addresses in .debug_loc ranges,
27754 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27755 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27756 list terminator.
27757 Set have_multiple_function_sections to true in that case and
27758 terminate htab traversal. */
27759
27760 int
27761 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27762 {
27763 var_loc_list *entry = *slot;
27764 struct var_loc_node *node;
27765
27766 node = entry->first;
27767 if (node && node->next && node->next->label)
27768 {
27769 unsigned int i;
27770 const char *label = node->next->label;
27771 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27772
27773 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27774 {
27775 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27776 if (strcmp (label, loclabel) == 0)
27777 {
27778 have_multiple_function_sections = true;
27779 return 0;
27780 }
27781 }
27782 }
27783 return 1;
27784 }
27785
27786 /* Hook called after emitting a function into assembly.
27787 This does something only for the very first function emitted. */
27788
27789 static void
27790 dwarf2out_end_function (unsigned int)
27791 {
27792 if (in_first_function_p
27793 && !have_multiple_function_sections
27794 && first_loclabel_num_not_at_text_label
27795 && decl_loc_table)
27796 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27797 in_first_function_p = false;
27798 maybe_at_text_label_p = false;
27799 }
27800
27801 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27802 front-ends register a translation unit even before dwarf2out_init is
27803 called. */
27804 static tree main_translation_unit = NULL_TREE;
27805
27806 /* Hook called by front-ends after they built their main translation unit.
27807 Associate comp_unit_die to UNIT. */
27808
27809 static void
27810 dwarf2out_register_main_translation_unit (tree unit)
27811 {
27812 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27813 && main_translation_unit == NULL_TREE);
27814 main_translation_unit = unit;
27815 /* If dwarf2out_init has not been called yet, it will perform the association
27816 itself looking at main_translation_unit. */
27817 if (decl_die_table != NULL)
27818 equate_decl_number_to_die (unit, comp_unit_die ());
27819 }
27820
27821 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27822
27823 static void
27824 push_dw_line_info_entry (dw_line_info_table *table,
27825 enum dw_line_info_opcode opcode, unsigned int val)
27826 {
27827 dw_line_info_entry e;
27828 e.opcode = opcode;
27829 e.val = val;
27830 vec_safe_push (table->entries, e);
27831 }
27832
27833 /* Output a label to mark the beginning of a source code line entry
27834 and record information relating to this source line, in
27835 'line_info_table' for later output of the .debug_line section. */
27836 /* ??? The discriminator parameter ought to be unsigned. */
27837
27838 static void
27839 dwarf2out_source_line (unsigned int line, unsigned int column,
27840 const char *filename,
27841 int discriminator, bool is_stmt)
27842 {
27843 unsigned int file_num;
27844 dw_line_info_table *table;
27845 static var_loc_view lvugid;
27846
27847 if (debug_info_level < DINFO_LEVEL_TERSE)
27848 return;
27849
27850 table = cur_line_info_table;
27851
27852 if (line == 0)
27853 {
27854 if (debug_variable_location_views
27855 && output_asm_line_debug_info ()
27856 && table && !RESETTING_VIEW_P (table->view))
27857 {
27858 /* If we're using the assembler to compute view numbers, we
27859 can't issue a .loc directive for line zero, so we can't
27860 get a view number at this point. We might attempt to
27861 compute it from the previous view, or equate it to a
27862 subsequent view (though it might not be there!), but
27863 since we're omitting the line number entry, we might as
27864 well omit the view number as well. That means pretending
27865 it's a view number zero, which might very well turn out
27866 to be correct. ??? Extend the assembler so that the
27867 compiler could emit e.g. ".locview .LVU#", to output a
27868 view without changing line number information. We'd then
27869 have to count it in symviews_since_reset; when it's omitted,
27870 it doesn't count. */
27871 if (!zero_view_p)
27872 zero_view_p = BITMAP_GGC_ALLOC ();
27873 bitmap_set_bit (zero_view_p, table->view);
27874 if (flag_debug_asm)
27875 {
27876 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27877 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27878 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27879 ASM_COMMENT_START);
27880 assemble_name (asm_out_file, label);
27881 putc ('\n', asm_out_file);
27882 }
27883 table->view = ++lvugid;
27884 }
27885 return;
27886 }
27887
27888 /* The discriminator column was added in DWARF 4. Simplify the code
27889 below by dropping the discriminator if we're not supposed to output it. */
27890 if (dwarf_version < 4 && dwarf_strict)
27891 discriminator = 0;
27892
27893 if (!debug_column_info)
27894 column = 0;
27895
27896 file_num = maybe_emit_file (lookup_filename (filename));
27897
27898 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27899 the debugger has used the second (possibly duplicate) line number
27900 at the beginning of the function to mark the end of the prologue.
27901 We could eliminate any other duplicates within the function. For
27902 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27903 that second line number entry. */
27904 /* Recall that this end-of-prologue indication is *not* the same thing
27905 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27906 to which the hook corresponds, follows the last insn that was
27907 emitted by gen_prologue. What we need is to precede the first insn
27908 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27909 insn that corresponds to something the user wrote. These may be
27910 very different locations once scheduling is enabled. */
27911
27912 if (0 && file_num == table->file_num
27913 && line == table->line_num
27914 && column == table->column_num
27915 && discriminator == table->discrim_num
27916 && is_stmt == table->is_stmt)
27917 return;
27918
27919 switch_to_section (current_function_section ());
27920
27921 /* If requested, emit something human-readable. */
27922 if (flag_debug_asm)
27923 {
27924 if (debug_column_info)
27925 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27926 filename, line, column);
27927 else
27928 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27929 filename, line);
27930 }
27931
27932 if (output_asm_line_debug_info ())
27933 {
27934 /* Emit the .loc directive understood by GNU as. */
27935 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27936 file_num, line, is_stmt, discriminator */
27937 fputs ("\t.loc ", asm_out_file);
27938 fprint_ul (asm_out_file, file_num);
27939 putc (' ', asm_out_file);
27940 fprint_ul (asm_out_file, line);
27941 putc (' ', asm_out_file);
27942 fprint_ul (asm_out_file, column);
27943
27944 if (is_stmt != table->is_stmt)
27945 {
27946 fputs (" is_stmt ", asm_out_file);
27947 putc (is_stmt ? '1' : '0', asm_out_file);
27948 }
27949 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27950 {
27951 gcc_assert (discriminator > 0);
27952 fputs (" discriminator ", asm_out_file);
27953 fprint_ul (asm_out_file, (unsigned long) discriminator);
27954 }
27955 if (debug_variable_location_views)
27956 {
27957 if (!RESETTING_VIEW_P (table->view))
27958 {
27959 table->symviews_since_reset++;
27960 if (table->symviews_since_reset > symview_upper_bound)
27961 symview_upper_bound = table->symviews_since_reset;
27962 /* When we're using the assembler to compute view
27963 numbers, we output symbolic labels after "view" in
27964 .loc directives, and the assembler will set them for
27965 us, so that we can refer to the view numbers in
27966 location lists. The only exceptions are when we know
27967 a view will be zero: "-0" is a forced reset, used
27968 e.g. in the beginning of functions, whereas "0" tells
27969 the assembler to check that there was a PC change
27970 since the previous view, in a way that implicitly
27971 resets the next view. */
27972 fputs (" view ", asm_out_file);
27973 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27974 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27975 assemble_name (asm_out_file, label);
27976 table->view = ++lvugid;
27977 }
27978 else
27979 {
27980 table->symviews_since_reset = 0;
27981 if (FORCE_RESETTING_VIEW_P (table->view))
27982 fputs (" view -0", asm_out_file);
27983 else
27984 fputs (" view 0", asm_out_file);
27985 /* Mark the present view as a zero view. Earlier debug
27986 binds may have already added its id to loclists to be
27987 emitted later, so we can't reuse the id for something
27988 else. However, it's good to know whether a view is
27989 known to be zero, because then we may be able to
27990 optimize out locviews that are all zeros, so take
27991 note of it in zero_view_p. */
27992 if (!zero_view_p)
27993 zero_view_p = BITMAP_GGC_ALLOC ();
27994 bitmap_set_bit (zero_view_p, lvugid);
27995 table->view = ++lvugid;
27996 }
27997 }
27998 putc ('\n', asm_out_file);
27999 }
28000 else
28001 {
28002 unsigned int label_num = ++line_info_label_num;
28003
28004 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28005
28006 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28007 push_dw_line_info_entry (table, LI_adv_address, label_num);
28008 else
28009 push_dw_line_info_entry (table, LI_set_address, label_num);
28010 if (debug_variable_location_views)
28011 {
28012 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28013 if (resetting)
28014 table->view = 0;
28015
28016 if (flag_debug_asm)
28017 fprintf (asm_out_file, "\t%s view %s%d\n",
28018 ASM_COMMENT_START,
28019 resetting ? "-" : "",
28020 table->view);
28021
28022 table->view++;
28023 }
28024 if (file_num != table->file_num)
28025 push_dw_line_info_entry (table, LI_set_file, file_num);
28026 if (discriminator != table->discrim_num)
28027 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28028 if (is_stmt != table->is_stmt)
28029 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28030 push_dw_line_info_entry (table, LI_set_line, line);
28031 if (debug_column_info)
28032 push_dw_line_info_entry (table, LI_set_column, column);
28033 }
28034
28035 table->file_num = file_num;
28036 table->line_num = line;
28037 table->column_num = column;
28038 table->discrim_num = discriminator;
28039 table->is_stmt = is_stmt;
28040 table->in_use = true;
28041 }
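
/* When the assembler builds the line table (output_asm_line_debug_info),
the code above emits GNU as .loc directives; a representative line, with
made-up numbers, would be:
.loc 2 31 7 is_stmt 0 discriminator 1 view .LVU9
i.e. file 2, line 31, column 7, where the is_stmt, discriminator and
(with location views) view operands appear only when needed. Otherwise
the same information is queued as dw_line_info_table entries and encoded
into .debug_line later in this file. */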
28042
28043 /* Record the beginning of a new source file. */
28044
28045 static void
28046 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28047 {
28048 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28049 {
28050 macinfo_entry e;
28051 e.code = DW_MACINFO_start_file;
28052 e.lineno = lineno;
28053 e.info = ggc_strdup (filename);
28054 vec_safe_push (macinfo_table, e);
28055 }
28056 }
28057
28058 /* Record the end of a source file. */
28059
28060 static void
28061 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28062 {
28063 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28064 {
28065 macinfo_entry e;
28066 e.code = DW_MACINFO_end_file;
28067 e.lineno = lineno;
28068 e.info = NULL;
28069 vec_safe_push (macinfo_table, e);
28070 }
28071 }
28072
28073 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28074 the tail part of the directive line, i.e. the part which is past the
28075 initial whitespace, #, whitespace, directive-name, whitespace part. */
28076
28077 static void
28078 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28079 const char *buffer ATTRIBUTE_UNUSED)
28080 {
28081 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28082 {
28083 macinfo_entry e;
28084 /* Insert a dummy first entry to be able to optimize the whole
28085 predefined macro block using DW_MACRO_import. */
28086 if (macinfo_table->is_empty () && lineno <= 1)
28087 {
28088 e.code = 0;
28089 e.lineno = 0;
28090 e.info = NULL;
28091 vec_safe_push (macinfo_table, e);
28092 }
28093 e.code = DW_MACINFO_define;
28094 e.lineno = lineno;
28095 e.info = ggc_strdup (buffer);
28096 vec_safe_push (macinfo_table, e);
28097 }
28098 }
28099
28100 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28101 the tail part of the directive line, i.e. the part which is past the
28102 initial whitespace, #, whitespace, directive-name, whitespace part. */
28103
28104 static void
28105 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28106 const char *buffer ATTRIBUTE_UNUSED)
28107 {
28108 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28109 {
28110 macinfo_entry e;
28111 /* Insert a dummy first entry to be able to optimize the whole
28112 predefined macro block using DW_MACRO_import. */
28113 if (macinfo_table->is_empty () && lineno <= 1)
28114 {
28115 e.code = 0;
28116 e.lineno = 0;
28117 e.info = NULL;
28118 vec_safe_push (macinfo_table, e);
28119 }
28120 e.code = DW_MACINFO_undef;
28121 e.lineno = lineno;
28122 e.info = ggc_strdup (buffer);
28123 vec_safe_push (macinfo_table, e);
28124 }
28125 }
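
/* For example (a sketch with an assumed input): a source line
"#define PI 3.14" on line 7 is recorded here as a macinfo_entry with
code DW_MACINFO_define, lineno 7 and info "PI 3.14" (the text after the
directive name), and "#undef PI" likewise with DW_MACINFO_undef and
info "PI". output_macinfo_op below turns these entries into
.debug_macinfo/.debug_macro records when -g3 style macro info is
requested. */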
28126
28127 /* Helpers to manipulate hash table of CUs. */
28128
28129 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28130 {
28131 static inline hashval_t hash (const macinfo_entry *);
28132 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28133 };
28134
28135 inline hashval_t
28136 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28137 {
28138 return htab_hash_string (entry->info);
28139 }
28140
28141 inline bool
28142 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28143 const macinfo_entry *entry2)
28144 {
28145 return !strcmp (entry1->info, entry2->info);
28146 }
28147
28148 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28149
28150 /* Output a single .debug_macinfo entry. */
28151
28152 static void
28153 output_macinfo_op (macinfo_entry *ref)
28154 {
28155 int file_num;
28156 size_t len;
28157 struct indirect_string_node *node;
28158 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28159 struct dwarf_file_data *fd;
28160
28161 switch (ref->code)
28162 {
28163 case DW_MACINFO_start_file:
28164 fd = lookup_filename (ref->info);
28165 file_num = maybe_emit_file (fd);
28166 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28167 dw2_asm_output_data_uleb128 (ref->lineno,
28168 "Included from line number %lu",
28169 (unsigned long) ref->lineno);
28170 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28171 break;
28172 case DW_MACINFO_end_file:
28173 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28174 break;
28175 case DW_MACINFO_define:
28176 case DW_MACINFO_undef:
28177 len = strlen (ref->info) + 1;
28178 if (!dwarf_strict
28179 && len > DWARF_OFFSET_SIZE
28180 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28181 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28182 {
28183 ref->code = ref->code == DW_MACINFO_define
28184 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28185 output_macinfo_op (ref);
28186 return;
28187 }
28188 dw2_asm_output_data (1, ref->code,
28189 ref->code == DW_MACINFO_define
28190 ? "Define macro" : "Undefine macro");
28191 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28192 (unsigned long) ref->lineno);
28193 dw2_asm_output_nstring (ref->info, -1, "The macro");
28194 break;
28195 case DW_MACRO_define_strp:
28196 case DW_MACRO_undef_strp:
28197 node = find_AT_string (ref->info);
28198 gcc_assert (node
28199 && (node->form == DW_FORM_strp
28200 || node->form == DW_FORM_GNU_str_index));
28201 dw2_asm_output_data (1, ref->code,
28202 ref->code == DW_MACRO_define_strp
28203 ? "Define macro strp"
28204 : "Undefine macro strp");
28205 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28206 (unsigned long) ref->lineno);
28207 if (node->form == DW_FORM_strp)
28208 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28209 debug_str_section, "The macro: \"%s\"",
28210 ref->info);
28211 else
28212 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28213 ref->info);
28214 break;
28215 case DW_MACRO_import:
28216 dw2_asm_output_data (1, ref->code, "Import");
28217 ASM_GENERATE_INTERNAL_LABEL (label,
28218 DEBUG_MACRO_SECTION_LABEL,
28219 ref->lineno + macinfo_label_base);
28220 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28221 break;
28222 default:
28223 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28224 ASM_COMMENT_START, (unsigned long) ref->code);
28225 break;
28226 }
28227 }
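
/* Worked example for the DW_MACINFO_define case above (macro name and
   line number are hypothetical): "#define ANSWER 42" recorded at line 7
   is emitted as

     .byte    0x1          # DW_MACINFO_define
     .uleb128 7            # At line number 7
     .asciz   "ANSWER 42"  # The macro

   When the string (including its terminating NUL) is longer than
   DWARF_OFFSET_SIZE, -gdwarf is not strict and the target supports
   mergeable string sections, the op is rewritten to
   DW_MACRO_define_strp so only an offset into .debug_str is emitted
   instead of the inline string.  */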
28228
/* Attempt to make a sequence of define/undef macinfo ops shareable with
   other compilation unit .debug_macinfo sections.  IDX is the index of
   the first define/undef op in the range; return the number of ops that
   should be emitted in a comdat .debug_macinfo section and emit
   a DW_MACRO_import entry referencing it.
   If the define/undef entry should be emitted normally, return 0.  */
28235
28236 static unsigned
optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
			macinfo_hash_type **macinfo_htab)
28239 {
28240 macinfo_entry *first, *second, *cur, *inc;
28241 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28242 unsigned char checksum[16];
28243 struct md5_ctx ctx;
28244 char *grp_name, *tail;
28245 const char *base;
28246 unsigned int i, count, encoded_filename_len, linebuf_len;
28247 macinfo_entry **slot;
28248
28249 first = &(*macinfo_table)[idx];
28250 second = &(*macinfo_table)[idx + 1];
28251
28252 /* Optimize only if there are at least two consecutive define/undef ops,
28253 and either all of them are before first DW_MACINFO_start_file
28254 with lineno {0,1} (i.e. predefined macro block), or all of them are
28255 in some included header file. */
28256 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28257 return 0;
28258 if (vec_safe_is_empty (files))
28259 {
28260 if (first->lineno > 1 || second->lineno > 1)
28261 return 0;
28262 }
28263 else if (first->lineno == 0)
28264 return 0;
28265
28266 /* Find the last define/undef entry that can be grouped together
28267 with first and at the same time compute md5 checksum of their
28268 codes, linenumbers and strings. */
28269 md5_init_ctx (&ctx);
28270 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28271 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28272 break;
28273 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28274 break;
28275 else
28276 {
28277 unsigned char code = cur->code;
28278 md5_process_bytes (&code, 1, &ctx);
28279 checksum_uleb128 (cur->lineno, &ctx);
28280 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28281 }
28282 md5_finish_ctx (&ctx, checksum);
28283 count = i - idx;
28284
28285 /* From the containing include filename (if any) pick up just
28286 usable characters from its basename. */
28287 if (vec_safe_is_empty (files))
28288 base = "";
28289 else
28290 base = lbasename (files->last ().info);
28291 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28292 if (ISIDNUM (base[i]) || base[i] == '.')
28293 encoded_filename_len++;
28294 /* Count . at the end. */
28295 if (encoded_filename_len)
28296 encoded_filename_len++;
28297
28298 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28299 linebuf_len = strlen (linebuf);
28300
28301 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
28302 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28303 + 16 * 2 + 1);
28304 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28305 tail = grp_name + 4;
28306 if (encoded_filename_len)
28307 {
28308 for (i = 0; base[i]; i++)
28309 if (ISIDNUM (base[i]) || base[i] == '.')
28310 *tail++ = base[i];
28311 *tail++ = '.';
28312 }
28313 memcpy (tail, linebuf, linebuf_len);
28314 tail += linebuf_len;
28315 *tail++ = '.';
28316 for (i = 0; i < 16; i++)
28317 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28318
28319 /* Construct a macinfo_entry for DW_MACRO_import
28320 in the empty vector entry before the first define/undef. */
28321 inc = &(*macinfo_table)[idx - 1];
28322 inc->code = DW_MACRO_import;
28323 inc->lineno = 0;
28324 inc->info = ggc_strdup (grp_name);
28325 if (!*macinfo_htab)
28326 *macinfo_htab = new macinfo_hash_type (10);
28327 /* Avoid emitting duplicates. */
28328 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28329 if (*slot != NULL)
28330 {
28331 inc->code = 0;
28332 inc->info = NULL;
28333 /* If such an entry has been used before, just emit
28334 a DW_MACRO_import op. */
28335 inc = *slot;
28336 output_macinfo_op (inc);
28337 /* And clear all macinfo_entry in the range to avoid emitting them
28338 in the second pass. */
28339 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28340 {
28341 cur->code = 0;
28342 cur->info = NULL;
28343 }
28344 }
28345 else
28346 {
28347 *slot = inc;
28348 inc->lineno = (*macinfo_htab)->elements ();
28349 output_macinfo_op (inc);
28350 }
28351 return count;
28352 }
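
/* Example of the comdat group names built above (the file name, line
   number and checksum are hypothetical): with 4-byte DWARF offsets, a
   run of define/undef ops from a header "config.h" whose first op is at
   line 31 produces a group name such as

     wm4.config.h.31.9f86d081884c7d659a2feaa0c55ad015

   while the predefined macro block (no containing file, lineno 0) gives
   "wm4.0.<md5>".  Identical runs produced by other CUs hash to the same
   name, so the linker keeps only one copy of the comdat section.  */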
28353
28354 /* Save any strings needed by the macinfo table in the debug str
28355 table. All strings must be collected into the table by the time
28356 index_string is called. */
28357
28358 static void
save_macinfo_strings (void)
28360 {
28361 unsigned len;
28362 unsigned i;
28363 macinfo_entry *ref;
28364
28365 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28366 {
28367 switch (ref->code)
28368 {
28369 /* Match the logic in output_macinfo_op to decide on
28370 indirect strings. */
28371 case DW_MACINFO_define:
28372 case DW_MACINFO_undef:
28373 len = strlen (ref->info) + 1;
28374 if (!dwarf_strict
28375 && len > DWARF_OFFSET_SIZE
28376 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28377 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28378 set_indirect_string (find_AT_string (ref->info));
28379 break;
28380 case DW_MACRO_define_strp:
28381 case DW_MACRO_undef_strp:
28382 set_indirect_string (find_AT_string (ref->info));
28383 break;
28384 default:
28385 break;
28386 }
28387 }
28388 }
28389
28390 /* Output macinfo section(s). */
28391
28392 static void
output_macinfo (const char *debug_line_label, bool early_lto_debug)
28394 {
28395 unsigned i;
28396 unsigned long length = vec_safe_length (macinfo_table);
28397 macinfo_entry *ref;
28398 vec<macinfo_entry, va_gc> *files = NULL;
28399 macinfo_hash_type *macinfo_htab = NULL;
28400 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28401
28402 if (! length)
28403 return;
28404
28405 /* output_macinfo* uses these interchangeably. */
28406 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28407 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28408 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28409 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28410
28411 /* AIX Assembler inserts the length, so adjust the reference to match the
28412 offset expected by debuggers. */
28413 strcpy (dl_section_ref, debug_line_label);
28414 if (XCOFF_DEBUGGING_INFO)
28415 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28416
28417 /* For .debug_macro emit the section header. */
28418 if (!dwarf_strict || dwarf_version >= 5)
28419 {
28420 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28421 "DWARF macro version number");
28422 if (DWARF_OFFSET_SIZE == 8)
28423 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28424 else
28425 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28426 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28427 debug_line_section, NULL);
28428 }
28429
/* The first loop emits the primary .debug_macinfo section and clears
   each macinfo_entry after it has been emitted.  If a longer range of
   define/undef ops can be optimized using DW_MACRO_import, the
   DW_MACRO_import op is emitted and stored in the vector slot before
   the first define/undef of the range; the define/undef ops themselves
   are not emitted here and are kept for the second loop (or cleared if
   an identical group has already been imported).  */
28436 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28437 {
28438 switch (ref->code)
28439 {
28440 case DW_MACINFO_start_file:
28441 vec_safe_push (files, *ref);
28442 break;
28443 case DW_MACINFO_end_file:
28444 if (!vec_safe_is_empty (files))
28445 files->pop ();
28446 break;
28447 case DW_MACINFO_define:
28448 case DW_MACINFO_undef:
28449 if ((!dwarf_strict || dwarf_version >= 5)
28450 && HAVE_COMDAT_GROUP
28451 && vec_safe_length (files) != 1
28452 && i > 0
28453 && i + 1 < length
28454 && (*macinfo_table)[i - 1].code == 0)
28455 {
28456 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28457 if (count)
28458 {
28459 i += count - 1;
28460 continue;
28461 }
28462 }
28463 break;
28464 case 0:
28465 /* A dummy entry may be inserted at the beginning to be able
28466 to optimize the whole block of predefined macros. */
28467 if (i == 0)
28468 continue;
28469 default:
28470 break;
28471 }
28472 output_macinfo_op (ref);
28473 ref->info = NULL;
28474 ref->code = 0;
28475 }
28476
28477 if (!macinfo_htab)
28478 return;
28479
28480 /* Save the number of transparent includes so we can adjust the
28481 label number for the fat LTO object DWARF. */
28482 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28483
28484 delete macinfo_htab;
28485 macinfo_htab = NULL;
28486
28487 /* If any DW_MACRO_import were used, on those DW_MACRO_import entries
28488 terminate the current chain and switch to a new comdat .debug_macinfo
28489 section and emit the define/undef entries within it. */
28490 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28491 switch (ref->code)
28492 {
28493 case 0:
28494 continue;
28495 case DW_MACRO_import:
28496 {
28497 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28498 tree comdat_key = get_identifier (ref->info);
28499 /* Terminate the previous .debug_macinfo section. */
28500 dw2_asm_output_data (1, 0, "End compilation unit");
28501 targetm.asm_out.named_section (debug_macinfo_section_name,
28502 SECTION_DEBUG
28503 | SECTION_LINKONCE
28504 | (early_lto_debug
28505 ? SECTION_EXCLUDE : 0),
28506 comdat_key);
28507 ASM_GENERATE_INTERNAL_LABEL (label,
28508 DEBUG_MACRO_SECTION_LABEL,
28509 ref->lineno + macinfo_label_base);
28510 ASM_OUTPUT_LABEL (asm_out_file, label);
28511 ref->code = 0;
28512 ref->info = NULL;
28513 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28514 "DWARF macro version number");
28515 if (DWARF_OFFSET_SIZE == 8)
28516 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28517 else
28518 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28519 }
28520 break;
28521 case DW_MACINFO_define:
28522 case DW_MACINFO_undef:
28523 output_macinfo_op (ref);
28524 ref->code = 0;
28525 ref->info = NULL;
28526 break;
28527 default:
28528 gcc_unreachable ();
28529 }
28530
28531 macinfo_label_base += macinfo_label_base_adj;
28532 }
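
/* For reference, the .debug_macro header emitted at the top of this
   function for the common case of 32-bit DWARF and -gdwarf-4 looks
   like the following (directive spellings are target-dependent; this
   is an illustration, not an additional code path):

     .value  0x4            # DWARF macro version number
     .byte   0x2            # Flags: 32-bit, lineptr present
     .long   <debug_line_label>

   The comdat sections started in the second loop instead use flag
   values 0 or 1, since they carry no lineptr of their own.  */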
28533
/* Initialize the various sections and labels for dwarf output.  If
   EARLY_LTO_DEBUG is true, set up the LTO early debug sections instead.
   Returns the generation (zero-based number of times the function was
   called).  */
28537
28538 static unsigned
init_sections_and_labels (bool early_lto_debug)
28540 {
28541 /* As we may get called multiple times have a generation count for
28542 labels. */
28543 static unsigned generation = 0;
28544
28545 if (early_lto_debug)
28546 {
28547 if (!dwarf_split_debug_info)
28548 {
28549 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28550 SECTION_DEBUG | SECTION_EXCLUDE,
28551 NULL);
28552 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28553 SECTION_DEBUG | SECTION_EXCLUDE,
28554 NULL);
28555 debug_macinfo_section_name
28556 = ((dwarf_strict && dwarf_version < 5)
28557 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28558 debug_macinfo_section = get_section (debug_macinfo_section_name,
28559 SECTION_DEBUG
28560 | SECTION_EXCLUDE, NULL);
28561 }
28562 else
28563 {
28564 /* ??? Which of the following do we need early? */
28565 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28566 SECTION_DEBUG | SECTION_EXCLUDE,
28567 NULL);
28568 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28569 SECTION_DEBUG | SECTION_EXCLUDE,
28570 NULL);
28571 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28572 SECTION_DEBUG
28573 | SECTION_EXCLUDE, NULL);
28574 debug_skeleton_abbrev_section
28575 = get_section (DEBUG_LTO_ABBREV_SECTION,
28576 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28577 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28578 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28579 generation);
28580
28581 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28582 stay in the main .o, but the skeleton_line goes into the split
28583 off dwo. */
28584 debug_skeleton_line_section
28585 = get_section (DEBUG_LTO_LINE_SECTION,
28586 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28587 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28588 DEBUG_SKELETON_LINE_SECTION_LABEL,
28589 generation);
28590 debug_str_offsets_section
28591 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28592 SECTION_DEBUG | SECTION_EXCLUDE,
28593 NULL);
28594 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28595 DEBUG_SKELETON_INFO_SECTION_LABEL,
28596 generation);
28597 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28598 DEBUG_STR_DWO_SECTION_FLAGS,
28599 NULL);
28600 debug_macinfo_section_name
28601 = ((dwarf_strict && dwarf_version < 5)
28602 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28603 debug_macinfo_section = get_section (debug_macinfo_section_name,
28604 SECTION_DEBUG | SECTION_EXCLUDE,
28605 NULL);
28606 }
28607 /* For macro info and the file table we have to refer to a
28608 debug_line section. */
28609 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28610 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28611 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28612 DEBUG_LINE_SECTION_LABEL, generation);
28613
28614 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28615 DEBUG_STR_SECTION_FLAGS
28616 | SECTION_EXCLUDE, NULL);
28617 if (!dwarf_split_debug_info)
28618 debug_line_str_section
28619 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28620 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28621 }
28622 else
28623 {
28624 if (!dwarf_split_debug_info)
28625 {
28626 debug_info_section = get_section (DEBUG_INFO_SECTION,
28627 SECTION_DEBUG, NULL);
28628 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28629 SECTION_DEBUG, NULL);
28630 debug_loc_section = get_section (dwarf_version >= 5
28631 ? DEBUG_LOCLISTS_SECTION
28632 : DEBUG_LOC_SECTION,
28633 SECTION_DEBUG, NULL);
28634 debug_macinfo_section_name
28635 = ((dwarf_strict && dwarf_version < 5)
28636 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28637 debug_macinfo_section = get_section (debug_macinfo_section_name,
28638 SECTION_DEBUG, NULL);
28639 }
28640 else
28641 {
28642 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28643 SECTION_DEBUG | SECTION_EXCLUDE,
28644 NULL);
28645 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28646 SECTION_DEBUG | SECTION_EXCLUDE,
28647 NULL);
28648 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28649 SECTION_DEBUG, NULL);
28650 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28651 SECTION_DEBUG, NULL);
28652 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28653 SECTION_DEBUG, NULL);
28654 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28655 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28656 generation);
28657
28658 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28659 stay in the main .o, but the skeleton_line goes into the
28660 split off dwo. */
28661 debug_skeleton_line_section
28662 = get_section (DEBUG_DWO_LINE_SECTION,
28663 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28664 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28665 DEBUG_SKELETON_LINE_SECTION_LABEL,
28666 generation);
28667 debug_str_offsets_section
28668 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28669 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28670 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28671 DEBUG_SKELETON_INFO_SECTION_LABEL,
28672 generation);
28673 debug_loc_section = get_section (dwarf_version >= 5
28674 ? DEBUG_DWO_LOCLISTS_SECTION
28675 : DEBUG_DWO_LOC_SECTION,
28676 SECTION_DEBUG | SECTION_EXCLUDE,
28677 NULL);
28678 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28679 DEBUG_STR_DWO_SECTION_FLAGS,
28680 NULL);
28681 debug_macinfo_section_name
28682 = ((dwarf_strict && dwarf_version < 5)
28683 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28684 debug_macinfo_section = get_section (debug_macinfo_section_name,
28685 SECTION_DEBUG | SECTION_EXCLUDE,
28686 NULL);
28687 }
28688 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28689 SECTION_DEBUG, NULL);
28690 debug_line_section = get_section (DEBUG_LINE_SECTION,
28691 SECTION_DEBUG, NULL);
28692 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28693 SECTION_DEBUG, NULL);
28694 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28695 SECTION_DEBUG, NULL);
28696 debug_str_section = get_section (DEBUG_STR_SECTION,
28697 DEBUG_STR_SECTION_FLAGS, NULL);
28698 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28699 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28700 DEBUG_STR_SECTION_FLAGS, NULL);
28701
28702 debug_ranges_section = get_section (dwarf_version >= 5
28703 ? DEBUG_RNGLISTS_SECTION
28704 : DEBUG_RANGES_SECTION,
28705 SECTION_DEBUG, NULL);
28706 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28707 SECTION_DEBUG, NULL);
28708 }
28709
28710 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28711 DEBUG_ABBREV_SECTION_LABEL, generation);
28712 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28713 DEBUG_INFO_SECTION_LABEL, generation);
28714 info_section_emitted = false;
28715 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28716 DEBUG_LINE_SECTION_LABEL, generation);
28717 /* There are up to 4 unique ranges labels per generation.
28718 See also output_rnglists. */
28719 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28720 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28721 if (dwarf_version >= 5 && dwarf_split_debug_info)
28722 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28723 DEBUG_RANGES_SECTION_LABEL,
28724 1 + generation * 4);
28725 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28726 DEBUG_ADDR_SECTION_LABEL, generation);
28727 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28728 (dwarf_strict && dwarf_version < 5)
28729 ? DEBUG_MACINFO_SECTION_LABEL
28730 : DEBUG_MACRO_SECTION_LABEL, generation);
28731 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28732 generation);
28733
28734 ++generation;
28735 return generation - 1;
28736 }
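
/* Example of the generation counting above (label spellings are
   target-dependent and shown only for illustration): the first call
   returns 0 and creates labels such as .Ldebug_info0 and .Ldebug_line0,
   plus the ranges labels .Ldebug_ranges0/.Ldebug_ranges1; a second call
   (e.g. early LTO debug followed by the fat-object debug) returns 1 and
   uses .Ldebug_info1 and .Ldebug_ranges4/.Ldebug_ranges5, since up to
   four distinct ranges labels are reserved per generation.  */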
28737
28738 /* Set up for Dwarf output at the start of compilation. */
28739
28740 static void
dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28742 {
28743 /* Allocate the file_table. */
28744 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28745
28746 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28747 /* Allocate the decl_die_table. */
28748 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28749
28750 /* Allocate the decl_loc_table. */
28751 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28752
28753 /* Allocate the cached_dw_loc_list_table. */
28754 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28755
28756 /* Allocate the initial hunk of the decl_scope_table. */
28757 vec_alloc (decl_scope_table, 256);
28758
28759 /* Allocate the initial hunk of the abbrev_die_table. */
28760 vec_alloc (abbrev_die_table, 256);
28761 /* Zero-th entry is allocated, but unused. */
28762 abbrev_die_table->quick_push (NULL);
28763
28764 /* Allocate the dwarf_proc_stack_usage_map. */
28765 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28766
28767 /* Allocate the pubtypes and pubnames vectors. */
28768 vec_alloc (pubname_table, 32);
28769 vec_alloc (pubtype_table, 32);
28770
28771 vec_alloc (incomplete_types, 64);
28772
28773 vec_alloc (used_rtx_array, 32);
28774
28775 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28776 vec_alloc (macinfo_table, 64);
28777 #endif
28778
28779 /* If front-ends already registered a main translation unit but we were not
28780 ready to perform the association, do this now. */
28781 if (main_translation_unit != NULL_TREE)
28782 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28783 }
28784
/* Called before compile () starts outputting functions, variables
   and toplevel asms into assembly.  */
28787
28788 static void
dwarf2out_assembly_start (void)
28790 {
28791 if (text_section_line_info)
28792 return;
28793
28794 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28795 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28796 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28797 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28798 COLD_TEXT_SECTION_LABEL, 0);
28799 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28800
28801 switch_to_section (text_section);
28802 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28803 #endif
28804
28805 /* Make sure the line number table for .text always exists. */
28806 text_section_line_info = new_line_info_table ();
28807 text_section_line_info->end_label = text_end_label;
28808
28809 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28810 cur_line_info_table = text_section_line_info;
28811 #endif
28812
28813 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28814 && dwarf2out_do_cfi_asm ()
28815 && !dwarf2out_do_eh_frame ())
28816 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28817 }
28818
28819 /* A helper function for dwarf2out_finish called through
28820 htab_traverse. Assign a string its index. All strings must be
28821 collected into the table by the time index_string is called,
28822 because the indexing code relies on htab_traverse to traverse nodes
28823 in the same order for each run. */
28824
28825 int
index_string (indirect_string_node **h, unsigned int *index)
28827 {
28828 indirect_string_node *node = *h;
28829
28830 find_string_form (node);
28831 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28832 {
28833 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28834 node->index = *index;
28835 *index += 1;
28836 }
28837 return 1;
28838 }
28839
28840 /* A helper function for output_indirect_strings called through
28841 htab_traverse. Output the offset to a string and update the
28842 current offset. */
28843
28844 int
output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28846 {
28847 indirect_string_node *node = *h;
28848
28849 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28850 {
28851 /* Assert that this node has been assigned an index. */
28852 gcc_assert (node->index != NO_INDEX_ASSIGNED
28853 && node->index != NOT_INDEXED);
28854 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28855 "indexed string 0x%x: %s", node->index, node->str);
28856 *offset += strlen (node->str) + 1;
28857 }
28858 return 1;
28859 }
28860
28861 /* A helper function for dwarf2out_finish called through
28862 htab_traverse. Output the indexed string. */
28863
28864 int
output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28866 {
28867 struct indirect_string_node *node = *h;
28868
28869 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28870 {
28871 /* Assert that the strings are output in the same order as their
28872 indexes were assigned. */
28873 gcc_assert (*cur_idx == node->index);
28874 assemble_string (node->str, strlen (node->str) + 1);
28875 *cur_idx += 1;
28876 }
28877 return 1;
28878 }
28879
28880 /* A helper function for dwarf2out_finish called through
28881 htab_traverse. Emit one queued .debug_str string. */
28882
28883 int
output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28885 {
28886 struct indirect_string_node *node = *h;
28887
28888 node->form = find_string_form (node);
28889 if (node->form == form && node->refcount > 0)
28890 {
28891 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28892 assemble_string (node->str, strlen (node->str) + 1);
28893 }
28894
28895 return 1;
28896 }
28897
28898 /* Output the indexed string table. */
28899
28900 static void
output_indirect_strings (void)
28902 {
28903 switch_to_section (debug_str_section);
28904 if (!dwarf_split_debug_info)
28905 debug_str_hash->traverse<enum dwarf_form,
28906 output_indirect_string> (DW_FORM_strp);
28907 else
28908 {
28909 unsigned int offset = 0;
28910 unsigned int cur_idx = 0;
28911
28912 if (skeleton_debug_str_hash)
28913 skeleton_debug_str_hash->traverse<enum dwarf_form,
28914 output_indirect_string> (DW_FORM_strp);
28915
28916 switch_to_section (debug_str_offsets_section);
28917 debug_str_hash->traverse_noresize
28918 <unsigned int *, output_index_string_offset> (&offset);
28919 switch_to_section (debug_str_dwo_section);
28920 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28921 (&cur_idx);
28922 }
28923 }
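
/* Worked example of the split-DWARF path above (strings are
   hypothetical): if the indexed strings are "foo" and "barbaz", the two
   traversals emit

     .debug_str_offsets.dwo:  0, 4          (offset advances by strlen + 1)
     .debug_str.dwo:          "foo\0" "barbaz\0"

   and DW_FORM_GNU_str_index references in the .dwo simply carry the
   indices 0 and 1.  */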
28924
28925 /* Callback for htab_traverse to assign an index to an entry in the
28926 table, and to write that entry to the .debug_addr section. */
28927
28928 int
output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28930 {
28931 addr_table_entry *entry = *slot;
28932
28933 if (entry->refcount == 0)
28934 {
28935 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28936 || entry->index == NOT_INDEXED);
28937 return 1;
28938 }
28939
28940 gcc_assert (entry->index == *cur_index);
28941 (*cur_index)++;
28942
28943 switch (entry->kind)
28944 {
28945 case ate_kind_rtx:
28946 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28947 "0x%x", entry->index);
28948 break;
28949 case ate_kind_rtx_dtprel:
28950 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28951 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28952 DWARF2_ADDR_SIZE,
28953 entry->addr.rtl);
28954 fputc ('\n', asm_out_file);
28955 break;
28956 case ate_kind_label:
28957 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28958 "0x%x", entry->index);
28959 break;
28960 default:
28961 gcc_unreachable ();
28962 }
28963 return 1;
28964 }
28965
28966 /* Produce the .debug_addr section. */
28967
28968 static void
output_addr_table (void)
28970 {
28971 unsigned int index = 0;
28972 if (addr_index_table == NULL || addr_index_table->size () == 0)
28973 return;
28974
28975 switch_to_section (debug_addr_section);
28976 addr_index_table
28977 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28978 }
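
/* The resulting .debug_addr table is a flat array of DWARF2_ADDR_SIZE
   entries; a DW_OP_GNU_addr_index operand of N selects the N-th slot,
   which is why output_addr_table_entry asserts that indices were
   assigned densely, in traversal order.  */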
28979
28980 #if ENABLE_ASSERT_CHECKING
28981 /* Verify that all marks are clear. */
28982
28983 static void
verify_marks_clear (dw_die_ref die)
28985 {
28986 dw_die_ref c;
28987
28988 gcc_assert (! die->die_mark);
28989 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28990 }
28991 #endif /* ENABLE_ASSERT_CHECKING */
28992
28993 /* Clear the marks for a die and its children.
28994 Be cool if the mark isn't set. */
28995
28996 static void
prune_unmark_dies (dw_die_ref die)
28998 {
28999 dw_die_ref c;
29000
29001 if (die->die_mark)
29002 die->die_mark = 0;
29003 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29004 }
29005
29006 /* Given LOC that is referenced by a DIE we're marking as used, find all
29007 referenced DWARF procedures it references and mark them as used. */
29008
29009 static void
prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29011 {
29012 for (; loc != NULL; loc = loc->dw_loc_next)
29013 switch (loc->dw_loc_opc)
29014 {
29015 case DW_OP_implicit_pointer:
29016 case DW_OP_convert:
29017 case DW_OP_reinterpret:
29018 case DW_OP_GNU_implicit_pointer:
29019 case DW_OP_GNU_convert:
29020 case DW_OP_GNU_reinterpret:
29021 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29022 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29023 break;
29024 case DW_OP_GNU_variable_value:
29025 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29026 {
29027 dw_die_ref ref
29028 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29029 if (ref == NULL)
29030 break;
29031 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29032 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29033 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29034 }
29035 /* FALLTHRU */
29036 case DW_OP_call2:
29037 case DW_OP_call4:
29038 case DW_OP_call_ref:
29039 case DW_OP_const_type:
29040 case DW_OP_GNU_const_type:
29041 case DW_OP_GNU_parameter_ref:
29042 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29043 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29044 break;
29045 case DW_OP_regval_type:
29046 case DW_OP_deref_type:
29047 case DW_OP_GNU_regval_type:
29048 case DW_OP_GNU_deref_type:
29049 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29050 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29051 break;
29052 case DW_OP_entry_value:
29053 case DW_OP_GNU_entry_value:
29054 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29055 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29056 break;
29057 default:
29058 break;
29059 }
29060 }
29061
29062 /* Given DIE that we're marking as used, find any other dies
29063 it references as attributes and mark them as used. */
29064
29065 static void
prune_unused_types_walk_attribs (dw_die_ref die)
29067 {
29068 dw_attr_node *a;
29069 unsigned ix;
29070
29071 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29072 {
29073 switch (AT_class (a))
29074 {
29075 /* Make sure DWARF procedures referenced by location descriptions will
29076 get emitted. */
29077 case dw_val_class_loc:
29078 prune_unused_types_walk_loc_descr (AT_loc (a));
29079 break;
29080 case dw_val_class_loc_list:
29081 for (dw_loc_list_ref list = AT_loc_list (a);
29082 list != NULL;
29083 list = list->dw_loc_next)
29084 prune_unused_types_walk_loc_descr (list->expr);
29085 break;
29086
29087 case dw_val_class_view_list:
29088 /* This points to a loc_list in another attribute, so it's
29089 already covered. */
29090 break;
29091
29092 case dw_val_class_die_ref:
29093 /* A reference to another DIE.
29094 Make sure that it will get emitted.
29095 If it was broken out into a comdat group, don't follow it. */
29096 if (! AT_ref (a)->comdat_type_p
29097 || a->dw_attr == DW_AT_specification)
29098 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29099 break;
29100
29101 case dw_val_class_str:
29102 /* Set the string's refcount to 0 so that prune_unused_types_mark
29103 accounts properly for it. */
29104 a->dw_attr_val.v.val_str->refcount = 0;
29105 break;
29106
29107 default:
29108 break;
29109 }
29110 }
29111 }
29112
29113 /* Mark the generic parameters and arguments children DIEs of DIE. */
29114
29115 static void
prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29117 {
29118 dw_die_ref c;
29119
29120 if (die == NULL || die->die_child == NULL)
29121 return;
29122 c = die->die_child;
29123 do
29124 {
29125 if (is_template_parameter (c))
29126 prune_unused_types_mark (c, 1);
29127 c = c->die_sib;
29128 } while (c && c != die->die_child);
29129 }
29130
29131 /* Mark DIE as being used. If DOKIDS is true, then walk down
29132 to DIE's children. */
29133
29134 static void
prune_unused_types_mark (dw_die_ref die, int dokids)
29136 {
29137 dw_die_ref c;
29138
29139 if (die->die_mark == 0)
29140 {
29141 /* We haven't done this node yet. Mark it as used. */
29142 die->die_mark = 1;
29143 /* If this is the DIE of a generic type instantiation,
29144 mark the children DIEs that describe its generic parms and
29145 args. */
29146 prune_unused_types_mark_generic_parms_dies (die);
29147
29148 /* We also have to mark its parents as used.
29149 (But we don't want to mark our parent's kids due to this,
29150 unless it is a class.) */
29151 if (die->die_parent)
29152 prune_unused_types_mark (die->die_parent,
29153 class_scope_p (die->die_parent));
29154
29155 /* Mark any referenced nodes. */
29156 prune_unused_types_walk_attribs (die);
29157
29158 /* If this node is a specification,
29159 also mark the definition, if it exists. */
29160 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29161 prune_unused_types_mark (die->die_definition, 1);
29162 }
29163
29164 if (dokids && die->die_mark != 2)
29165 {
29166 /* We need to walk the children, but haven't done so yet.
29167 Remember that we've walked the kids. */
29168 die->die_mark = 2;
29169
29170 /* If this is an array type, we need to make sure our
29171 kids get marked, even if they're types. If we're
29172 breaking out types into comdat sections, do this
29173 for all type definitions. */
29174 if (die->die_tag == DW_TAG_array_type
29175 || (use_debug_types
29176 && is_type_die (die) && ! is_declaration_die (die)))
29177 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29178 else
29179 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29180 }
29181 }
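
/* Summary of the die_mark protocol used by the prune_unused_types
   machinery (added as a reading aid): 0 means the DIE has not been
   visited, 1 means it is used but its children have not been walked
   yet, and 2 means both the DIE and its children have been processed.
   prune_unused_types_prune later keeps exactly the DIEs whose mark is
   non-zero, and prune_unmark_dies resets everything back to 0.  */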
29182
/* For local classes, check whether any static member functions were emitted
   and if so, mark them.  */
29185
29186 static void
prune_unused_types_walk_local_classes (dw_die_ref die)
29188 {
29189 dw_die_ref c;
29190
29191 if (die->die_mark == 2)
29192 return;
29193
29194 switch (die->die_tag)
29195 {
29196 case DW_TAG_structure_type:
29197 case DW_TAG_union_type:
29198 case DW_TAG_class_type:
29199 break;
29200
29201 case DW_TAG_subprogram:
29202 if (!get_AT_flag (die, DW_AT_declaration)
29203 || die->die_definition != NULL)
29204 prune_unused_types_mark (die, 1);
29205 return;
29206
29207 default:
29208 return;
29209 }
29210
29211 /* Mark children. */
29212 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29213 }
29214
29215 /* Walk the tree DIE and mark types that we actually use. */
29216
29217 static void
prune_unused_types_walk (dw_die_ref die)
29219 {
29220 dw_die_ref c;
29221
29222 /* Don't do anything if this node is already marked and
29223 children have been marked as well. */
29224 if (die->die_mark == 2)
29225 return;
29226
29227 switch (die->die_tag)
29228 {
29229 case DW_TAG_structure_type:
29230 case DW_TAG_union_type:
29231 case DW_TAG_class_type:
29232 if (die->die_perennial_p)
29233 break;
29234
29235 for (c = die->die_parent; c; c = c->die_parent)
29236 if (c->die_tag == DW_TAG_subprogram)
29237 break;
29238
29239 /* Finding used static member functions inside of classes
29240 is needed just for local classes, because for other classes
29241 static member function DIEs with DW_AT_specification
29242 are emitted outside of the DW_TAG_*_type. If we ever change
29243 it, we'd need to call this even for non-local classes. */
29244 if (c)
29245 prune_unused_types_walk_local_classes (die);
29246
29247 /* It's a type node --- don't mark it. */
29248 return;
29249
29250 case DW_TAG_const_type:
29251 case DW_TAG_packed_type:
29252 case DW_TAG_pointer_type:
29253 case DW_TAG_reference_type:
29254 case DW_TAG_rvalue_reference_type:
29255 case DW_TAG_volatile_type:
29256 case DW_TAG_typedef:
29257 case DW_TAG_array_type:
29258 case DW_TAG_interface_type:
29259 case DW_TAG_friend:
29260 case DW_TAG_enumeration_type:
29261 case DW_TAG_subroutine_type:
29262 case DW_TAG_string_type:
29263 case DW_TAG_set_type:
29264 case DW_TAG_subrange_type:
29265 case DW_TAG_ptr_to_member_type:
29266 case DW_TAG_file_type:
29267 /* Type nodes are useful only when other DIEs reference them --- don't
29268 mark them. */
29269 /* FALLTHROUGH */
29270
29271 case DW_TAG_dwarf_procedure:
29272 /* Likewise for DWARF procedures. */
29273
29274 if (die->die_perennial_p)
29275 break;
29276
29277 return;
29278
29279 default:
29280 /* Mark everything else. */
29281 break;
29282 }
29283
29284 if (die->die_mark == 0)
29285 {
29286 die->die_mark = 1;
29287
29288 /* Now, mark any dies referenced from here. */
29289 prune_unused_types_walk_attribs (die);
29290 }
29291
29292 die->die_mark = 2;
29293
29294 /* Mark children. */
29295 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29296 }
29297
29298 /* Increment the string counts on strings referred to from DIE's
29299 attributes. */
29300
29301 static void
prune_unused_types_update_strings (dw_die_ref die)
29303 {
29304 dw_attr_node *a;
29305 unsigned ix;
29306
29307 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29308 if (AT_class (a) == dw_val_class_str)
29309 {
29310 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29311 s->refcount++;
29312 /* Avoid unnecessarily putting strings that are used less than
29313 twice in the hash table. */
29314 if (s->refcount
29315 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29316 {
29317 indirect_string_node **slot
29318 = debug_str_hash->find_slot_with_hash (s->str,
29319 htab_hash_string (s->str),
29320 INSERT);
29321 gcc_assert (*slot == NULL);
29322 *slot = s;
29323 }
29324 }
29325 }
29326
29327 /* Mark DIE and its children as removed. */
29328
29329 static void
mark_removed (dw_die_ref die)
29331 {
29332 dw_die_ref c;
29333 die->removed = true;
29334 FOR_EACH_CHILD (die, c, mark_removed (c));
29335 }
29336
29337 /* Remove from the tree DIE any dies that aren't marked. */
29338
29339 static void
prune_unused_types_prune (dw_die_ref die)
29341 {
29342 dw_die_ref c;
29343
29344 gcc_assert (die->die_mark);
29345 prune_unused_types_update_strings (die);
29346
29347 if (! die->die_child)
29348 return;
29349
29350 c = die->die_child;
29351 do {
29352 dw_die_ref prev = c, next;
29353 for (c = c->die_sib; ! c->die_mark; c = next)
29354 if (c == die->die_child)
29355 {
29356 /* No marked children between 'prev' and the end of the list. */
29357 if (prev == c)
29358 /* No marked children at all. */
29359 die->die_child = NULL;
29360 else
29361 {
29362 prev->die_sib = c->die_sib;
29363 die->die_child = prev;
29364 }
29365 c->die_sib = NULL;
29366 mark_removed (c);
29367 return;
29368 }
29369 else
29370 {
29371 next = c->die_sib;
29372 c->die_sib = NULL;
29373 mark_removed (c);
29374 }
29375
29376 if (c != prev->die_sib)
29377 prev->die_sib = c;
29378 prune_unused_types_prune (c);
29379 } while (c != die->die_child);
29380 }
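
/* Illustrative example for the loop above (DIE names are made up):
   children sit on a circular die_sib list with die->die_child pointing
   at the last child.  If a struct DIE has children A (marked),
   B (unmarked), C (marked), the walk around the circular list unlinks
   B, flags B's whole subtree as removed, rewires A->die_sib to C, and
   then recurses into the surviving children A and C.  */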
29381
29382 /* Remove dies representing declarations that we never use. */
29383
29384 static void
prune_unused_types (void)
29386 {
29387 unsigned int i;
29388 limbo_die_node *node;
29389 comdat_type_node *ctnode;
29390 pubname_entry *pub;
29391 dw_die_ref base_type;
29392
29393 #if ENABLE_ASSERT_CHECKING
29394 /* All the marks should already be clear. */
29395 verify_marks_clear (comp_unit_die ());
29396 for (node = limbo_die_list; node; node = node->next)
29397 verify_marks_clear (node->die);
29398 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29399 verify_marks_clear (ctnode->root_die);
29400 #endif /* ENABLE_ASSERT_CHECKING */
29401
29402 /* Mark types that are used in global variables. */
29403 premark_types_used_by_global_vars ();
29404
29405 /* Set the mark on nodes that are actually used. */
29406 prune_unused_types_walk (comp_unit_die ());
29407 for (node = limbo_die_list; node; node = node->next)
29408 prune_unused_types_walk (node->die);
29409 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29410 {
29411 prune_unused_types_walk (ctnode->root_die);
29412 prune_unused_types_mark (ctnode->type_die, 1);
29413 }
29414
29415 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29416 are unusual in that they are pubnames that are the children of pubtypes.
29417 They should only be marked via their parent DW_TAG_enumeration_type die,
29418 not as roots in themselves. */
29419 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29420 if (pub->die->die_tag != DW_TAG_enumerator)
29421 prune_unused_types_mark (pub->die, 1);
29422 for (i = 0; base_types.iterate (i, &base_type); i++)
29423 prune_unused_types_mark (base_type, 1);
29424
29425 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29426 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29427 callees). */
29428 cgraph_node *cnode;
29429 FOR_EACH_FUNCTION (cnode)
29430 if (cnode->referred_to_p (false))
29431 {
29432 dw_die_ref die = lookup_decl_die (cnode->decl);
29433 if (die == NULL || die->die_mark)
29434 continue;
29435 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29436 if (e->caller != cnode
29437 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29438 {
29439 prune_unused_types_mark (die, 1);
29440 break;
29441 }
29442 }
29443
29444 if (debug_str_hash)
29445 debug_str_hash->empty ();
29446 if (skeleton_debug_str_hash)
29447 skeleton_debug_str_hash->empty ();
29448 prune_unused_types_prune (comp_unit_die ());
29449 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29450 {
29451 node = *pnode;
29452 if (!node->die->die_mark)
29453 *pnode = node->next;
29454 else
29455 {
29456 prune_unused_types_prune (node->die);
29457 pnode = &node->next;
29458 }
29459 }
29460 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29461 prune_unused_types_prune (ctnode->root_die);
29462
29463 /* Leave the marks clear. */
29464 prune_unmark_dies (comp_unit_die ());
29465 for (node = limbo_die_list; node; node = node->next)
29466 prune_unmark_dies (node->die);
29467 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29468 prune_unmark_dies (ctnode->root_die);
29469 }
29470
29471 /* Helpers to manipulate hash table of comdat type units. */
29472
29473 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29474 {
29475 static inline hashval_t hash (const comdat_type_node *);
29476 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29477 };
29478
29479 inline hashval_t
comdat_type_hasher::hash (const comdat_type_node *type_node)
29481 {
29482 hashval_t h;
29483 memcpy (&h, type_node->signature, sizeof (h));
29484 return h;
29485 }
29486
29487 inline bool
comdat_type_hasher::equal (const comdat_type_node *type_node_1,
			   const comdat_type_node *type_node_2)
29490 {
29491 return (! memcmp (type_node_1->signature, type_node_2->signature,
29492 DWARF_TYPE_SIGNATURE_SIZE));
29493 }
29494
/* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
   to the location where it would have been added had we known its
   DECL_ASSEMBLER_NAME when the other attributes were added.  This will
   probably improve compactness of debug info, removing equivalent
   abbrevs, and hide any differences caused by deferring the
   computation of the assembler name, triggered by e.g. PCH.  */
29501
29502 static inline void
move_linkage_attr (dw_die_ref die)
29504 {
29505 unsigned ix = vec_safe_length (die->die_attr);
29506 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29507
29508 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29509 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29510
29511 while (--ix > 0)
29512 {
29513 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29514
29515 if (prev->dw_attr == DW_AT_decl_line
29516 || prev->dw_attr == DW_AT_decl_column
29517 || prev->dw_attr == DW_AT_name)
29518 break;
29519 }
29520
29521 if (ix != vec_safe_length (die->die_attr) - 1)
29522 {
29523 die->die_attr->pop ();
29524 die->die_attr->quick_insert (ix, linkage);
29525 }
29526 }
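
/* Example of the reordering above (the attribute set is hypothetical):
   if a subprogram DIE ends up with { DW_AT_name, DW_AT_decl_file,
   DW_AT_decl_line, DW_AT_external, DW_AT_linkage_name } because the
   assembler name was computed late, the linkage name is moved to just
   after DW_AT_decl_line, giving the same attribute order (and hence the
   same abbrev) as DIEs whose assembler name was known up front.  */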
29527
29528 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29529 referenced from typed stack ops and count how often they are used. */
29530
29531 static void
mark_base_types (dw_loc_descr_ref loc)
29533 {
29534 dw_die_ref base_type = NULL;
29535
29536 for (; loc; loc = loc->dw_loc_next)
29537 {
29538 switch (loc->dw_loc_opc)
29539 {
29540 case DW_OP_regval_type:
29541 case DW_OP_deref_type:
29542 case DW_OP_GNU_regval_type:
29543 case DW_OP_GNU_deref_type:
29544 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29545 break;
29546 case DW_OP_convert:
29547 case DW_OP_reinterpret:
29548 case DW_OP_GNU_convert:
29549 case DW_OP_GNU_reinterpret:
29550 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29551 continue;
29552 /* FALLTHRU */
29553 case DW_OP_const_type:
29554 case DW_OP_GNU_const_type:
29555 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29556 break;
29557 case DW_OP_entry_value:
29558 case DW_OP_GNU_entry_value:
29559 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29560 continue;
29561 default:
29562 continue;
29563 }
29564 gcc_assert (base_type->die_parent == comp_unit_die ());
29565 if (base_type->die_mark)
29566 base_type->die_mark++;
29567 else
29568 {
29569 base_types.safe_push (base_type);
29570 base_type->die_mark = 1;
29571 }
29572 }
29573 }
29574
29575 /* Comparison function for sorting marked base types. */
29576
29577 static int
base_type_cmp (const void *x, const void *y)
29579 {
29580 dw_die_ref dx = *(const dw_die_ref *) x;
29581 dw_die_ref dy = *(const dw_die_ref *) y;
29582 unsigned int byte_size1, byte_size2;
29583 unsigned int encoding1, encoding2;
29584 unsigned int align1, align2;
29585 if (dx->die_mark > dy->die_mark)
29586 return -1;
29587 if (dx->die_mark < dy->die_mark)
29588 return 1;
29589 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29590 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29591 if (byte_size1 < byte_size2)
29592 return 1;
29593 if (byte_size1 > byte_size2)
29594 return -1;
29595 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29596 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29597 if (encoding1 < encoding2)
29598 return 1;
29599 if (encoding1 > encoding2)
29600 return -1;
29601 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29602 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29603 if (align1 < align2)
29604 return 1;
29605 if (align1 > align2)
29606 return -1;
29607 return 0;
29608 }
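
/* Example ordering produced by the comparator above (counts are
   hypothetical): base types with die_mark usage counts {3, 7, 7, 1}
   sort as {7, 7, 3, 1}; ties on the usage count are broken by byte
   size, encoding and alignment (larger first), so the resulting order
   is deterministic.  */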
29609
/* Move base types marked by mark_base_types as early as possible
   in the CU, sorted by decreasing usage count, both to make the
   uleb128 references as small as possible and to make sure they
   will have die_offset already computed by calc_die_sizes when
   the sizes of typed stack loc ops are computed.  */
29615
29616 static void
move_marked_base_types (void)
29618 {
29619 unsigned int i;
29620 dw_die_ref base_type, die, c;
29621
29622 if (base_types.is_empty ())
29623 return;
29624
29625 /* Sort by decreasing usage count, they will be added again in that
29626 order later on. */
29627 base_types.qsort (base_type_cmp);
29628 die = comp_unit_die ();
29629 c = die->die_child;
29630 do
29631 {
29632 dw_die_ref prev = c;
29633 c = c->die_sib;
29634 while (c->die_mark)
29635 {
29636 remove_child_with_prev (c, prev);
29637 /* As base types got marked, there must be at least
29638 one node other than DW_TAG_base_type. */
29639 gcc_assert (die->die_child != NULL);
29640 c = prev->die_sib;
29641 }
29642 }
29643 while (c != die->die_child);
29644 gcc_assert (die->die_child);
29645 c = die->die_child;
29646 for (i = 0; base_types.iterate (i, &base_type); i++)
29647 {
29648 base_type->die_mark = 0;
29649 base_type->die_sib = c->die_sib;
29650 c->die_sib = base_type;
29651 c = base_type;
29652 }
29653 }
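
/* A minimal sketch (not part of GCC, never compiled) of why the
   placement above matters: operands of typed stack ops reference the
   base type DIE by a uleb128-encoded offset within the CU, and a
   uleb128 value grows by one byte for every 7 bits, as GCC's own
   size_of_uleb128 computes.  Putting the most used base types first
   keeps those operands to a single byte.  */
#if 0
static unsigned int
sketch_size_of_uleb128 (unsigned long value)
{
  unsigned int size = 0;
  do
    {
      value >>= 7;
      size++;
    }
  while (value != 0);
  return size;	/* e.g. 0x7f -> 1 byte, 0x80 -> 2 bytes.  */
}
#endif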
29654
/* Helper function for resolve_addr: attempt to resolve
   one CONST_STRING and return true if successful.  Similarly verify that
   SYMBOL_REFs refer to variables emitted in the current CU.  */
29658
29659 static bool
resolve_one_addr (rtx *addr)
29661 {
29662 rtx rtl = *addr;
29663
29664 if (GET_CODE (rtl) == CONST_STRING)
29665 {
29666 size_t len = strlen (XSTR (rtl, 0)) + 1;
29667 tree t = build_string (len, XSTR (rtl, 0));
29668 tree tlen = size_int (len - 1);
29669 TREE_TYPE (t)
29670 = build_array_type (char_type_node, build_index_type (tlen));
29671 rtl = lookup_constant_def (t);
29672 if (!rtl || !MEM_P (rtl))
29673 return false;
29674 rtl = XEXP (rtl, 0);
29675 if (GET_CODE (rtl) == SYMBOL_REF
29676 && SYMBOL_REF_DECL (rtl)
29677 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29678 return false;
29679 vec_safe_push (used_rtx_array, rtl);
29680 *addr = rtl;
29681 return true;
29682 }
29683
29684 if (GET_CODE (rtl) == SYMBOL_REF
29685 && SYMBOL_REF_DECL (rtl))
29686 {
29687 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29688 {
29689 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29690 return false;
29691 }
29692 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29693 return false;
29694 }
29695
29696 if (GET_CODE (rtl) == CONST)
29697 {
29698 subrtx_ptr_iterator::array_type array;
29699 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29700 if (!resolve_one_addr (*iter))
29701 return false;
29702 }
29703
29704 return true;
29705 }
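
/* Example of the CONST_STRING case above (contents hypothetical): a
   debug expression referring to the literal "abc" is rewritten to the
   SYMBOL_REF of its constant-pool entry, provided that entry is emitted
   in the current translation unit; otherwise the caller drops the whole
   location rather than emit an address the linker could not resolve.  */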
29706
29707 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29708 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29709 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29710
29711 static rtx
string_cst_pool_decl (tree t)
29713 {
29714 rtx rtl = output_constant_def (t, 1);
29715 unsigned char *array;
29716 dw_loc_descr_ref l;
29717 tree decl;
29718 size_t len;
29719 dw_die_ref ref;
29720
29721 if (!rtl || !MEM_P (rtl))
29722 return NULL_RTX;
29723 rtl = XEXP (rtl, 0);
29724 if (GET_CODE (rtl) != SYMBOL_REF
29725 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29726 return NULL_RTX;
29727
29728 decl = SYMBOL_REF_DECL (rtl);
29729 if (!lookup_decl_die (decl))
29730 {
29731 len = TREE_STRING_LENGTH (t);
29732 vec_safe_push (used_rtx_array, rtl);
29733 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29734 array = ggc_vec_alloc<unsigned char> (len);
29735 memcpy (array, TREE_STRING_POINTER (t), len);
29736 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29737 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29738 l->dw_loc_oprnd2.v.val_vec.length = len;
29739 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29740 l->dw_loc_oprnd2.v.val_vec.array = array;
29741 add_AT_loc (ref, DW_AT_location, l);
29742 equate_decl_number_to_die (decl, ref);
29743 }
29744 return rtl;
29745 }
29746
/* Helper function of resolve_addr_in_expr.  LOC is
   a DW_OP_addr followed by DW_OP_stack_value, either at the start
   of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
   resolved.  Replace both the DW_OP_addr and the DW_OP_stack_value
   with DW_OP_implicit_pointer if possible and return true;
   if unsuccessful, return false.  */
29753
29754 static bool
optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29756 {
29757 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29758 HOST_WIDE_INT offset = 0;
29759 dw_die_ref ref = NULL;
29760 tree decl;
29761
29762 if (GET_CODE (rtl) == CONST
29763 && GET_CODE (XEXP (rtl, 0)) == PLUS
29764 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29765 {
29766 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29767 rtl = XEXP (XEXP (rtl, 0), 0);
29768 }
29769 if (GET_CODE (rtl) == CONST_STRING)
29770 {
29771 size_t len = strlen (XSTR (rtl, 0)) + 1;
29772 tree t = build_string (len, XSTR (rtl, 0));
29773 tree tlen = size_int (len - 1);
29774
29775 TREE_TYPE (t)
29776 = build_array_type (char_type_node, build_index_type (tlen));
29777 rtl = string_cst_pool_decl (t);
29778 if (!rtl)
29779 return false;
29780 }
29781 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29782 {
29783 decl = SYMBOL_REF_DECL (rtl);
29784 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29785 {
29786 ref = lookup_decl_die (decl);
29787 if (ref && (get_AT (ref, DW_AT_location)
29788 || get_AT (ref, DW_AT_const_value)))
29789 {
29790 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29791 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29792 loc->dw_loc_oprnd1.val_entry = NULL;
29793 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29794 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29795 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29796 loc->dw_loc_oprnd2.v.val_int = offset;
29797 return true;
29798 }
29799 }
29800 }
29801 return false;
29802 }
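
/* Worked example for the transformation above (the variable name is
   hypothetical): an optimized-out pointer whose value is known to be
   "&var + 4" would otherwise need the unresolvable exprloc
   DW_OP_addr <&var + 4>; DW_OP_stack_value.  If var's DIE already has a
   location or constant value, the pair is replaced by
   DW_OP_implicit_pointer <DIE of var> <offset 4>, letting consumers
   read the pointed-to value through var's own location description.  */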
29803
/* Helper function for resolve_addr: handle one location
   expression; return false if at least one CONST_STRING or SYMBOL_REF in
   the location list couldn't be resolved.  */
29807
29808 static bool
resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29810 {
29811 dw_loc_descr_ref keep = NULL;
29812 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29813 switch (loc->dw_loc_opc)
29814 {
29815 case DW_OP_addr:
29816 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29817 {
29818 if ((prev == NULL
29819 || prev->dw_loc_opc == DW_OP_piece
29820 || prev->dw_loc_opc == DW_OP_bit_piece)
29821 && loc->dw_loc_next
29822 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29823 && (!dwarf_strict || dwarf_version >= 5)
29824 && optimize_one_addr_into_implicit_ptr (loc))
29825 break;
29826 return false;
29827 }
29828 break;
29829 case DW_OP_GNU_addr_index:
29830 case DW_OP_GNU_const_index:
29831 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
29832 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
29833 {
29834 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29835 if (!resolve_one_addr (&rtl))
29836 return false;
29837 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29838 loc->dw_loc_oprnd1.val_entry
29839 = add_addr_table_entry (rtl, ate_kind_rtx);
29840 }
29841 break;
29842 case DW_OP_const4u:
29843 case DW_OP_const8u:
29844 if (loc->dtprel
29845 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29846 return false;
29847 break;
29848 case DW_OP_plus_uconst:
29849 if (size_of_loc_descr (loc)
29850 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29851 + 1
29852 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29853 {
29854 dw_loc_descr_ref repl
29855 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29856 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29857 add_loc_descr (&repl, loc->dw_loc_next);
29858 *loc = *repl;
29859 }
29860 break;
29861 case DW_OP_implicit_value:
29862 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29863 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29864 return false;
29865 break;
29866 case DW_OP_implicit_pointer:
29867 case DW_OP_GNU_implicit_pointer:
29868 case DW_OP_GNU_parameter_ref:
29869 case DW_OP_GNU_variable_value:
29870 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29871 {
29872 dw_die_ref ref
29873 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29874 if (ref == NULL)
29875 return false;
29876 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29877 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29878 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29879 }
29880 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29881 {
29882 if (prev == NULL
29883 && loc->dw_loc_next == NULL
29884 && AT_class (a) == dw_val_class_loc)
29885 switch (a->dw_attr)
29886 {
29887 /* Following attributes allow both exprloc and reference,
29888 so if the whole expression is DW_OP_GNU_variable_value
29889 alone we could transform it into reference. */
29890 case DW_AT_byte_size:
29891 case DW_AT_bit_size:
29892 case DW_AT_lower_bound:
29893 case DW_AT_upper_bound:
29894 case DW_AT_bit_stride:
29895 case DW_AT_count:
29896 case DW_AT_allocated:
29897 case DW_AT_associated:
29898 case DW_AT_byte_stride:
29899 a->dw_attr_val.val_class = dw_val_class_die_ref;
29900 a->dw_attr_val.val_entry = NULL;
29901 a->dw_attr_val.v.val_die_ref.die
29902 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29903 a->dw_attr_val.v.val_die_ref.external = 0;
29904 return true;
29905 default:
29906 break;
29907 }
29908 if (dwarf_strict)
29909 return false;
29910 }
29911 break;
29912 case DW_OP_const_type:
29913 case DW_OP_regval_type:
29914 case DW_OP_deref_type:
29915 case DW_OP_convert:
29916 case DW_OP_reinterpret:
29917 case DW_OP_GNU_const_type:
29918 case DW_OP_GNU_regval_type:
29919 case DW_OP_GNU_deref_type:
29920 case DW_OP_GNU_convert:
29921 case DW_OP_GNU_reinterpret:
29922 while (loc->dw_loc_next
29923 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29924 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29925 {
29926 dw_die_ref base1, base2;
29927 unsigned enc1, enc2, size1, size2;
29928 if (loc->dw_loc_opc == DW_OP_regval_type
29929 || loc->dw_loc_opc == DW_OP_deref_type
29930 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29931 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29932 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29933 else if (loc->dw_loc_oprnd1.val_class
29934 == dw_val_class_unsigned_const)
29935 break;
29936 else
29937 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29938 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29939 == dw_val_class_unsigned_const)
29940 break;
29941 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29942 gcc_assert (base1->die_tag == DW_TAG_base_type
29943 && base2->die_tag == DW_TAG_base_type);
29944 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29945 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29946 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29947 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29948 if (size1 == size2
29949 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29950 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29951 && loc != keep)
29952 || enc1 == enc2))
29953 {
29954 /* Optimize away next DW_OP_convert after
29955 adjusting LOC's base type die reference. */
29956 if (loc->dw_loc_opc == DW_OP_regval_type
29957 || loc->dw_loc_opc == DW_OP_deref_type
29958 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29959 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29960 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29961 else
29962 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29963 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29964 continue;
29965 }
29966 /* Don't change integer DW_OP_convert after e.g. floating
29967 point typed stack entry. */
29968 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29969 keep = loc->dw_loc_next;
29970 break;
29971 }
29972 break;
29973 default:
29974 break;
29975 }
29976 return true;
29977 }
29978
29979 /* Helper function of resolve_addr. DIE had DW_AT_location of
29980 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
29981 and DW_OP_addr couldn't be resolved. resolve_addr has already
29982 removed the DW_AT_location attribute. This function attempts to
29983 add a new DW_AT_location attribute with DW_OP_implicit_pointer
29984 to it or DW_AT_const_value attribute, if possible. */
29985
29986 static void
29987 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29988 {
29989 if (!VAR_P (decl)
29990 || lookup_decl_die (decl) != die
29991 || DECL_EXTERNAL (decl)
29992 || !TREE_STATIC (decl)
29993 || DECL_INITIAL (decl) == NULL_TREE
29994 || DECL_P (DECL_INITIAL (decl))
29995 || get_AT (die, DW_AT_const_value))
29996 return;
29997
29998 tree init = DECL_INITIAL (decl);
29999 HOST_WIDE_INT offset = 0;
30000 /* For variables that have been optimized away and thus
30001 don't have a memory location, see if we can emit
30002 DW_AT_const_value instead. */
30003 if (tree_add_const_value_attribute (die, init))
30004 return;
30005 if (dwarf_strict && dwarf_version < 5)
30006 return;
30007 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30008 and ADDR_EXPR refers to a decl that has DW_AT_location or
30009 DW_AT_const_value (but isn't addressable, otherwise
30010 resolving the original DW_OP_addr wouldn't fail), see if
30011 we can add DW_OP_implicit_pointer. */
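/* E.g. (purely illustrative): for
     static char buf[16];
     static char *p = buf + 2;
   where P itself ended up without a memory location, P's DIE can be
   given DW_AT_location of DW_OP_implicit_pointer <DIE of buf> <2>.  */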
30012 STRIP_NOPS (init);
30013 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30014 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30015 {
30016 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30017 init = TREE_OPERAND (init, 0);
30018 STRIP_NOPS (init);
30019 }
30020 if (TREE_CODE (init) != ADDR_EXPR)
30021 return;
30022 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30023 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30024 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30025 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30026 && TREE_OPERAND (init, 0) != decl))
30027 {
30028 dw_die_ref ref;
30029 dw_loc_descr_ref l;
30030
30031 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30032 {
30033 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30034 if (!rtl)
30035 return;
30036 decl = SYMBOL_REF_DECL (rtl);
30037 }
30038 else
30039 decl = TREE_OPERAND (init, 0);
30040 ref = lookup_decl_die (decl);
30041 if (ref == NULL
30042 || (!get_AT (ref, DW_AT_location)
30043 && !get_AT (ref, DW_AT_const_value)))
30044 return;
30045 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30046 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30047 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30048 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30049 add_AT_loc (die, DW_AT_location, l);
30050 }
30051 }
30052
30053 /* Return NULL if L is a valid DWARF expression, or otherwise the first
30054 op that is not a valid DWARF expression. */
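/* Roughly speaking, the opcodes rejected below are those that do not
   describe a plain memory address computation: they name a register
   outright, mark the location as implicit or composite, or are GNU
   extensions with special semantics.  The callers below rely on this.  */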
30055
30056 static dw_loc_descr_ref
30057 non_dwarf_expression (dw_loc_descr_ref l)
30058 {
30059 while (l)
30060 {
30061 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30062 return l;
30063 switch (l->dw_loc_opc)
30064 {
30065 case DW_OP_regx:
30066 case DW_OP_implicit_value:
30067 case DW_OP_stack_value:
30068 case DW_OP_implicit_pointer:
30069 case DW_OP_GNU_implicit_pointer:
30070 case DW_OP_GNU_parameter_ref:
30071 case DW_OP_piece:
30072 case DW_OP_bit_piece:
30073 return l;
30074 default:
30075 break;
30076 }
30077 l = l->dw_loc_next;
30078 }
30079 return NULL;
30080 }
30081
30082 /* Return adjusted copy of EXPR:
30083 If it is empty DWARF expression, return it.
30084 If it is valid non-empty DWARF expression,
30085 return copy of EXPR with DW_OP_deref appended to it.
30086 If it is DWARF expression followed by DW_OP_reg{N,x}, return
30087 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30088 If it is DWARF expression followed by DW_OP_stack_value, return
30089 copy of the DWARF expression without anything appended.
30090 Otherwise, return NULL. */
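/* Illustrative examples of the above (assuming the usual operand
   notation):
     DW_OP_fbreg <-16>             -> DW_OP_fbreg <-16> DW_OP_deref
     DW_OP_breg6 <8> DW_OP_reg3    -> DW_OP_breg6 <8> DW_OP_breg3 <0>
     DW_OP_lit5 DW_OP_stack_value  -> DW_OP_lit5  */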
30091
30092 static dw_loc_descr_ref
30093 copy_deref_exprloc (dw_loc_descr_ref expr)
30094 {
30095 dw_loc_descr_ref tail = NULL;
30096
30097 if (expr == NULL)
30098 return NULL;
30099
30100 dw_loc_descr_ref l = non_dwarf_expression (expr);
30101 if (l && l->dw_loc_next)
30102 return NULL;
30103
30104 if (l)
30105 {
30106 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30107 tail = new_loc_descr ((enum dwarf_location_atom)
30108 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30109 0, 0);
30110 else
30111 switch (l->dw_loc_opc)
30112 {
30113 case DW_OP_regx:
30114 tail = new_loc_descr (DW_OP_bregx,
30115 l->dw_loc_oprnd1.v.val_unsigned, 0);
30116 break;
30117 case DW_OP_stack_value:
30118 break;
30119 default:
30120 return NULL;
30121 }
30122 }
30123 else
30124 tail = new_loc_descr (DW_OP_deref, 0, 0);
30125
30126 dw_loc_descr_ref ret = NULL, *p = &ret;
30127 while (expr != l)
30128 {
30129 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30130 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30131 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30132 p = &(*p)->dw_loc_next;
30133 expr = expr->dw_loc_next;
30134 }
30135 *p = tail;
30136 return ret;
30137 }
30138
30139 /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
30140 reference to a variable or argument, adjust it if needed and return:
30141 -1 if the DW_AT_string_length attribute (and any
30142 DW_AT_{string_length_,}byte_size attribute) should be removed,
30143 0 if the attribute should be kept, perhaps with minor modifications
30144 (no need to rescan), or 1 if the attribute has been successfully adjusted. */
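/* Roughly, the transformations attempted below are (illustrative only):
     DW_OP_GNU_variable_value <ref> DW_OP_stack_value
       -> a DW_FORM_ref* reference to <ref>   (DWARF 5 and later)
       -> DW_OP_call4 <ref>, or a copy of <ref>'s DW_AT_location
     DW_OP_GNU_variable_value <ref> alone
       -> DW_OP_call4 <ref> DW_OP_deref, or a copy of <ref>'s location
          (list) with DW_OP_deref appended (see copy_deref_exprloc).  */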
30145
30146 static int
30147 optimize_string_length (dw_attr_node *a)
30148 {
30149 dw_loc_descr_ref l = AT_loc (a), lv;
30150 dw_die_ref die;
30151 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30152 {
30153 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30154 die = lookup_decl_die (decl);
30155 if (die)
30156 {
30157 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30158 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30159 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30160 }
30161 else
30162 return -1;
30163 }
30164 else
30165 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30166
30167 /* DWARF5 allows reference class, so we can then reference the DIE.
30168 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30169 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30170 {
30171 a->dw_attr_val.val_class = dw_val_class_die_ref;
30172 a->dw_attr_val.val_entry = NULL;
30173 a->dw_attr_val.v.val_die_ref.die = die;
30174 a->dw_attr_val.v.val_die_ref.external = 0;
30175 return 0;
30176 }
30177
30178 dw_attr_node *av = get_AT (die, DW_AT_location);
30179 dw_loc_list_ref d;
30180 bool non_dwarf_expr = false;
30181
30182 if (av == NULL)
30183 return dwarf_strict ? -1 : 0;
30184 switch (AT_class (av))
30185 {
30186 case dw_val_class_loc_list:
30187 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30188 if (d->expr && non_dwarf_expression (d->expr))
30189 non_dwarf_expr = true;
30190 break;
30191 case dw_val_class_view_list:
30192 gcc_unreachable ();
30193 case dw_val_class_loc:
30194 lv = AT_loc (av);
30195 if (lv == NULL)
30196 return dwarf_strict ? -1 : 0;
30197 if (non_dwarf_expression (lv))
30198 non_dwarf_expr = true;
30199 break;
30200 default:
30201 return dwarf_strict ? -1 : 0;
30202 }
30203
30204 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30205 into DW_OP_call4 or DW_OP_GNU_variable_value into
30206 DW_OP_call4 DW_OP_deref, do so. */
30207 if (!non_dwarf_expr
30208 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30209 {
30210 l->dw_loc_opc = DW_OP_call4;
30211 if (l->dw_loc_next)
30212 l->dw_loc_next = NULL;
30213 else
30214 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30215 return 0;
30216 }
30217
30218 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30219 copy over the DW_AT_location attribute from die to a. */
30220 if (l->dw_loc_next != NULL)
30221 {
30222 a->dw_attr_val = av->dw_attr_val;
30223 return 1;
30224 }
30225
30226 dw_loc_list_ref list, *p;
30227 switch (AT_class (av))
30228 {
30229 case dw_val_class_loc_list:
30230 p = &list;
30231 list = NULL;
30232 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30233 {
30234 lv = copy_deref_exprloc (d->expr);
30235 if (lv)
30236 {
30237 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30238 p = &(*p)->dw_loc_next;
30239 }
30240 else if (!dwarf_strict && d->expr)
30241 return 0;
30242 }
30243 if (list == NULL)
30244 return dwarf_strict ? -1 : 0;
30245 a->dw_attr_val.val_class = dw_val_class_loc_list;
30246 gen_llsym (list);
30247 *AT_loc_list_ptr (a) = list;
30248 return 1;
30249 case dw_val_class_loc:
30250 lv = copy_deref_exprloc (AT_loc (av));
30251 if (lv == NULL)
30252 return dwarf_strict ? -1 : 0;
30253 a->dw_attr_val.v.val_loc = lv;
30254 return 1;
30255 default:
30256 gcc_unreachable ();
30257 }
30258 }
30259
30260 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30261 an address in .rodata section if the string literal is emitted there,
30262 or remove the containing location list or replace DW_AT_const_value
30263 with DW_AT_location and empty location expression, if it isn't found
30264 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30265 to something that has been emitted in the current CU. */
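/* For example (illustrative), a location list entry whose expression is
     DW_OP_addr <sym>
   where <sym> belongs to a variable this CU never emitted is simply
   dropped from the list, and if the list becomes empty the whole
   DW_AT_location attribute is removed.  */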
30266
30267 static void
30268 resolve_addr (dw_die_ref die)
30269 {
30270 dw_die_ref c;
30271 dw_attr_node *a;
30272 dw_loc_list_ref *curr, *start, loc;
30273 unsigned ix;
30274 bool remove_AT_byte_size = false;
30275
30276 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30277 switch (AT_class (a))
30278 {
30279 case dw_val_class_loc_list:
30280 start = curr = AT_loc_list_ptr (a);
30281 loc = *curr;
30282 gcc_assert (loc);
30283 /* The same list can be referenced more than once. See if we have
30284 already recorded the result from a previous pass. */
30285 if (loc->replaced)
30286 *curr = loc->dw_loc_next;
30287 else if (!loc->resolved_addr)
30288 {
30289 /* As things stand, we do not expect or allow one die to
30290 reference a suffix of another die's location list chain.
30291 References must be identical or completely separate.
30292 There is therefore no need to cache the result of this
30293 pass on any list other than the first; doing so
30294 would lead to unnecessary writes. */
30295 while (*curr)
30296 {
30297 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30298 if (!resolve_addr_in_expr (a, (*curr)->expr))
30299 {
30300 dw_loc_list_ref next = (*curr)->dw_loc_next;
30301 dw_loc_descr_ref l = (*curr)->expr;
30302
30303 if (next && (*curr)->ll_symbol)
30304 {
30305 gcc_assert (!next->ll_symbol);
30306 next->ll_symbol = (*curr)->ll_symbol;
30307 next->vl_symbol = (*curr)->vl_symbol;
30308 }
30309 if (dwarf_split_debug_info)
30310 remove_loc_list_addr_table_entries (l);
30311 *curr = next;
30312 }
30313 else
30314 {
30315 mark_base_types ((*curr)->expr);
30316 curr = &(*curr)->dw_loc_next;
30317 }
30318 }
30319 if (loc == *start)
30320 loc->resolved_addr = 1;
30321 else
30322 {
30323 loc->replaced = 1;
30324 loc->dw_loc_next = *start;
30325 }
30326 }
30327 if (!*start)
30328 {
30329 remove_AT (die, a->dw_attr);
30330 ix--;
30331 }
30332 break;
30333 case dw_val_class_view_list:
30334 {
30335 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30336 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30337 dw_val_node *llnode
30338 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30339 /* If we no longer have a loclist, or it no longer needs
30340 views, drop this attribute. */
30341 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30342 {
30343 remove_AT (die, a->dw_attr);
30344 ix--;
30345 }
30346 break;
30347 }
30348 case dw_val_class_loc:
30349 {
30350 dw_loc_descr_ref l = AT_loc (a);
30351 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30352 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30353 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30354 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30355 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30356 with DW_FORM_ref referencing the same DIE as
30357 DW_OP_GNU_variable_value used to reference. */
30358 if (a->dw_attr == DW_AT_string_length
30359 && l
30360 && l->dw_loc_opc == DW_OP_GNU_variable_value
30361 && (l->dw_loc_next == NULL
30362 || (l->dw_loc_next->dw_loc_next == NULL
30363 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30364 {
30365 switch (optimize_string_length (a))
30366 {
30367 case -1:
30368 remove_AT (die, a->dw_attr);
30369 ix--;
30370 /* If we drop DW_AT_string_length, we need to drop also
30371 DW_AT_{string_length_,}byte_size. */
30372 remove_AT_byte_size = true;
30373 continue;
30374 default:
30375 break;
30376 case 1:
30377 /* Even if we keep the optimized DW_AT_string_length,
30378 it might have changed AT_class, so process it again. */
30379 ix--;
30380 continue;
30381 }
30382 }
30383 /* For -gdwarf-2 don't attempt to optimize
30384 DW_AT_data_member_location containing
30385 DW_OP_plus_uconst - older consumers might
30386 rely on it being that op instead of a more complex,
30387 but shorter, location description. */
30388 if ((dwarf_version > 2
30389 || a->dw_attr != DW_AT_data_member_location
30390 || l == NULL
30391 || l->dw_loc_opc != DW_OP_plus_uconst
30392 || l->dw_loc_next != NULL)
30393 && !resolve_addr_in_expr (a, l))
30394 {
30395 if (dwarf_split_debug_info)
30396 remove_loc_list_addr_table_entries (l);
30397 if (l != NULL
30398 && l->dw_loc_next == NULL
30399 && l->dw_loc_opc == DW_OP_addr
30400 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30401 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30402 && a->dw_attr == DW_AT_location)
30403 {
30404 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30405 remove_AT (die, a->dw_attr);
30406 ix--;
30407 optimize_location_into_implicit_ptr (die, decl);
30408 break;
30409 }
30410 if (a->dw_attr == DW_AT_string_length)
30411 /* If we drop DW_AT_string_length, we need to drop also
30412 DW_AT_{string_length_,}byte_size. */
30413 remove_AT_byte_size = true;
30414 remove_AT (die, a->dw_attr);
30415 ix--;
30416 }
30417 else
30418 mark_base_types (l);
30419 }
30420 break;
30421 case dw_val_class_addr:
30422 if (a->dw_attr == DW_AT_const_value
30423 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30424 {
30425 if (AT_index (a) != NOT_INDEXED)
30426 remove_addr_table_entry (a->dw_attr_val.val_entry);
30427 remove_AT (die, a->dw_attr);
30428 ix--;
30429 }
30430 if ((die->die_tag == DW_TAG_call_site
30431 && a->dw_attr == DW_AT_call_origin)
30432 || (die->die_tag == DW_TAG_GNU_call_site
30433 && a->dw_attr == DW_AT_abstract_origin))
30434 {
30435 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30436 dw_die_ref tdie = lookup_decl_die (tdecl);
30437 dw_die_ref cdie;
30438 if (tdie == NULL
30439 && DECL_EXTERNAL (tdecl)
30440 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30441 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30442 {
30443 dw_die_ref pdie = cdie;
30444 /* Make sure we don't add these DIEs into type units.
30445 We could emit skeleton DIEs for context (namespaces,
30446 outer structs/classes) and a skeleton DIE for the
30447 innermost context with DW_AT_signature pointing to the
30448 type unit. See PR78835. */
30449 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30450 pdie = pdie->die_parent;
30451 if (pdie == NULL)
30452 {
30453 /* Creating a full DIE for tdecl is overly expensive and
30454 at this point even wrong when in the LTO phase
30455 as it can end up generating new type DIEs we didn't
30456 output and thus optimize_external_refs will crash. */
30457 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30458 add_AT_flag (tdie, DW_AT_external, 1);
30459 add_AT_flag (tdie, DW_AT_declaration, 1);
30460 add_linkage_attr (tdie, tdecl);
30461 add_name_and_src_coords_attributes (tdie, tdecl, true);
30462 equate_decl_number_to_die (tdecl, tdie);
30463 }
30464 }
30465 if (tdie)
30466 {
30467 a->dw_attr_val.val_class = dw_val_class_die_ref;
30468 a->dw_attr_val.v.val_die_ref.die = tdie;
30469 a->dw_attr_val.v.val_die_ref.external = 0;
30470 }
30471 else
30472 {
30473 if (AT_index (a) != NOT_INDEXED)
30474 remove_addr_table_entry (a->dw_attr_val.val_entry);
30475 remove_AT (die, a->dw_attr);
30476 ix--;
30477 }
30478 }
30479 break;
30480 default:
30481 break;
30482 }
30483
30484 if (remove_AT_byte_size)
30485 remove_AT (die, dwarf_version >= 5
30486 ? DW_AT_string_length_byte_size
30487 : DW_AT_byte_size);
30488
30489 FOR_EACH_CHILD (die, c, resolve_addr (c));
30490 }
30491
30492 /* Helper routines for optimize_location_lists.
30493 This pass tries to share identical location lists in the .debug_loc
30494 section. */
30495
30496 /* Iteratively hash operands of LOC opcode into HSTATE. */
30497
30498 static void
30499 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30500 {
30501 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30502 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30503
30504 switch (loc->dw_loc_opc)
30505 {
30506 case DW_OP_const4u:
30507 case DW_OP_const8u:
30508 if (loc->dtprel)
30509 goto hash_addr;
30510 /* FALLTHRU */
30511 case DW_OP_const1u:
30512 case DW_OP_const1s:
30513 case DW_OP_const2u:
30514 case DW_OP_const2s:
30515 case DW_OP_const4s:
30516 case DW_OP_const8s:
30517 case DW_OP_constu:
30518 case DW_OP_consts:
30519 case DW_OP_pick:
30520 case DW_OP_plus_uconst:
30521 case DW_OP_breg0:
30522 case DW_OP_breg1:
30523 case DW_OP_breg2:
30524 case DW_OP_breg3:
30525 case DW_OP_breg4:
30526 case DW_OP_breg5:
30527 case DW_OP_breg6:
30528 case DW_OP_breg7:
30529 case DW_OP_breg8:
30530 case DW_OP_breg9:
30531 case DW_OP_breg10:
30532 case DW_OP_breg11:
30533 case DW_OP_breg12:
30534 case DW_OP_breg13:
30535 case DW_OP_breg14:
30536 case DW_OP_breg15:
30537 case DW_OP_breg16:
30538 case DW_OP_breg17:
30539 case DW_OP_breg18:
30540 case DW_OP_breg19:
30541 case DW_OP_breg20:
30542 case DW_OP_breg21:
30543 case DW_OP_breg22:
30544 case DW_OP_breg23:
30545 case DW_OP_breg24:
30546 case DW_OP_breg25:
30547 case DW_OP_breg26:
30548 case DW_OP_breg27:
30549 case DW_OP_breg28:
30550 case DW_OP_breg29:
30551 case DW_OP_breg30:
30552 case DW_OP_breg31:
30553 case DW_OP_regx:
30554 case DW_OP_fbreg:
30555 case DW_OP_piece:
30556 case DW_OP_deref_size:
30557 case DW_OP_xderef_size:
30558 hstate.add_object (val1->v.val_int);
30559 break;
30560 case DW_OP_skip:
30561 case DW_OP_bra:
30562 {
30563 int offset;
30564
30565 gcc_assert (val1->val_class == dw_val_class_loc);
30566 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30567 hstate.add_object (offset);
30568 }
30569 break;
30570 case DW_OP_implicit_value:
30571 hstate.add_object (val1->v.val_unsigned);
30572 switch (val2->val_class)
30573 {
30574 case dw_val_class_const:
30575 hstate.add_object (val2->v.val_int);
30576 break;
30577 case dw_val_class_vec:
30578 {
30579 unsigned int elt_size = val2->v.val_vec.elt_size;
30580 unsigned int len = val2->v.val_vec.length;
30581
30582 hstate.add_int (elt_size);
30583 hstate.add_int (len);
30584 hstate.add (val2->v.val_vec.array, len * elt_size);
30585 }
30586 break;
30587 case dw_val_class_const_double:
30588 hstate.add_object (val2->v.val_double.low);
30589 hstate.add_object (val2->v.val_double.high);
30590 break;
30591 case dw_val_class_wide_int:
30592 hstate.add (val2->v.val_wide->get_val (),
30593 get_full_len (*val2->v.val_wide)
30594 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30595 break;
30596 case dw_val_class_addr:
30597 inchash::add_rtx (val2->v.val_addr, hstate);
30598 break;
30599 default:
30600 gcc_unreachable ();
30601 }
30602 break;
30603 case DW_OP_bregx:
30604 case DW_OP_bit_piece:
30605 hstate.add_object (val1->v.val_int);
30606 hstate.add_object (val2->v.val_int);
30607 break;
30608 case DW_OP_addr:
30609 hash_addr:
30610 if (loc->dtprel)
30611 {
30612 unsigned char dtprel = 0xd1;
30613 hstate.add_object (dtprel);
30614 }
30615 inchash::add_rtx (val1->v.val_addr, hstate);
30616 break;
30617 case DW_OP_GNU_addr_index:
30618 case DW_OP_GNU_const_index:
30619 {
30620 if (loc->dtprel)
30621 {
30622 unsigned char dtprel = 0xd1;
30623 hstate.add_object (dtprel);
30624 }
30625 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30626 }
30627 break;
30628 case DW_OP_implicit_pointer:
30629 case DW_OP_GNU_implicit_pointer:
30630 hstate.add_int (val2->v.val_int);
30631 break;
30632 case DW_OP_entry_value:
30633 case DW_OP_GNU_entry_value:
30634 hstate.add_object (val1->v.val_loc);
30635 break;
30636 case DW_OP_regval_type:
30637 case DW_OP_deref_type:
30638 case DW_OP_GNU_regval_type:
30639 case DW_OP_GNU_deref_type:
30640 {
30641 unsigned int byte_size
30642 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30643 unsigned int encoding
30644 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30645 hstate.add_object (val1->v.val_int);
30646 hstate.add_object (byte_size);
30647 hstate.add_object (encoding);
30648 }
30649 break;
30650 case DW_OP_convert:
30651 case DW_OP_reinterpret:
30652 case DW_OP_GNU_convert:
30653 case DW_OP_GNU_reinterpret:
30654 if (val1->val_class == dw_val_class_unsigned_const)
30655 {
30656 hstate.add_object (val1->v.val_unsigned);
30657 break;
30658 }
30659 /* FALLTHRU */
30660 case DW_OP_const_type:
30661 case DW_OP_GNU_const_type:
30662 {
30663 unsigned int byte_size
30664 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30665 unsigned int encoding
30666 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30667 hstate.add_object (byte_size);
30668 hstate.add_object (encoding);
30669 if (loc->dw_loc_opc != DW_OP_const_type
30670 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30671 break;
30672 hstate.add_object (val2->val_class);
30673 switch (val2->val_class)
30674 {
30675 case dw_val_class_const:
30676 hstate.add_object (val2->v.val_int);
30677 break;
30678 case dw_val_class_vec:
30679 {
30680 unsigned int elt_size = val2->v.val_vec.elt_size;
30681 unsigned int len = val2->v.val_vec.length;
30682
30683 hstate.add_object (elt_size);
30684 hstate.add_object (len);
30685 hstate.add (val2->v.val_vec.array, len * elt_size);
30686 }
30687 break;
30688 case dw_val_class_const_double:
30689 hstate.add_object (val2->v.val_double.low);
30690 hstate.add_object (val2->v.val_double.high);
30691 break;
30692 case dw_val_class_wide_int:
30693 hstate.add (val2->v.val_wide->get_val (),
30694 get_full_len (*val2->v.val_wide)
30695 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30696 break;
30697 default:
30698 gcc_unreachable ();
30699 }
30700 }
30701 break;
30702
30703 default:
30704 /* Other codes have no operands. */
30705 break;
30706 }
30707 }
30708
30709 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30710
30711 static inline void
30712 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30713 {
30714 dw_loc_descr_ref l;
30715 bool sizes_computed = false;
30716 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30717 size_of_locs (loc);
30718
30719 for (l = loc; l != NULL; l = l->dw_loc_next)
30720 {
30721 enum dwarf_location_atom opc = l->dw_loc_opc;
30722 hstate.add_object (opc);
30723 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30724 {
30725 size_of_locs (loc);
30726 sizes_computed = true;
30727 }
30728 hash_loc_operands (l, hstate);
30729 }
30730 }
30731
30732 /* Compute hash of the whole location list LIST_HEAD. */
30733
30734 static inline void
30735 hash_loc_list (dw_loc_list_ref list_head)
30736 {
30737 dw_loc_list_ref curr = list_head;
30738 inchash::hash hstate;
30739
30740 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30741 {
30742 hstate.add (curr->begin, strlen (curr->begin) + 1);
30743 hstate.add (curr->end, strlen (curr->end) + 1);
30744 hstate.add_object (curr->vbegin);
30745 hstate.add_object (curr->vend);
30746 if (curr->section)
30747 hstate.add (curr->section, strlen (curr->section) + 1);
30748 hash_locs (curr->expr, hstate);
30749 }
30750 list_head->hash = hstate.end ();
30751 }
30752
30753 /* Return true if X and Y opcodes have the same operands. */
30754
30755 static inline bool
30756 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30757 {
30758 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30759 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30760 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30761 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30762
30763 switch (x->dw_loc_opc)
30764 {
30765 case DW_OP_const4u:
30766 case DW_OP_const8u:
30767 if (x->dtprel)
30768 goto hash_addr;
30769 /* FALLTHRU */
30770 case DW_OP_const1u:
30771 case DW_OP_const1s:
30772 case DW_OP_const2u:
30773 case DW_OP_const2s:
30774 case DW_OP_const4s:
30775 case DW_OP_const8s:
30776 case DW_OP_constu:
30777 case DW_OP_consts:
30778 case DW_OP_pick:
30779 case DW_OP_plus_uconst:
30780 case DW_OP_breg0:
30781 case DW_OP_breg1:
30782 case DW_OP_breg2:
30783 case DW_OP_breg3:
30784 case DW_OP_breg4:
30785 case DW_OP_breg5:
30786 case DW_OP_breg6:
30787 case DW_OP_breg7:
30788 case DW_OP_breg8:
30789 case DW_OP_breg9:
30790 case DW_OP_breg10:
30791 case DW_OP_breg11:
30792 case DW_OP_breg12:
30793 case DW_OP_breg13:
30794 case DW_OP_breg14:
30795 case DW_OP_breg15:
30796 case DW_OP_breg16:
30797 case DW_OP_breg17:
30798 case DW_OP_breg18:
30799 case DW_OP_breg19:
30800 case DW_OP_breg20:
30801 case DW_OP_breg21:
30802 case DW_OP_breg22:
30803 case DW_OP_breg23:
30804 case DW_OP_breg24:
30805 case DW_OP_breg25:
30806 case DW_OP_breg26:
30807 case DW_OP_breg27:
30808 case DW_OP_breg28:
30809 case DW_OP_breg29:
30810 case DW_OP_breg30:
30811 case DW_OP_breg31:
30812 case DW_OP_regx:
30813 case DW_OP_fbreg:
30814 case DW_OP_piece:
30815 case DW_OP_deref_size:
30816 case DW_OP_xderef_size:
30817 return valx1->v.val_int == valy1->v.val_int;
30818 case DW_OP_skip:
30819 case DW_OP_bra:
30820 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30821 can cause irrelevant differences in dw_loc_addr. */
30822 gcc_assert (valx1->val_class == dw_val_class_loc
30823 && valy1->val_class == dw_val_class_loc
30824 && (dwarf_split_debug_info
30825 || x->dw_loc_addr == y->dw_loc_addr));
30826 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30827 case DW_OP_implicit_value:
30828 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30829 || valx2->val_class != valy2->val_class)
30830 return false;
30831 switch (valx2->val_class)
30832 {
30833 case dw_val_class_const:
30834 return valx2->v.val_int == valy2->v.val_int;
30835 case dw_val_class_vec:
30836 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30837 && valx2->v.val_vec.length == valy2->v.val_vec.length
30838 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30839 valx2->v.val_vec.elt_size
30840 * valx2->v.val_vec.length) == 0;
30841 case dw_val_class_const_double:
30842 return valx2->v.val_double.low == valy2->v.val_double.low
30843 && valx2->v.val_double.high == valy2->v.val_double.high;
30844 case dw_val_class_wide_int:
30845 return *valx2->v.val_wide == *valy2->v.val_wide;
30846 case dw_val_class_addr:
30847 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30848 default:
30849 gcc_unreachable ();
30850 }
30851 case DW_OP_bregx:
30852 case DW_OP_bit_piece:
30853 return valx1->v.val_int == valy1->v.val_int
30854 && valx2->v.val_int == valy2->v.val_int;
30855 case DW_OP_addr:
30856 hash_addr:
30857 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30858 case DW_OP_GNU_addr_index:
30859 case DW_OP_GNU_const_index:
30860 {
30861 rtx ax1 = valx1->val_entry->addr.rtl;
30862 rtx ay1 = valy1->val_entry->addr.rtl;
30863 return rtx_equal_p (ax1, ay1);
30864 }
30865 case DW_OP_implicit_pointer:
30866 case DW_OP_GNU_implicit_pointer:
30867 return valx1->val_class == dw_val_class_die_ref
30868 && valx1->val_class == valy1->val_class
30869 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30870 && valx2->v.val_int == valy2->v.val_int;
30871 case DW_OP_entry_value:
30872 case DW_OP_GNU_entry_value:
30873 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30874 case DW_OP_const_type:
30875 case DW_OP_GNU_const_type:
30876 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30877 || valx2->val_class != valy2->val_class)
30878 return false;
30879 switch (valx2->val_class)
30880 {
30881 case dw_val_class_const:
30882 return valx2->v.val_int == valy2->v.val_int;
30883 case dw_val_class_vec:
30884 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30885 && valx2->v.val_vec.length == valy2->v.val_vec.length
30886 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30887 valx2->v.val_vec.elt_size
30888 * valx2->v.val_vec.length) == 0;
30889 case dw_val_class_const_double:
30890 return valx2->v.val_double.low == valy2->v.val_double.low
30891 && valx2->v.val_double.high == valy2->v.val_double.high;
30892 case dw_val_class_wide_int:
30893 return *valx2->v.val_wide == *valy2->v.val_wide;
30894 default:
30895 gcc_unreachable ();
30896 }
30897 case DW_OP_regval_type:
30898 case DW_OP_deref_type:
30899 case DW_OP_GNU_regval_type:
30900 case DW_OP_GNU_deref_type:
30901 return valx1->v.val_int == valy1->v.val_int
30902 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30903 case DW_OP_convert:
30904 case DW_OP_reinterpret:
30905 case DW_OP_GNU_convert:
30906 case DW_OP_GNU_reinterpret:
30907 if (valx1->val_class != valy1->val_class)
30908 return false;
30909 if (valx1->val_class == dw_val_class_unsigned_const)
30910 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30911 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30912 case DW_OP_GNU_parameter_ref:
30913 return valx1->val_class == dw_val_class_die_ref
30914 && valx1->val_class == valy1->val_class
30915 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30916 default:
30917 /* Other codes have no operands. */
30918 return true;
30919 }
30920 }
30921
30922 /* Return true if DWARF location expressions X and Y are the same. */
30923
30924 static inline bool
30925 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30926 {
30927 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30928 if (x->dw_loc_opc != y->dw_loc_opc
30929 || x->dtprel != y->dtprel
30930 || !compare_loc_operands (x, y))
30931 break;
30932 return x == NULL && y == NULL;
30933 }
30934
30935 /* Hashtable helpers. */
30936
30937 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30938 {
30939 static inline hashval_t hash (const dw_loc_list_struct *);
30940 static inline bool equal (const dw_loc_list_struct *,
30941 const dw_loc_list_struct *);
30942 };
30943
30944 /* Return precomputed hash of location list X. */
30945
30946 inline hashval_t
30947 loc_list_hasher::hash (const dw_loc_list_struct *x)
30948 {
30949 return x->hash;
30950 }
30951
30952 /* Return true if location lists A and B are the same. */
30953
30954 inline bool
30955 loc_list_hasher::equal (const dw_loc_list_struct *a,
30956 const dw_loc_list_struct *b)
30957 {
30958 if (a == b)
30959 return 1;
30960 if (a->hash != b->hash)
30961 return 0;
30962 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30963 if (strcmp (a->begin, b->begin) != 0
30964 || strcmp (a->end, b->end) != 0
30965 || (a->section == NULL) != (b->section == NULL)
30966 || (a->section && strcmp (a->section, b->section) != 0)
30967 || a->vbegin != b->vbegin || a->vend != b->vend
30968 || !compare_locs (a->expr, b->expr))
30969 break;
30970 return a == NULL && b == NULL;
30971 }
30972
30973 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30974
30975
30976 /* Recursively optimize location lists referenced from DIE
30977 children and share them whenever possible. */
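/* E.g. (illustrative): if two variables end up with byte-for-byte
   identical location lists, only the first list is entered into the hash
   table; the second DIE's attribute is redirected to it, so the list is
   emitted into .debug_loc just once.  */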
30978
30979 static void
30980 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30981 {
30982 dw_die_ref c;
30983 dw_attr_node *a;
30984 unsigned ix;
30985 dw_loc_list_struct **slot;
30986 bool drop_locviews = false;
30987 bool has_locviews = false;
30988
30989 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30990 if (AT_class (a) == dw_val_class_loc_list)
30991 {
30992 dw_loc_list_ref list = AT_loc_list (a);
30993 /* TODO: perform some optimizations here, before hashing
30994 it and storing into the hash table. */
30995 hash_loc_list (list);
30996 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30997 if (*slot == NULL)
30998 {
30999 *slot = list;
31000 if (loc_list_has_views (list))
31001 gcc_assert (list->vl_symbol);
31002 else if (list->vl_symbol)
31003 {
31004 drop_locviews = true;
31005 list->vl_symbol = NULL;
31006 }
31007 }
31008 else
31009 {
31010 if (list->vl_symbol && !(*slot)->vl_symbol)
31011 drop_locviews = true;
31012 a->dw_attr_val.v.val_loc_list = *slot;
31013 }
31014 }
31015 else if (AT_class (a) == dw_val_class_view_list)
31016 {
31017 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31018 has_locviews = true;
31019 }
31020
31021
31022 if (drop_locviews && has_locviews)
31023 remove_AT (die, DW_AT_GNU_locviews);
31024
31025 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31026 }
31027
31028
31029 /* Recursively assign each location list a unique index into the debug_addr
31030 section. */
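/* More precisely, as the loop below shows, it is each location list
   entry's begin label that gets a .debug_addr table entry, so that with
   -gsplit-dwarf the entry's start address can be referred to by index.  */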
31031
31032 static void
31033 index_location_lists (dw_die_ref die)
31034 {
31035 dw_die_ref c;
31036 dw_attr_node *a;
31037 unsigned ix;
31038
31039 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31040 if (AT_class (a) == dw_val_class_loc_list)
31041 {
31042 dw_loc_list_ref list = AT_loc_list (a);
31043 dw_loc_list_ref curr;
31044 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31045 {
31046 /* Don't index an entry that has already been indexed
31047 or won't be output. Make sure skip_loc_list_entry doesn't
31048 call size_of_locs, because that might cause circular dependency,
31049 index_location_lists requiring address table indexes to be
31050 computed, but adding new indexes through add_addr_table_entry
31051 and address table index computation requiring no new additions
31052 to the hash table. In the rare case of DWARF[234] >= 64KB
31053 location expression, we'll just waste unused address table entry
31054 for it. */
31055 if (curr->begin_entry != NULL
31056 || skip_loc_list_entry (curr))
31057 continue;
31058
31059 curr->begin_entry
31060 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31061 }
31062 }
31063
31064 FOR_EACH_CHILD (die, c, index_location_lists (c));
31065 }
31066
31067 /* Optimize location lists referenced from DIE
31068 children and share them whenever possible. */
31069
31070 static void
31071 optimize_location_lists (dw_die_ref die)
31072 {
31073 loc_list_hash_type htab (500);
31074 optimize_location_lists_1 (die, &htab);
31075 }
31076
31077 /* Traverse the limbo die list, and add parent/child links. The only
31078 dies without parents that should be here are concrete instances of
31079 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31080 For concrete instances, we can get the parent die from the abstract
31081 instance. */
31082
31083 static void
31084 flush_limbo_die_list (void)
31085 {
31086 limbo_die_node *node;
31087
31088 /* get_context_die calls force_decl_die, which can put new DIEs on the
31089 limbo list in LTO mode when nested functions are put in a different
31090 partition than that of their parent function. */
31091 while ((node = limbo_die_list))
31092 {
31093 dw_die_ref die = node->die;
31094 limbo_die_list = node->next;
31095
31096 if (die->die_parent == NULL)
31097 {
31098 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31099
31100 if (origin && origin->die_parent)
31101 add_child_die (origin->die_parent, die);
31102 else if (is_cu_die (die))
31103 ;
31104 else if (seen_error ())
31105 /* It's OK to be confused by errors in the input. */
31106 add_child_die (comp_unit_die (), die);
31107 else
31108 {
31109 /* In certain situations, the lexical block containing a
31110 nested function can be optimized away, which results
31111 in the nested function die being orphaned. Likewise
31112 with the return type of that nested function. Force
31113 this to be a child of the containing function.
31114
31115 It may happen that even the containing function got fully
31116 inlined and optimized out. In that case we are lost and
31117 assign the empty child. This should not be a big issue as
31118 the function is likely unreachable too. */
31119 gcc_assert (node->created_for);
31120
31121 if (DECL_P (node->created_for))
31122 origin = get_context_die (DECL_CONTEXT (node->created_for));
31123 else if (TYPE_P (node->created_for))
31124 origin = scope_die_for (node->created_for, comp_unit_die ());
31125 else
31126 origin = comp_unit_die ();
31127
31128 add_child_die (origin, die);
31129 }
31130 }
31131 }
31132 }
31133
31134 /* Reset DIEs so we can output them again. */
31135
31136 static void
31137 reset_dies (dw_die_ref die)
31138 {
31139 dw_die_ref c;
31140
31141 /* Remove stuff we re-generate. */
31142 die->die_mark = 0;
31143 die->die_offset = 0;
31144 die->die_abbrev = 0;
31145 remove_AT (die, DW_AT_sibling);
31146
31147 FOR_EACH_CHILD (die, c, reset_dies (c));
31148 }
31149
31150 /* Output stuff that dwarf requires at the end of every file,
31151 and generate the DWARF-2 debugging info. */
31152
31153 static void
31154 dwarf2out_finish (const char *)
31155 {
31156 comdat_type_node *ctnode;
31157 dw_die_ref main_comp_unit_die;
31158 unsigned char checksum[16];
31159 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31160
31161 /* Flush out any latecomers to the limbo party. */
31162 flush_limbo_die_list ();
31163
31164 if (inline_entry_data_table)
31165 gcc_assert (inline_entry_data_table->elements () == 0);
31166
31167 if (flag_checking)
31168 {
31169 verify_die (comp_unit_die ());
31170 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31171 verify_die (node->die);
31172 }
31173
31174 /* We shouldn't have any symbols with delayed asm names for
31175 DIEs generated after early finish. */
31176 gcc_assert (deferred_asm_name == NULL);
31177
31178 gen_remaining_tmpl_value_param_die_attribute ();
31179
31180 if (flag_generate_lto || flag_generate_offload)
31181 {
31182 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31183
31184 /* Prune stuff so that dwarf2out_finish runs successfully
31185 for the fat part of the object. */
31186 reset_dies (comp_unit_die ());
31187 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31188 reset_dies (node->die);
31189
31190 hash_table<comdat_type_hasher> comdat_type_table (100);
31191 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31192 {
31193 comdat_type_node **slot
31194 = comdat_type_table.find_slot (ctnode, INSERT);
31195
31196 /* Don't reset types twice. */
31197 if (*slot != HTAB_EMPTY_ENTRY)
31198 continue;
31199
31200 /* Remove the pointer to the line table. */
31201 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31202
31203 if (debug_info_level >= DINFO_LEVEL_TERSE)
31204 reset_dies (ctnode->root_die);
31205
31206 *slot = ctnode;
31207 }
31208
31209 /* Reset die CU symbol so we don't output it twice. */
31210 comp_unit_die ()->die_id.die_symbol = NULL;
31211
31212 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31213 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31214 if (have_macinfo)
31215 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31216
31217 /* Remove indirect string decisions. */
31218 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31219 if (debug_line_str_hash)
31220 {
31221 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31222 debug_line_str_hash = NULL;
31223 }
31224 }
31225
31226 #if ENABLE_ASSERT_CHECKING
31227 {
31228 dw_die_ref die = comp_unit_die (), c;
31229 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31230 }
31231 #endif
31232 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31233 resolve_addr (ctnode->root_die);
31234 resolve_addr (comp_unit_die ());
31235 move_marked_base_types ();
31236
31237 /* Initialize sections and labels used for actual assembler output. */
31238 unsigned generation = init_sections_and_labels (false);
31239
31240 /* Traverse the DIE's and add sibling attributes to those DIE's that
31241 have children. */
31242 add_sibling_attributes (comp_unit_die ());
31243 limbo_die_node *node;
31244 for (node = cu_die_list; node; node = node->next)
31245 add_sibling_attributes (node->die);
31246 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31247 add_sibling_attributes (ctnode->root_die);
31248
31249 /* When splitting DWARF info, we put some attributes in the
31250 skeleton compile_unit DIE that remains in the .o, while
31251 most attributes go in the DWO compile_unit_die. */
31252 if (dwarf_split_debug_info)
31253 {
31254 limbo_die_node *cu;
31255 main_comp_unit_die = gen_compile_unit_die (NULL);
31256 if (dwarf_version >= 5)
31257 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31258 cu = limbo_die_list;
31259 gcc_assert (cu->die == main_comp_unit_die);
31260 limbo_die_list = limbo_die_list->next;
31261 cu->next = cu_die_list;
31262 cu_die_list = cu;
31263 }
31264 else
31265 main_comp_unit_die = comp_unit_die ();
31266
31267 /* Output a terminator label for the .text section. */
31268 switch_to_section (text_section);
31269 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31270 if (cold_text_section)
31271 {
31272 switch_to_section (cold_text_section);
31273 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31274 }
31275
31276 /* We can only use the low/high_pc attributes if all of the code was
31277 in .text. */
31278 if (!have_multiple_function_sections
31279 || (dwarf_version < 3 && dwarf_strict))
31280 {
31281 /* Don't add if the CU has no associated code. */
31282 if (text_section_used)
31283 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31284 text_end_label, true);
31285 }
31286 else
31287 {
31288 unsigned fde_idx;
31289 dw_fde_ref fde;
31290 bool range_list_added = false;
31291
31292 if (text_section_used)
31293 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31294 text_end_label, &range_list_added, true);
31295 if (cold_text_section_used)
31296 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31297 cold_end_label, &range_list_added, true);
31298
31299 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31300 {
31301 if (DECL_IGNORED_P (fde->decl))
31302 continue;
31303 if (!fde->in_std_section)
31304 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31305 fde->dw_fde_end, &range_list_added,
31306 true);
31307 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31308 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31309 fde->dw_fde_second_end, &range_list_added,
31310 true);
31311 }
31312
31313 if (range_list_added)
31314 {
31315 /* We need to give .debug_loc and .debug_ranges an appropriate
31316 "base address". Use zero so that these addresses become
31317 absolute. Historically, we've emitted the unexpected
31318 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31319 Emit both to give time for other tools to adapt. */
31320 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31321 if (! dwarf_strict && dwarf_version < 4)
31322 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31323
31324 add_ranges (NULL);
31325 }
31326 }
31327
31328 /* AIX Assembler inserts the length, so adjust the reference to match the
31329 offset expected by debuggers. */
31330 strcpy (dl_section_ref, debug_line_section_label);
31331 if (XCOFF_DEBUGGING_INFO)
31332 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31333
31334 if (debug_info_level >= DINFO_LEVEL_TERSE)
31335 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31336 dl_section_ref);
31337
31338 if (have_macinfo)
31339 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31340 macinfo_section_label);
31341
31342 if (dwarf_split_debug_info)
31343 {
31344 if (have_location_lists)
31345 {
31346 if (dwarf_version >= 5)
31347 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
31348 loc_section_label);
31349 /* optimize_location_lists calculates the size of the lists,
31350 so index them first, and assign indices to the entries.
31351 Although optimize_location_lists will remove entries from
31352 the table, it only does so for duplicates, and therefore
31353 only reduces ref_counts to 1. */
31354 index_location_lists (comp_unit_die ());
31355 }
31356
31357 if (addr_index_table != NULL)
31358 {
31359 unsigned int index = 0;
31360 addr_index_table
31361 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31362 (&index);
31363 }
31364 }
31365
31366 loc_list_idx = 0;
31367 if (have_location_lists)
31368 {
31369 optimize_location_lists (comp_unit_die ());
31370 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31371 if (dwarf_version >= 5 && dwarf_split_debug_info)
31372 assign_location_list_indexes (comp_unit_die ());
31373 }
31374
31375 save_macinfo_strings ();
31376
31377 if (dwarf_split_debug_info)
31378 {
31379 unsigned int index = 0;
31380
31381 /* Add attributes common to skeleton compile_units and
31382 type_units. Because these attributes include strings, it
31383 must be done before freezing the string table. Top-level
31384 skeleton die attrs are added when the skeleton type unit is
31385 created, so ensure it is created by this point. */
31386 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31387 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31388 }
31389
31390 /* Output all of the compilation units. We put the main one last so that
31391 the offsets are available to output_pubnames. */
31392 for (node = cu_die_list; node; node = node->next)
31393 output_comp_unit (node->die, 0, NULL);
31394
31395 hash_table<comdat_type_hasher> comdat_type_table (100);
31396 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31397 {
31398 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31399
31400 /* Don't output duplicate types. */
31401 if (*slot != HTAB_EMPTY_ENTRY)
31402 continue;
31403
31404 /* Add a pointer to the line table for the main compilation unit
31405 so that the debugger can make sense of DW_AT_decl_file
31406 attributes. */
31407 if (debug_info_level >= DINFO_LEVEL_TERSE)
31408 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31409 (!dwarf_split_debug_info
31410 ? dl_section_ref
31411 : debug_skeleton_line_section_label));
31412
31413 output_comdat_type_unit (ctnode, false);
31414 *slot = ctnode;
31415 }
31416
31417 if (dwarf_split_debug_info)
31418 {
31419 int mark;
31420 struct md5_ctx ctx;
31421
31422 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31423 index_rnglists ();
31424
31425 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31426 md5_init_ctx (&ctx);
31427 mark = 0;
31428 die_checksum (comp_unit_die (), &ctx, &mark);
31429 unmark_all_dies (comp_unit_die ());
31430 md5_finish_ctx (&ctx, checksum);
31431
31432 if (dwarf_version < 5)
31433 {
31434 /* Use the first 8 bytes of the checksum as the dwo_id,
31435 and add it to both comp-unit DIEs. */
31436 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31437 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31438 }
31439
31440 /* Add the base offset of the ranges table to the skeleton
31441 comp-unit DIE. */
31442 if (!vec_safe_is_empty (ranges_table))
31443 {
31444 if (dwarf_version >= 5)
31445 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31446 ranges_base_label);
31447 else
31448 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31449 ranges_section_label);
31450 }
31451
31452 switch_to_section (debug_addr_section);
31453 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31454 output_addr_table ();
31455 }
31456
31457 /* Output the main compilation unit if non-empty or if .debug_macinfo
31458 or .debug_macro will be emitted. */
31459 output_comp_unit (comp_unit_die (), have_macinfo,
31460 dwarf_split_debug_info ? checksum : NULL);
31461
31462 if (dwarf_split_debug_info && info_section_emitted)
31463 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31464
31465 /* Output the abbreviation table. */
31466 if (vec_safe_length (abbrev_die_table) != 1)
31467 {
31468 switch_to_section (debug_abbrev_section);
31469 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31470 output_abbrev_section ();
31471 }
31472
31473 /* Output location list section if necessary. */
31474 if (have_location_lists)
31475 {
31476 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31477 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31478 /* Output the location lists info. */
31479 switch_to_section (debug_loc_section);
31480 if (dwarf_version >= 5)
31481 {
31482 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31483 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31484 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31485 dw2_asm_output_data (4, 0xffffffff,
31486 "Initial length escape value indicating "
31487 "64-bit DWARF extension");
31488 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31489 "Length of Location Lists");
31490 ASM_OUTPUT_LABEL (asm_out_file, l1);
31491 output_dwarf_version ();
31492 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31493 dw2_asm_output_data (1, 0, "Segment Size");
31494 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31495 "Offset Entry Count");
31496 }
31497 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31498 if (dwarf_version >= 5 && dwarf_split_debug_info)
31499 {
31500 unsigned int save_loc_list_idx = loc_list_idx;
31501 loc_list_idx = 0;
31502 output_loclists_offsets (comp_unit_die ());
31503 gcc_assert (save_loc_list_idx == loc_list_idx);
31504 }
31505 output_location_lists (comp_unit_die ());
31506 if (dwarf_version >= 5)
31507 ASM_OUTPUT_LABEL (asm_out_file, l2);
31508 }
31509
31510 output_pubtables ();
31511
31512 /* Output the address range information if a CU (.debug_info section)
31513 was emitted. We output an empty table even if we had no functions
31514 to put in it. This is because the consumer has no way to tell the
31515 difference between an empty table that we omitted and failure to
31516 generate a table that would have contained data. */
31517 if (info_section_emitted)
31518 {
31519 switch_to_section (debug_aranges_section);
31520 output_aranges ();
31521 }
31522
31523 /* Output ranges section if necessary. */
31524 if (!vec_safe_is_empty (ranges_table))
31525 {
31526 if (dwarf_version >= 5)
31527 output_rnglists (generation);
31528 else
31529 output_ranges ();
31530 }
31531
31532 /* Have to end the macro section. */
31533 if (have_macinfo)
31534 {
31535 switch_to_section (debug_macinfo_section);
31536 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31537 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31538 : debug_skeleton_line_section_label, false);
31539 dw2_asm_output_data (1, 0, "End compilation unit");
31540 }
31541
31542 /* Output the source line correspondence table. We must do this
31543 even if there is no line information. Otherwise, on an empty
31544 translation unit, we will generate a present, but empty,
31545 .debug_info section. IRIX 6.5 `nm' will then complain when
31546 examining the file. This is done late so that any filenames
31547 used by the debug_info section are marked as 'used'. */
31548 switch_to_section (debug_line_section);
31549 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31550 if (! output_asm_line_debug_info ())
31551 output_line_info (false);
31552
31553 if (dwarf_split_debug_info && info_section_emitted)
31554 {
31555 switch_to_section (debug_skeleton_line_section);
31556 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31557 output_line_info (true);
31558 }
31559
31560 /* If we emitted any indirect strings, output the string table too. */
31561 if (debug_str_hash || skeleton_debug_str_hash)
31562 output_indirect_strings ();
31563 if (debug_line_str_hash)
31564 {
31565 switch_to_section (debug_line_str_section);
31566 const enum dwarf_form form = DW_FORM_line_strp;
31567 debug_line_str_hash->traverse<enum dwarf_form,
31568 output_indirect_string> (form);
31569 }
31570
31571 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31572 symview_upper_bound = 0;
31573 if (zero_view_p)
31574 bitmap_clear (zero_view_p);
31575 }
31576
31577 /* Returns a hash value for X (which really is a variable_value_struct). */
31578
31579 inline hashval_t
31580 variable_value_hasher::hash (variable_value_struct *x)
31581 {
31582 return (hashval_t) x->decl_id;
31583 }
31584
31585 /* Return nonzero if decl_id of variable_value_struct X is the same as
31586 UID of decl Y. */
31587
31588 inline bool
31589 variable_value_hasher::equal (variable_value_struct *x, tree y)
31590 {
31591 return x->decl_id == DECL_UID (y);
31592 }
31593
31594 /* Helper function for resolve_variable_value; handle
31595 DW_OP_GNU_variable_value in one location expression.
31596 Return true if the exprloc has been changed into a loclist. */
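
/* A rough sketch of the loclist case (operands are illustrative only):
   given an exprloc

     DW_OP_lit0  DW_OP_GNU_variable_value <V>  DW_OP_plus

   where V's location resolves to a list of per-range expressions, the
   attribute becomes a location list in which every range reads

     DW_OP_lit0  <V's expression for that range>  DW_OP_plus

   i.e. the operations before DW_OP_GNU_variable_value are prepended and
   those after it are appended to each list element.  */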
31597
31598 static bool
31599 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31600 {
31601 dw_loc_descr_ref next;
31602 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31603 {
31604 next = loc->dw_loc_next;
31605 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31606 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31607 continue;
31608
31609 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31610 if (DECL_CONTEXT (decl) != current_function_decl)
31611 continue;
31612
31613 dw_die_ref ref = lookup_decl_die (decl);
31614 if (ref)
31615 {
31616 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31617 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31618 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31619 continue;
31620 }
31621 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31622 if (l == NULL)
31623 continue;
31624 if (l->dw_loc_next)
31625 {
31626 if (AT_class (a) != dw_val_class_loc)
31627 continue;
31628 switch (a->dw_attr)
31629 {
31630 /* The following attributes allow both exprloc and loclist
31631 classes, so we can change them into a loclist. */
31632 case DW_AT_location:
31633 case DW_AT_string_length:
31634 case DW_AT_return_addr:
31635 case DW_AT_data_member_location:
31636 case DW_AT_frame_base:
31637 case DW_AT_segment:
31638 case DW_AT_static_link:
31639 case DW_AT_use_location:
31640 case DW_AT_vtable_elem_location:
31641 if (prev)
31642 {
31643 prev->dw_loc_next = NULL;
31644 prepend_loc_descr_to_each (l, AT_loc (a));
31645 }
31646 if (next)
31647 add_loc_descr_to_each (l, next);
31648 a->dw_attr_val.val_class = dw_val_class_loc_list;
31649 a->dw_attr_val.val_entry = NULL;
31650 a->dw_attr_val.v.val_loc_list = l;
31651 have_location_lists = true;
31652 return true;
31653 /* The following attributes allow both exprloc and reference
31654 classes, so if the whole expression is a single
31655 DW_OP_GNU_variable_value we can transform it into a reference. */
31656 case DW_AT_byte_size:
31657 case DW_AT_bit_size:
31658 case DW_AT_lower_bound:
31659 case DW_AT_upper_bound:
31660 case DW_AT_bit_stride:
31661 case DW_AT_count:
31662 case DW_AT_allocated:
31663 case DW_AT_associated:
31664 case DW_AT_byte_stride:
31665 if (prev == NULL && next == NULL)
31666 break;
31667 /* FALLTHRU */
31668 default:
31669 if (dwarf_strict)
31670 continue;
31671 break;
31672 }
31673 /* Create DW_TAG_variable that we can refer to. */
31674 gen_decl_die (decl, NULL_TREE, NULL,
31675 lookup_decl_die (current_function_decl));
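/* If the DIE now exists, rewrite the operand into a direct DIE
   reference, just like the fast path above.  */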
31676 ref = lookup_decl_die (decl);
31677 if (ref)
31678 {
31679 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31680 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31681 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31682 }
31683 continue;
31684 }
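/* Single-range location: splice its expression in place of the
   DW_OP_GNU_variable_value operation within the current exprloc.  */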
31685 if (prev)
31686 {
31687 prev->dw_loc_next = l->expr;
31688 add_loc_descr (&prev->dw_loc_next, next);
31689 free_loc_descr (loc, NULL);
31690 next = prev->dw_loc_next;
31691 }
31692 else
31693 {
31694 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31695 add_loc_descr (&loc, next);
31696 next = loc;
31697 }
31698 loc = prev;
31699 }
31700 return false;
31701 }
31702
31703 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31704
31705 static void
31706 resolve_variable_value (dw_die_ref die)
31707 {
31708 dw_attr_node *a;
31709 dw_loc_list_ref loc;
31710 unsigned ix;
31711
31712 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31713 switch (AT_class (a))
31714 {
31715 case dw_val_class_loc:
31716 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31717 break;
31718 /* FALLTHRU */
31719 case dw_val_class_loc_list:
31720 loc = AT_loc_list (a);
31721 gcc_assert (loc);
31722 for (; loc; loc = loc->dw_loc_next)
31723 resolve_variable_value_in_expr (a, loc->expr);
31724 break;
31725 default:
31726 break;
31727 }
31728 }
31729
31730 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31731 temporaries in the current function. */
31732
31733 static void
31734 resolve_variable_values (void)
31735 {
31736 if (!variable_value_hash || !current_function_decl)
31737 return;
31738
31739 struct variable_value_struct *node
31740 = variable_value_hash->find_with_hash (current_function_decl,
31741 DECL_UID (current_function_decl));
31742
31743 if (node == NULL)
31744 return;
31745
31746 unsigned int i;
31747 dw_die_ref die;
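/* Retry resolution for every DIE that note_variable_value_in_expr
   queued against the current function.  */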
31748 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31749 resolve_variable_value (die);
31750 }
31751
31752 /* Helper function for note_variable_value, handle one location
31753 expression. */
31754
31755 static void
31756 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31757 {
31758 for (; loc; loc = loc->dw_loc_next)
31759 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31760 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31761 {
31762 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31763 dw_die_ref ref = lookup_decl_die (decl);
31764 if (! ref && (flag_generate_lto || flag_generate_offload))
31765 {
31766 /* ??? This is somewhat of a hack because we do not create DIEs
31767 for variables not in BLOCK trees early, but when generating
31768 early LTO output we need the dw_val_class_decl_ref to be
31769 fully resolved. For fat LTO objects we'd also like to
31770 undo this after LTO dwarf output. */
31771 gcc_assert (DECL_CONTEXT (decl));
31772 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31773 gcc_assert (ctx != NULL);
31774 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31775 ref = lookup_decl_die (decl);
31776 gcc_assert (ref != NULL);
31777 }
31778 if (ref)
31779 {
31780 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31781 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31782 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31783 continue;
31784 }
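/* No DIE yet; if DECL lives in a function that has a DIE, queue this
   DIE under that function so resolve_variable_values can retry the
   lookup when the function is actually being compiled.  */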
31785 if (VAR_P (decl)
31786 && DECL_CONTEXT (decl)
31787 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31788 && lookup_decl_die (DECL_CONTEXT (decl)))
31789 {
31790 if (!variable_value_hash)
31791 variable_value_hash
31792 = hash_table<variable_value_hasher>::create_ggc (10);
31793
31794 tree fndecl = DECL_CONTEXT (decl);
31795 struct variable_value_struct *node;
31796 struct variable_value_struct **slot
31797 = variable_value_hash->find_slot_with_hash (fndecl,
31798 DECL_UID (fndecl),
31799 INSERT);
31800 if (*slot == NULL)
31801 {
31802 node = ggc_cleared_alloc<variable_value_struct> ();
31803 node->decl_id = DECL_UID (fndecl);
31804 *slot = node;
31805 }
31806 else
31807 node = *slot;
31808
31809 vec_safe_push (node->dies, die);
31810 }
31811 }
31812 }
31813
31814 /* Walk the tree rooted at DIE and note DIEs with DW_OP_GNU_variable_value
31815 still carrying a dw_val_class_decl_ref operand. */
31816
31817 static void
31818 note_variable_value (dw_die_ref die)
31819 {
31820 dw_die_ref c;
31821 dw_attr_node *a;
31822 dw_loc_list_ref loc;
31823 unsigned ix;
31824
31825 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31826 switch (AT_class (a))
31827 {
31828 case dw_val_class_loc_list:
31829 loc = AT_loc_list (a);
31830 gcc_assert (loc);
31831 if (!loc->noted_variable_value)
31832 {
31833 loc->noted_variable_value = 1;
31834 for (; loc; loc = loc->dw_loc_next)
31835 note_variable_value_in_expr (die, loc->expr);
31836 }
31837 break;
31838 case dw_val_class_loc:
31839 note_variable_value_in_expr (die, AT_loc (a));
31840 break;
31841 default:
31842 break;
31843 }
31844
31845 /* Mark children. */
31846 FOR_EACH_CHILD (die, c, note_variable_value (c));
31847 }
31848
31849 /* Perform any cleanups needed after the early debug generation pass
31850 has run. */
31851
31852 static void
31853 dwarf2out_early_finish (const char *filename)
31854 {
31855 set_early_dwarf s;
31856 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31857
31858 /* PCH might result in the DW_AT_producer string being restored from the
31859 header compilation, so always fill it with an empty string initially
31860 and overwrite it only here. */
31861 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31862 producer_string = gen_producer_string ();
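/* Drop the reference to the placeholder producer string and swap in
   the real one built above.  */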
31863 producer->dw_attr_val.v.val_str->refcount--;
31864 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31865
31866 /* Add the name for the main input file now. We delayed this from
31867 dwarf2out_init to avoid complications with PCH. */
31868 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31869 add_comp_dir_attribute (comp_unit_die ());
31870
31871 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31872 DW_AT_comp_dir into the .debug_line_str section. */
31873 if (!output_asm_line_debug_info ()
31874 && dwarf_version >= 5
31875 && DWARF5_USE_DEBUG_LINE_STR)
31876 {
31877 for (int i = 0; i < 2; i++)
31878 {
31879 dw_attr_node *a = get_AT (comp_unit_die (),
31880 i ? DW_AT_comp_dir : DW_AT_name);
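/* Keep the attribute as a plain string when indirection cannot save
   space: a .debug_line_str reference costs DWARF_OFFSET_SIZE bytes, so
   strings that fit in that many bytes (including the terminating NUL)
   stay inline.  */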
31881 if (a == NULL
31882 || AT_class (a) != dw_val_class_str
31883 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31884 continue;
31885
31886 if (! debug_line_str_hash)
31887 debug_line_str_hash
31888 = hash_table<indirect_string_hasher>::create_ggc (10);
31889
31890 struct indirect_string_node *node
31891 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31892 set_indirect_string (node);
31893 node->form = DW_FORM_line_strp;
31894 a->dw_attr_val.v.val_str->refcount--;
31895 a->dw_attr_val.v.val_str = node;
31896 }
31897 }
31898
31899 /* With LTO, early dwarf was already finished at compile time, so make
31900 sure to adjust the phase after annotating the LTRANS CU DIE. */
31901 if (in_lto_p)
31902 {
31903 early_dwarf_finished = true;
31904 return;
31905 }
31906
31907 /* Walk through the list of incomplete types again, trying once more to
31908 emit full debugging info for them. */
31909 retry_incomplete_types ();
31910
31911 /* The point here is to flush out the limbo list so that it is empty
31912 and we don't need to stream it for LTO. */
31913 flush_limbo_die_list ();
31914
31915 gen_scheduled_generic_parms_dies ();
31916 gen_remaining_tmpl_value_param_die_attribute ();
31917
31918 /* Add DW_AT_linkage_name for all deferred DIEs. */
31919 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31920 {
31921 tree decl = node->created_for;
31922 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31923 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31924 ended up in deferred_asm_name before we knew it was
31925 constant and never written to disk. */
31926 && DECL_ASSEMBLER_NAME (decl))
31927 {
31928 add_linkage_attr (node->die, decl);
31929 move_linkage_attr (node->die);
31930 }
31931 }
31932 deferred_asm_name = NULL;
31933
31934 if (flag_eliminate_unused_debug_types)
31935 prune_unused_types ();
31936
31937 /* Generate separate COMDAT sections for type DIEs. */
31938 if (use_debug_types)
31939 {
31940 break_out_comdat_types (comp_unit_die ());
31941
31942 /* Each new type_unit DIE was added to the limbo die list when created.
31943 Since these have all been added to comdat_type_list, clear the
31944 limbo die list. */
31945 limbo_die_list = NULL;
31946
31947 /* For each new comdat type unit, copy declarations for incomplete
31948 types to make the new unit self-contained (i.e., no direct
31949 references to the main compile unit). */
31950 for (comdat_type_node *ctnode = comdat_type_list;
31951 ctnode != NULL; ctnode = ctnode->next)
31952 copy_decls_for_unworthy_types (ctnode->root_die);
31953 copy_decls_for_unworthy_types (comp_unit_die ());
31954
31955 /* In the process of copying declarations from one unit to another,
31956 we may have left some declarations behind that are no longer
31957 referenced. Prune them. */
31958 prune_unused_types ();
31959 }
31960
31961 /* Traverse the DIEs and note those with DW_OP_GNU_variable_value still
31962 carrying a dw_val_class_decl_ref operand. */
31963 note_variable_value (comp_unit_die ());
31964 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31965 note_variable_value (node->die);
31966 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31967 ctnode = ctnode->next)
31968 note_variable_value (ctnode->root_die);
31969 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31970 note_variable_value (node->die);
31971
31972 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31973 both the main_cu and all skeleton TUs. Making this call unconditional
31974 would end up either adding a second copy of the AT_pubnames attribute, or
31975 requiring a special case in add_top_level_skeleton_die_attrs. */
31976 if (!dwarf_split_debug_info)
31977 add_AT_pubnames (comp_unit_die ());
31978
31979 /* The early debug phase is now finished. */
31980 early_dwarf_finished = true;
31981
31982 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31983 if ((!flag_generate_lto && !flag_generate_offload)
31984 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
31985 copy_lto_debug_sections operation of the simple object support in
31986 libiberty is not implemented for them yet. */
31987 || TARGET_PECOFF || TARGET_COFF)
31988 return;
31989
31990 /* Now that we are going to output for LTO, initialize sections and
31991 labels to the LTO variants. We don't need a random-seed postfix as
31992 with other LTO sections, since linking the LTO debug sections into
31993 one in a partial link is fine. */
31994 init_sections_and_labels (true);
31995
31996 /* The output below is modeled after dwarf2out_finish with all
31997 location-related output removed and some LTO-specific changes.
31998 Some refactoring might make both smaller and easier to match up. */
31999
32000 /* Traverse the DIEs and add sibling attributes to those DIEs
32001 that have children. */
32002 add_sibling_attributes (comp_unit_die ());
32003 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32004 add_sibling_attributes (node->die);
32005 for (comdat_type_node *ctnode = comdat_type_list;
32006 ctnode != NULL; ctnode = ctnode->next)
32007 add_sibling_attributes (ctnode->root_die);
32008
32009 /* AIX Assembler inserts the length, so adjust the reference to match the
32010 offset expected by debuggers. */
32011 strcpy (dl_section_ref, debug_line_section_label);
32012 if (XCOFF_DEBUGGING_INFO)
32013 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32014
32015 if (debug_info_level >= DINFO_LEVEL_TERSE)
32016 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32017
32018 if (have_macinfo)
32019 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32020 macinfo_section_label);
32021
32022 save_macinfo_strings ();
32023
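/* For split DWARF, assign an index to every .debug_str string so it can
   be referenced through the string offsets table.  */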
32024 if (dwarf_split_debug_info)
32025 {
32026 unsigned int index = 0;
32027 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32028 }
32029
32030 /* Output all of the compilation units. We put the main one last so that
32031 the offsets are available to output_pubnames. */
32032 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32033 output_comp_unit (node->die, 0, NULL);
32034
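/* Emit each comdat type unit at most once, tracking what has already
   been output in a local hash table.  */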
32035 hash_table<comdat_type_hasher> comdat_type_table (100);
32036 for (comdat_type_node *ctnode = comdat_type_list;
32037 ctnode != NULL; ctnode = ctnode->next)
32038 {
32039 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32040
32041 /* Don't output duplicate types. */
32042 if (*slot != HTAB_EMPTY_ENTRY)
32043 continue;
32044
32045 /* Add a pointer to the line table for the main compilation unit
32046 so that the debugger can make sense of DW_AT_decl_file
32047 attributes. */
32048 if (debug_info_level >= DINFO_LEVEL_TERSE)
32049 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32050 (!dwarf_split_debug_info
32051 ? debug_line_section_label
32052 : debug_skeleton_line_section_label));
32053
32054 output_comdat_type_unit (ctnode, true);
32055 *slot = ctnode;
32056 }
32057
32058 /* Attach a unique symbol to the main debuginfo section. */
32059 compute_comp_unit_symbol (comp_unit_die ());
32060
32061 /* Output the main compilation unit. We always need it if only for
32062 the CU symbol. */
32063 output_comp_unit (comp_unit_die (), true, NULL);
32064
32065 /* Output the abbreviation table. */
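/* Entry 0 of abbrev_die_table is a placeholder (abbreviation codes
   start at 1), so a length of 1 means no abbreviations were needed.  */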
32066 if (vec_safe_length (abbrev_die_table) != 1)
32067 {
32068 switch_to_section (debug_abbrev_section);
32069 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32070 output_abbrev_section ();
32071 }
32072
32073 /* Have to end the macro section. */
32074 if (have_macinfo)
32075 {
32076 /* We have to save macinfo state if we need to output it again
32077 for the FAT part of the object. */
32078 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32079 if (flag_fat_lto_objects)
32080 macinfo_table = macinfo_table->copy ();
32081
32082 switch_to_section (debug_macinfo_section);
32083 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32084 output_macinfo (debug_line_section_label, true);
32085 dw2_asm_output_data (1, 0, "End compilation unit");
32086
32087 if (flag_fat_lto_objects)
32088 {
32089 vec_free (macinfo_table);
32090 macinfo_table = saved_macinfo_table;
32091 }
32092 }
32093
32094 /* Emit a skeleton debug_line section. */
32095 switch_to_section (debug_line_section);
32096 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32097 output_line_info (true);
32098
32099 /* If we emitted any indirect strings, output the string table too. */
32100 if (debug_str_hash || skeleton_debug_str_hash)
32101 output_indirect_strings ();
32102 if (debug_line_str_hash)
32103 {
32104 switch_to_section (debug_line_str_section);
32105 const enum dwarf_form form = DW_FORM_line_strp;
32106 debug_line_str_hash->traverse<enum dwarf_form,
32107 output_indirect_string> (form);
32108 }
32109
32110 /* Switch back to the text section. */
32111 switch_to_section (text_section);
32112 }
32113
32114 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32115 within the same process. For use by toplev::finalize. */
32116
32117 void
32118 dwarf2out_c_finalize (void)
32119 {
32120 last_var_location_insn = NULL;
32121 cached_next_real_insn = NULL;
32122 used_rtx_array = NULL;
32123 incomplete_types = NULL;
32124 decl_scope_table = NULL;
32125 debug_info_section = NULL;
32126 debug_skeleton_info_section = NULL;
32127 debug_abbrev_section = NULL;
32128 debug_skeleton_abbrev_section = NULL;
32129 debug_aranges_section = NULL;
32130 debug_addr_section = NULL;
32131 debug_macinfo_section = NULL;
32132 debug_line_section = NULL;
32133 debug_skeleton_line_section = NULL;
32134 debug_loc_section = NULL;
32135 debug_pubnames_section = NULL;
32136 debug_pubtypes_section = NULL;
32137 debug_str_section = NULL;
32138 debug_line_str_section = NULL;
32139 debug_str_dwo_section = NULL;
32140 debug_str_offsets_section = NULL;
32141 debug_ranges_section = NULL;
32142 debug_frame_section = NULL;
32143 fde_vec = NULL;
32144 debug_str_hash = NULL;
32145 debug_line_str_hash = NULL;
32146 skeleton_debug_str_hash = NULL;
32147 dw2_string_counter = 0;
32148 have_multiple_function_sections = false;
32149 text_section_used = false;
32150 cold_text_section_used = false;
32151 cold_text_section = NULL;
32152 current_unit_personality = NULL;
32153
32154 early_dwarf = false;
32155 early_dwarf_finished = false;
32156
32157 next_die_offset = 0;
32158 single_comp_unit_die = NULL;
32159 comdat_type_list = NULL;
32160 limbo_die_list = NULL;
32161 file_table = NULL;
32162 decl_die_table = NULL;
32163 common_block_die_table = NULL;
32164 decl_loc_table = NULL;
32165 call_arg_locations = NULL;
32166 call_arg_loc_last = NULL;
32167 call_site_count = -1;
32168 tail_call_site_count = -1;
32169 cached_dw_loc_list_table = NULL;
32170 abbrev_die_table = NULL;
32171 delete dwarf_proc_stack_usage_map;
32172 dwarf_proc_stack_usage_map = NULL;
32173 line_info_label_num = 0;
32174 cur_line_info_table = NULL;
32175 text_section_line_info = NULL;
32176 cold_text_section_line_info = NULL;
32177 separate_line_info = NULL;
32178 info_section_emitted = false;
32179 pubname_table = NULL;
32180 pubtype_table = NULL;
32181 macinfo_table = NULL;
32182 ranges_table = NULL;
32183 ranges_by_label = NULL;
32184 rnglist_idx = 0;
32185 have_location_lists = false;
32186 loclabel_num = 0;
32187 poc_label_num = 0;
32188 last_emitted_file = NULL;
32189 label_num = 0;
32190 tmpl_value_parm_die_table = NULL;
32191 generic_type_instances = NULL;
32192 frame_pointer_fb_offset = 0;
32193 frame_pointer_fb_offset_valid = false;
32194 base_types.release ();
32195 XDELETEVEC (producer_string);
32196 producer_string = NULL;
32197 }
32198
32199 #include "gt-dwarf2out.h"
32200