1 /* Top-level LTO routines.
2 Copyright (C) 2009-2022 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "function.h"
26 #include "bitmap.h"
27 #include "basic-block.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "tree-streamer.h"
34 #include "cgraph.h"
35 #include "opts.h"
36 #include "toplev.h"
37 #include "stor-layout.h"
38 #include "symbol-summary.h"
39 #include "tree-vrp.h"
40 #include "ipa-prop.h"
41 #include "common.h"
42 #include "debug.h"
43 #include "lto.h"
44 #include "lto-section-names.h"
45 #include "splay-tree.h"
46 #include "lto-partition.h"
47 #include "context.h"
48 #include "pass_manager.h"
49 #include "ipa-fnsummary.h"
50 #include "ipa-utils.h"
51 #include "gomp-constants.h"
52 #include "lto-symtab.h"
53 #include "stringpool.h"
54 #include "fold-const.h"
55 #include "attribs.h"
56 #include "builtins.h"
57 #include "lto-common.h"
58 #include "tree-pretty-print.h"
59 #include "print-tree.h"
60
61 /* True when no new types are going to be streamed from the global stream. */
62
63 static bool type_streaming_finished = false;
64
65 GTY(()) tree first_personality_decl;
66
67 GTY(()) const unsigned char *lto_mode_identity_table;
68
69 /* Returns a hash code for P. */
70
71 static hashval_t
72 hash_name (const void *p)
73 {
74 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
75 return (hashval_t) htab_hash_string (ds->name);
76 }
77
78
79 /* Returns nonzero if P1 and P2 are equal. */
80
81 static int
82 eq_name (const void *p1, const void *p2)
83 {
84 const struct lto_section_slot *s1
85 = (const struct lto_section_slot *) p1;
86 const struct lto_section_slot *s2
87 = (const struct lto_section_slot *) p2;
88
89 return strcmp (s1->name, s2->name) == 0;
90 }
91
92 /* Free lto_section_slot. */
93
94 static void
95 free_with_string (void *arg)
96 {
97 struct lto_section_slot *s = (struct lto_section_slot *)arg;
98
99 free (CONST_CAST (char *, s->name));
100 free (arg);
101 }
102
103 /* Create section hash table. */
104
105 htab_t
106 lto_obj_create_section_hash_table (void)
107 {
108 return htab_create (37, hash_name, eq_name, free_with_string);
109 }
110
111 /* Delete an allocated integer KEY in the splay tree. */
112
113 static void
114 lto_splay_tree_delete_id (splay_tree_key key)
115 {
116 free ((void *) key);
117 }
118
119 /* Compare splay tree node ids A and B. */
120
121 static int
122 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
123 {
124 unsigned HOST_WIDE_INT ai;
125 unsigned HOST_WIDE_INT bi;
126
127 ai = *(unsigned HOST_WIDE_INT *) a;
128 bi = *(unsigned HOST_WIDE_INT *) b;
129
130 if (ai < bi)
131 return -1;
132 else if (ai > bi)
133 return 1;
134 return 0;
135 }
136
137 /* Look up splay tree node by ID in splay tree T. */
138
139 static splay_tree_node
140 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
141 {
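/* Taking the address of the local ID is safe here: the comparison
callback only dereferences the key during the lookup and does not
retain the pointer. */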
142 return splay_tree_lookup (t, (splay_tree_key) &id);
143 }
144
145 /* Check if KEY has ID. */
146
147 static bool
148 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
149 {
150 return *(unsigned HOST_WIDE_INT *) key == id;
151 }
152
153 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
154 The ID is allocated separately because we need HOST_WIDE_INTs which may
155 be wider than a splay_tree_key. */
156
157 static void
158 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
159 struct lto_file_decl_data *file_data)
160 {
161 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
162 *idp = id;
163 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
164 }
165
166 /* Create a splay tree. */
167
168 static splay_tree
169 lto_splay_tree_new (void)
170 {
171 return splay_tree_new (lto_splay_tree_compare_ids,
172 lto_splay_tree_delete_id,
173 NULL);
174 }
175
176 /* Decode the content of memory pointed to by DATA in the in decl
177 state object STATE. DATA_IN points to a data_in structure for
178 decoding. Return the address after the decoded object in the
179 input. */
180
181 static const uint32_t *
182 lto_read_in_decl_state (class data_in *data_in, const uint32_t *data,
183 struct lto_in_decl_state *state)
184 {
185 uint32_t ix;
186 tree decl;
187 uint32_t i, j;
188
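/* The first word packs the reader-cache index of the function decl in its
upper bits and the "compressed" flag in bit 0; each decl stream then
follows as a length word and that many reader-cache indices. */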
189 ix = *data++;
190 state->compressed = ix & 1;
191 ix /= 2;
192 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
193 if (!VAR_OR_FUNCTION_DECL_P (decl))
194 {
195 gcc_assert (decl == void_type_node);
196 decl = NULL_TREE;
197 }
198 state->fn_decl = decl;
199
200 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
201 {
202 uint32_t size = *data++;
203 vec<tree, va_gc> *decls = NULL;
204 vec_alloc (decls, size);
205
206 for (j = 0; j < size; j++)
207 vec_safe_push (decls,
208 streamer_tree_cache_get_tree (data_in->reader_cache,
209 data[j]));
210
211 state->streams[i] = decls;
212 data += size;
213 }
214
215 return data;
216 }
217
218
219 /* Global canonical type table. */
220 static htab_t gimple_canonical_types;
221 static hash_map<const_tree, hashval_t> *canonical_type_hash_cache;
222 static unsigned long num_canonical_type_hash_entries;
223 static unsigned long num_canonical_type_hash_queries;
224
225 /* Types postponed for registration to the canonical type table.
226 During streaming we postpone all TYPE_CXX_ODR_P types so we can later
227 decide whether there is a conflict with a non-ODR type or not. */
228 static GTY(()) vec<tree, va_gc> *types_to_register = NULL;
229
230 static void iterative_hash_canonical_type (tree type, inchash::hash &hstate);
231 static hashval_t gimple_canonical_type_hash (const void *p);
232 static hashval_t gimple_register_canonical_type_1 (tree t, hashval_t hash);
233
234 /* Return a hash value for gimple type TYPE.
235
236 The hash value returned is equal for types considered compatible
237 by gimple_canonical_types_compatible_p. */
238
239 static hashval_t
240 hash_canonical_type (tree type)
241 {
242 inchash::hash hstate;
243 enum tree_code code;
244
245 /* We compute alias sets only for types that need them.
246 Be sure we do not recurse to something else, as we cannot hash incomplete
247 types in a way that gives them the same hash value as compatible complete
248 types. */
249 gcc_checking_assert (type_with_alias_set_p (type));
250
251 /* Combine a few common features of types so that types are grouped into
252 smaller sets; when searching for existing matching types to merge,
253 only existing types having the same features as the new type will be
254 checked. */
255 code = tree_code_for_canonical_type_merging (TREE_CODE (type));
256 hstate.add_int (code);
257 hstate.add_int (TYPE_MODE (type));
258
259 /* Incorporate common features of numerical types. */
260 if (INTEGRAL_TYPE_P (type)
261 || SCALAR_FLOAT_TYPE_P (type)
262 || FIXED_POINT_TYPE_P (type)
263 || TREE_CODE (type) == OFFSET_TYPE
264 || POINTER_TYPE_P (type))
265 {
266 hstate.add_int (TYPE_PRECISION (type));
267 if (!type_with_interoperable_signedness (type))
268 hstate.add_int (TYPE_UNSIGNED (type));
269 }
270
271 if (VECTOR_TYPE_P (type))
272 {
273 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
274 hstate.add_int (TYPE_UNSIGNED (type));
275 }
276
277 if (TREE_CODE (type) == COMPLEX_TYPE)
278 hstate.add_int (TYPE_UNSIGNED (type));
279
280 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
281 interoperable with "signed char". Unless all frontends are revisited to
282 agree on these types, we must ignore the flag completely. */
283
284 /* The Fortran standard defines a C_PTR type that is compatible with every
285 C pointer. For this reason we need to glob all pointers into one.
286 Still, pointers in different address spaces are not compatible. */
287 if (POINTER_TYPE_P (type))
288 hstate.add_int (TYPE_ADDR_SPACE (TREE_TYPE (type)));
289
290 /* For array types hash the domain bounds and the string flag. */
291 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
292 {
293 hstate.add_int (TYPE_STRING_FLAG (type));
294 /* OMP lowering can introduce error_mark_node in place of
295 random local decls in types. */
296 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
297 inchash::add_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), hstate);
298 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
299 inchash::add_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), hstate);
300 }
301
302 /* Recurse for aggregates with a single element type. */
303 if (TREE_CODE (type) == ARRAY_TYPE
304 || TREE_CODE (type) == COMPLEX_TYPE
305 || TREE_CODE (type) == VECTOR_TYPE)
306 iterative_hash_canonical_type (TREE_TYPE (type), hstate);
307
308 /* Incorporate function return and argument types. */
309 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
310 {
311 unsigned na;
312 tree p;
313
314 iterative_hash_canonical_type (TREE_TYPE (type), hstate);
315
316 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
317 {
318 iterative_hash_canonical_type (TREE_VALUE (p), hstate);
319 na++;
320 }
321
322 hstate.add_int (na);
323 }
324
325 if (RECORD_OR_UNION_TYPE_P (type))
326 {
327 unsigned nf;
328 tree f;
329
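/* Only FIELD_DECLs not known to be zero-sized participate in the hash;
this mirrors the field walk in gimple_canonical_types_compatible_p so
that hash equality agrees with structural compatibility. */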
330 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
331 if (TREE_CODE (f) == FIELD_DECL
332 && (! DECL_SIZE (f)
333 || ! integer_zerop (DECL_SIZE (f))))
334 {
335 iterative_hash_canonical_type (TREE_TYPE (f), hstate);
336 nf++;
337 }
338
339 hstate.add_int (nf);
340 }
341
342 return hstate.end();
343 }
344
345 /* Combine a hash value for gimple type TYPE into HSTATE. */
346
347 static void
348 iterative_hash_canonical_type (tree type, inchash::hash &hstate)
349 {
350 hashval_t v;
351
352 /* All type variants have same TYPE_CANONICAL. */
353 type = TYPE_MAIN_VARIANT (type);
354
355 if (!canonical_type_used_p (type))
356 v = hash_canonical_type (type);
357 /* An already processed type. */
358 else if (TYPE_CANONICAL (type))
359 {
360 type = TYPE_CANONICAL (type);
361 v = gimple_canonical_type_hash (type);
362 }
363 else
364 {
365 /* Canonical types should not be able to form SCCs by design; this
366 recursion is just because we do not register canonical types in
367 optimal order. To avoid quadratic behavior also register the
368 type here. */
369 v = hash_canonical_type (type);
370 v = gimple_register_canonical_type_1 (type, v);
371 }
372 hstate.merge_hash (v);
373 }
374
375 /* Returns the hash for a canonical type P. */
376
377 static hashval_t
378 gimple_canonical_type_hash (const void *p)
379 {
380 num_canonical_type_hash_queries++;
381 hashval_t *slot = canonical_type_hash_cache->get ((const_tree) p);
382 gcc_assert (slot != NULL);
383 return *slot;
384 }
385
386
387
388 /* Returns nonzero if P1 and P2 are equal. */
389
390 static int
391 gimple_canonical_type_eq (const void *p1, const void *p2)
392 {
393 const_tree t1 = (const_tree) p1;
394 const_tree t2 = (const_tree) p2;
395 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
396 CONST_CAST_TREE (t2));
397 }
398
399 /* Main worker for gimple_register_canonical_type. */
400
401 static hashval_t
402 gimple_register_canonical_type_1 (tree t, hashval_t hash)
403 {
404 void **slot;
405
406 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t)
407 && type_with_alias_set_p (t)
408 && canonical_type_used_p (t));
409
410 /* ODR types for which there is no ODR violation and for which we did not
411 record a structurally equivalent non-ODR type can be treated as unique by
412 their name.
413
414 The hash passed to gimple_register_canonical_type_1 is a structural hash
415 that we can use to look up a structurally equivalent non-ODR type.
416 In case we decide to treat the type as a unique ODR type, we recompute the
417 hash based on its name and let the TBAA machinery know about our decision. */
418 if (RECORD_OR_UNION_TYPE_P (t) && odr_type_p (t)
419 && TYPE_CXX_ODR_P (t) && !odr_type_violation_reported_p (t))
420 {
421 /* Anonymous namespace types never conflict with non-C++ types. */
422 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
423 slot = NULL;
424 else
425 {
426 /* Here we rely on the fact that all non-ODR types were inserted into
427 the canonical type hash and thus we can safely detect conflicts between
428 ODR types and interoperable non-ODR types. */
429 gcc_checking_assert (type_streaming_finished
430 && TYPE_MAIN_VARIANT (t) == t);
431 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash,
432 NO_INSERT);
433 }
434 if (slot && !TYPE_CXX_ODR_P (*(tree *)slot))
435 {
436 tree nonodr = *(tree *)slot;
437 gcc_checking_assert (!flag_ltrans);
438 if (symtab->dump_file)
439 {
440 fprintf (symtab->dump_file,
441 "ODR and non-ODR type conflict: ");
442 print_generic_expr (symtab->dump_file, t);
443 fprintf (symtab->dump_file, " and ");
444 print_generic_expr (symtab->dump_file, nonodr);
445 fprintf (symtab->dump_file, " mangled:%s\n",
446 IDENTIFIER_POINTER
447 (DECL_ASSEMBLER_NAME (TYPE_NAME (t))));
448 }
449 /* Set canonical for T and all other ODR equivalent duplicates
450 including incomplete structures. */
451 set_type_canonical_for_odr_type (t, nonodr);
452 }
453 else
454 {
455 tree prevail = prevailing_odr_type (t);
456
457 if (symtab->dump_file)
458 {
459 fprintf (symtab->dump_file,
460 "New canonical ODR type: ");
461 print_generic_expr (symtab->dump_file, t);
462 fprintf (symtab->dump_file, " mangled:%s\n",
463 IDENTIFIER_POINTER
464 (DECL_ASSEMBLER_NAME (TYPE_NAME (t))));
465 }
466 /* Set canonical for T and all other ODR equivalent duplicates
467 including incomplete structures. */
468 set_type_canonical_for_odr_type (t, prevail);
469 enable_odr_based_tbaa (t);
470 if (!type_in_anonymous_namespace_p (t))
471 hash = htab_hash_string (IDENTIFIER_POINTER
472 (DECL_ASSEMBLER_NAME
473 (TYPE_NAME (t))));
474 else
475 hash = TYPE_UID (t);
476
477 /* All variants of t now have TYPE_CANONICAL set to prevail.
478 Update canonical type hash cache accordingly. */
479 num_canonical_type_hash_entries++;
480 bool existed_p = canonical_type_hash_cache->put (prevail, hash);
481 gcc_checking_assert (!existed_p);
482 }
483 return hash;
484 }
485
486 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
487 if (*slot)
488 {
489 tree new_type = (tree)(*slot);
490 gcc_checking_assert (new_type != t);
491 TYPE_CANONICAL (t) = new_type;
492 }
493 else
494 {
495 TYPE_CANONICAL (t) = t;
496 *slot = (void *) t;
497 /* Cache the just computed hash value. */
498 num_canonical_type_hash_entries++;
499 bool existed_p = canonical_type_hash_cache->put (t, hash);
500 gcc_assert (!existed_p);
501 }
502 return hash;
503 }
504
505 /* Register type T in the global type table gimple_types and set
506 TYPE_CANONICAL of T accordingly.
507 This is used by LTO to merge structurally equivalent types for
508 type-based aliasing purposes across different TUs and languages.
509
510 ??? This merging does not exactly match how the tree.cc middle-end
511 functions will assign TYPE_CANONICAL when new types are created
512 during optimization (which at least happens for pointer and array
513 types). */
514
515 static void
516 gimple_register_canonical_type (tree t)
517 {
518 if (TYPE_CANONICAL (t) || !type_with_alias_set_p (t)
519 || !canonical_type_used_p (t))
520 return;
521
522 /* Canonical types are the same among all complete variants. */
523 if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (t)))
524 TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t));
525 else
526 {
527 hashval_t h = hash_canonical_type (TYPE_MAIN_VARIANT (t));
528 gimple_register_canonical_type_1 (TYPE_MAIN_VARIANT (t), h);
529 TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t));
530 }
531 }
532
533 /* Re-compute TYPE_CANONICAL for NODE and related types. */
534
535 static void
536 lto_register_canonical_types (tree node, bool first_p)
537 {
538 if (!node
539 || !TYPE_P (node))
540 return;
541
542 if (first_p)
543 TYPE_CANONICAL (node) = NULL_TREE;
544
545 if (POINTER_TYPE_P (node)
546 || TREE_CODE (node) == COMPLEX_TYPE
547 || TREE_CODE (node) == ARRAY_TYPE)
548 lto_register_canonical_types (TREE_TYPE (node), first_p);
549
550 if (!first_p)
551 gimple_register_canonical_type (node);
552 }
553
554 /* Finish canonical type calculation: after all units have been streamed in
555 we can check whether a given ODR type structurally conflicts with a non-ODR
556 type. In the first case we set the canonical type according to the
557 canonical type hash. In the second case we use type names. */
558
559 static void
560 lto_register_canonical_types_for_odr_types ()
561 {
562 tree t;
563 unsigned int i;
564
565 if (!types_to_register)
566 return;
567
568 type_streaming_finished = true;
569
570 /* Be sure that no types derived from ODR types were
571 inserted into the hash table. */
572 if (flag_checking)
573 FOR_EACH_VEC_ELT (*types_to_register, i, t)
574 gcc_assert (!TYPE_CANONICAL (t));
575
576 /* Register all remaining types. */
577 FOR_EACH_VEC_ELT (*types_to_register, i, t)
578 {
579 /* For pre-streamed types like va-arg it is possible that the main variant
580 is !CXX_ODR_P while the variant (which is streamed) is.
581 Copy CXX_ODR_P to make the type verifier happy. This is safe because
582 in canonical type calculation we only consider main variants.
583 However, we cannot change this flag before streaming is finished
584 so as not to affect tree merging. */
585 TYPE_CXX_ODR_P (t) = TYPE_CXX_ODR_P (TYPE_MAIN_VARIANT (t));
586 if (!TYPE_CANONICAL (t))
587 gimple_register_canonical_type (t);
588 }
589 }
590
591
592 /* Remember trees that contain references to declarations. */
593 vec <tree, va_gc> *tree_with_vars;
594
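/* Helpers for the mentions_vars_p* predicates below. CHECK_VAR makes the
enclosing function return true when TT is a public or external VAR_DECL
or FUNCTION_DECL, i.e. a reference that may need fixing up after symbol
merging; CHECK_NO_VAR asserts that the field never points to a VAR_DECL
or FUNCTION_DECL at all. */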
595 #define CHECK_VAR(tt) \
596 do \
597 { \
598 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
599 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
600 return true; \
601 } while (0)
602
603 #define CHECK_NO_VAR(tt) \
604 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
605
606 /* Check presence of pointers to decls in fields of a tree_typed T. */
607
608 static inline bool
609 mentions_vars_p_typed (tree t)
610 {
611 CHECK_NO_VAR (TREE_TYPE (t));
612 return false;
613 }
614
615 /* Check presence of pointers to decls in fields of a tree_common T. */
616
617 static inline bool
618 mentions_vars_p_common (tree t)
619 {
620 if (mentions_vars_p_typed (t))
621 return true;
622 CHECK_NO_VAR (TREE_CHAIN (t));
623 return false;
624 }
625
626 /* Check presence of pointers to decls in fields of a decl_minimal T. */
627
628 static inline bool
629 mentions_vars_p_decl_minimal (tree t)
630 {
631 if (mentions_vars_p_common (t))
632 return true;
633 CHECK_NO_VAR (DECL_NAME (t));
634 CHECK_VAR (DECL_CONTEXT (t));
635 return false;
636 }
637
638 /* Check presence of pointers to decls in fields of a decl_common T. */
639
640 static inline bool
641 mentions_vars_p_decl_common (tree t)
642 {
643 if (mentions_vars_p_decl_minimal (t))
644 return true;
645 CHECK_VAR (DECL_SIZE (t));
646 CHECK_VAR (DECL_SIZE_UNIT (t));
647 CHECK_VAR (DECL_INITIAL (t));
648 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
649 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
650 return false;
651 }
652
653 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
654
655 static inline bool
656 mentions_vars_p_decl_with_vis (tree t)
657 {
658 if (mentions_vars_p_decl_common (t))
659 return true;
660
661 /* Accessor macro has side-effects, use field-name here. */
662 CHECK_NO_VAR (DECL_ASSEMBLER_NAME_RAW (t));
663 return false;
664 }
665
666 /* Check presence of pointers to decls in fields of a decl_non_common T. */
667
668 static inline bool
669 mentions_vars_p_decl_non_common (tree t)
670 {
671 if (mentions_vars_p_decl_with_vis (t))
672 return true;
673 CHECK_NO_VAR (DECL_RESULT_FLD (t));
674 return false;
675 }
676
677 /* Check presence of pointers to decls in fields of a function decl T. */
678
679 static bool
680 mentions_vars_p_function (tree t)
681 {
682 if (mentions_vars_p_decl_non_common (t))
683 return true;
684 CHECK_NO_VAR (DECL_ARGUMENTS (t));
685 CHECK_NO_VAR (DECL_VINDEX (t));
686 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
687 return false;
688 }
689
690 /* Check presence of pointers to decls in fields of a field_decl T. */
691
692 static bool
693 mentions_vars_p_field_decl (tree t)
694 {
695 if (mentions_vars_p_decl_common (t))
696 return true;
697 CHECK_VAR (DECL_FIELD_OFFSET (t));
698 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
699 CHECK_NO_VAR (DECL_QUALIFIER (t));
700 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
701 CHECK_NO_VAR (DECL_FCONTEXT (t));
702 return false;
703 }
704
705 /* Check presence of pointers to decls in fields of a type T. */
706
707 static bool
708 mentions_vars_p_type (tree t)
709 {
710 if (mentions_vars_p_common (t))
711 return true;
712 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
713 CHECK_VAR (TYPE_SIZE (t));
714 CHECK_VAR (TYPE_SIZE_UNIT (t));
715 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
716 CHECK_NO_VAR (TYPE_NAME (t));
717
718 CHECK_VAR (TYPE_MIN_VALUE_RAW (t));
719 CHECK_VAR (TYPE_MAX_VALUE_RAW (t));
720
721 /* Accessor is for derived node types only. */
722 CHECK_NO_VAR (TYPE_LANG_SLOT_1 (t));
723
724 CHECK_VAR (TYPE_CONTEXT (t));
725 CHECK_NO_VAR (TYPE_CANONICAL (t));
726 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
727 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
728 return false;
729 }
730
731 /* Check presence of pointers to decls in fields of a BINFO T. */
732
733 static bool
734 mentions_vars_p_binfo (tree t)
735 {
736 unsigned HOST_WIDE_INT i, n;
737
738 if (mentions_vars_p_common (t))
739 return true;
740 CHECK_VAR (BINFO_VTABLE (t));
741 CHECK_NO_VAR (BINFO_OFFSET (t));
742 CHECK_NO_VAR (BINFO_VIRTUALS (t));
743 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
744 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
745 for (i = 0; i < n; i++)
746 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
747 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
748 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
749 n = BINFO_N_BASE_BINFOS (t);
750 for (i = 0; i < n; i++)
751 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
752 return false;
753 }
754
755 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
756
757 static bool
758 mentions_vars_p_constructor (tree t)
759 {
760 unsigned HOST_WIDE_INT idx;
761 constructor_elt *ce;
762
763 if (mentions_vars_p_typed (t))
764 return true;
765
766 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
767 {
768 CHECK_NO_VAR (ce->index);
769 CHECK_VAR (ce->value);
770 }
771 return false;
772 }
773
774 /* Check presence of pointers to decls in fields of an expression tree T. */
775
776 static bool
777 mentions_vars_p_expr (tree t)
778 {
779 int i;
780 if (mentions_vars_p_typed (t))
781 return true;
782 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
783 CHECK_VAR (TREE_OPERAND (t, i));
784 return false;
785 }
786
787 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
788
789 static bool
790 mentions_vars_p_omp_clause (tree t)
791 {
792 int i;
793 if (mentions_vars_p_common (t))
794 return true;
795 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
796 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
797 return false;
798 }
799
800 /* Check presence of pointers to decls that need later fixup in T. */
801
802 static bool
803 mentions_vars_p (tree t)
804 {
805 switch (TREE_CODE (t))
806 {
807 case IDENTIFIER_NODE:
808 break;
809
810 case TREE_LIST:
811 CHECK_VAR (TREE_VALUE (t));
812 CHECK_VAR (TREE_PURPOSE (t));
813 CHECK_NO_VAR (TREE_CHAIN (t));
814 break;
815
816 case FIELD_DECL:
817 return mentions_vars_p_field_decl (t);
818
819 case LABEL_DECL:
820 case CONST_DECL:
821 case PARM_DECL:
822 case RESULT_DECL:
823 case IMPORTED_DECL:
824 case NAMESPACE_DECL:
825 case NAMELIST_DECL:
826 return mentions_vars_p_decl_common (t);
827
828 case VAR_DECL:
829 return mentions_vars_p_decl_with_vis (t);
830
831 case TYPE_DECL:
832 return mentions_vars_p_decl_non_common (t);
833
834 case FUNCTION_DECL:
835 return mentions_vars_p_function (t);
836
837 case TREE_BINFO:
838 return mentions_vars_p_binfo (t);
839
840 case PLACEHOLDER_EXPR:
841 return mentions_vars_p_common (t);
842
843 case BLOCK:
844 case TRANSLATION_UNIT_DECL:
845 case OPTIMIZATION_NODE:
846 case TARGET_OPTION_NODE:
847 break;
848
849 case CONSTRUCTOR:
850 return mentions_vars_p_constructor (t);
851
852 case OMP_CLAUSE:
853 return mentions_vars_p_omp_clause (t);
854
855 default:
856 if (TYPE_P (t))
857 {
858 if (mentions_vars_p_type (t))
859 return true;
860 }
861 else if (EXPR_P (t))
862 {
863 if (mentions_vars_p_expr (t))
864 return true;
865 }
866 else if (CONSTANT_CLASS_P (t))
867 CHECK_NO_VAR (TREE_TYPE (t));
868 else
869 gcc_unreachable ();
870 }
871 return false;
872 }
873
874
875 /* Return the resolution for the decl with index INDEX from DATA_IN. */
876
877 static enum ld_plugin_symbol_resolution
878 get_resolution (class data_in *data_in, unsigned index)
879 {
880 if (data_in->globals_resolution.exists ())
881 {
882 ld_plugin_symbol_resolution_t ret;
883 /* We can have references to functions that are not emitted, in
884 DECL_FUNCTION_PERSONALITY at least. So we can and indeed have
885 to return LDPR_UNKNOWN in some cases. */
886 if (data_in->globals_resolution.length () <= index)
887 return LDPR_UNKNOWN;
888 ret = data_in->globals_resolution[index];
889 return ret;
890 }
891 else
892 /* Delay resolution finding until decl merging. */
893 return LDPR_UNKNOWN;
894 }
895
896 /* We need to record resolutions until the symbol table is read. */
897 static void
898 register_resolution (struct lto_file_decl_data *file_data, tree decl,
899 enum ld_plugin_symbol_resolution resolution)
900 {
901 bool existed;
902 if (resolution == LDPR_UNKNOWN)
903 return;
904 if (!file_data->resolution_map)
905 file_data->resolution_map
906 = new hash_map<tree, ld_plugin_symbol_resolution>;
907 ld_plugin_symbol_resolution_t &res
908 = file_data->resolution_map->get_or_insert (decl, &existed);
909 if (!existed
910 || resolution == LDPR_PREVAILING_DEF_IRONLY
911 || resolution == LDPR_PREVAILING_DEF
912 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
913 res = resolution;
914 }
915
916 /* Register DECL with the global symbol table and change its
917 name if necessary to avoid name clashes for static globals across
918 different files. */
919
920 static void
921 lto_register_var_decl_in_symtab (class data_in *data_in, tree decl,
922 unsigned ix)
923 {
924 tree context;
925
926 /* Variable has file scope, not local. */
927 if (!TREE_PUBLIC (decl)
928 && !((context = decl_function_context (decl))
929 && auto_var_in_fn_p (decl, context)))
930 rest_of_decl_compilation (decl, 1, 0);
931
932 /* If this variable has already been declared, queue the
933 declaration for merging. */
934 if (TREE_PUBLIC (decl))
935 register_resolution (data_in->file_data,
936 decl, get_resolution (data_in, ix));
937 }
938
939
940 /* Register DECL with the global symbol table and change its
941 name if necessary to avoid name clashes for static globals across
942 different files. DATA_IN contains descriptors and tables for the
943 file being read. */
944
945 static void
946 lto_register_function_decl_in_symtab (class data_in *data_in, tree decl,
947 unsigned ix)
948 {
949 /* If this function has already been declared, queue the
950 declaration for merging. */
951 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT_P (decl))
952 register_resolution (data_in->file_data,
953 decl, get_resolution (data_in, ix));
954 }
955
956 /* Check if T is a decl that needs to have its resolution info registered. */
957
958 static void
959 lto_maybe_register_decl (class data_in *data_in, tree t, unsigned ix)
960 {
961 if (TREE_CODE (t) == VAR_DECL)
962 lto_register_var_decl_in_symtab (data_in, t, ix);
963 else if (TREE_CODE (t) == FUNCTION_DECL
964 && !fndecl_built_in_p (t))
965 lto_register_function_decl_in_symtab (data_in, t, ix);
966 }
967
968
969 /* For the type T re-materialize it in the type variant list and
970 the pointer/reference-to chains. */
971
972 static void
973 lto_fixup_prevailing_type (tree t)
974 {
975 /* The following re-creates proper variant lists while fixing up
976 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
977 variant list state before fixup is broken. */
978
979 /* If we are not our own variant leader, link us into our new leader's
980 variant list. */
981 if (TYPE_MAIN_VARIANT (t) != t)
982 {
983 tree mv = TYPE_MAIN_VARIANT (t);
984 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
985 TYPE_NEXT_VARIANT (mv) = t;
986 }
987 else if (!TYPE_ATTRIBUTES (t))
988 {
989 /* The following reconstructs the pointer chains
990 of the new pointed-to type if we are a main variant. We do
991 not stream those so they are broken before fixup.
992 Don't add it if despite being main variant it has
993 attributes (then it was created with build_distinct_type_copy).
994 Similarly don't add TYPE_REF_IS_RVALUE REFERENCE_TYPEs.
995 Don't add it if there is something in the chain already. */
996 if (TREE_CODE (t) == POINTER_TYPE)
997 {
998 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
999 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1000 }
1001 else if (TREE_CODE (t) == REFERENCE_TYPE && !TYPE_REF_IS_RVALUE (t))
1002 {
1003 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1004 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1005 }
1006 }
1007 }
1008
1009
1010 /* We keep prevailing tree SCCs in a hashtable with manual collision
1011 handling (in case all hashes compare the same) and keep the colliding
1012 entries in the tree_scc->next chain. */
1013
1014 struct tree_scc
1015 {
1016 tree_scc *next;
1017 /* Hash of the whole SCC. */
1018 hashval_t hash;
1019 /* Number of trees in the SCC. */
1020 unsigned len;
1021 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1022 which share the same individual tree hash). */
1023 unsigned entry_len;
1024 /* The members of the SCC.
1025 We only need to remember the first entry node candidate for prevailing
1026 SCCs (but of course have access to all entries for SCCs we are
1027 processing).
1028 ??? For prevailing SCCs we really only need hash and the first
1029 entry candidate, but that's too awkward to implement. */
1030 tree entries[1];
1031 };
1032
1033 struct tree_scc_hasher : nofree_ptr_hash <tree_scc>
1034 {
1035 static inline hashval_t hash (const tree_scc *);
1036 static inline bool equal (const tree_scc *, const tree_scc *);
1037 };
1038
1039 hashval_t
1040 tree_scc_hasher::hash (const tree_scc *scc)
1041 {
1042 return scc->hash;
1043 }
1044
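/* Hash-table equality only compares the cheap summary fields; the full
structural comparison is done by compare_tree_sccs on the collision
chain of candidate SCCs. */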
1045 bool
1046 tree_scc_hasher::equal (const tree_scc *scc1, const tree_scc *scc2)
1047 {
1048 if (scc1->hash != scc2->hash
1049 || scc1->len != scc2->len
1050 || scc1->entry_len != scc2->entry_len)
1051 return false;
1052 return true;
1053 }
1054
1055 static hash_table<tree_scc_hasher> *tree_scc_hash;
1056 static struct obstack tree_scc_hash_obstack;
1057
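/* Counters for SCC streaming and merging statistics, printed as part of
the LTO report. */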
1058 static unsigned long num_merged_types;
1059 static unsigned long num_prevailing_types;
1060 static unsigned long num_type_scc_trees;
1061 static unsigned long total_scc_size;
1062 static unsigned long num_sccs_read;
1063 static unsigned long num_unshared_trees_read;
1064 static unsigned long total_scc_size_merged;
1065 static unsigned long num_sccs_merged;
1066 static unsigned long num_scc_compares;
1067 static unsigned long num_scc_compare_collisions;
1068
1069
1070 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1071 recursing through in-SCC tree edges. Returns true if the SCCs entered
1072 through T1 and T2 are equal and fills in *MAP with the pairs of
1073 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1074
1075 static bool
1076 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1077 {
1078 enum tree_code code;
1079
1080 /* Mark already visited nodes. */
1081 TREE_ASM_WRITTEN (t2) = 1;
1082
1083 /* Push the pair onto map. */
1084 (*map)[0] = t1;
1085 (*map)[1] = t2;
1086 *map = *map + 2;
1087
1088 /* Compare value-fields. */
1089 #define compare_values(X) \
1090 do { \
1091 if (X(t1) != X(t2)) \
1092 return false; \
1093 } while (0)
1094
1095 compare_values (TREE_CODE);
1096 code = TREE_CODE (t1);
1097
1098 /* If we end up comparing translation unit decls we either forgot to mark
1099 some SCC as local or we compare too much. */
1100 gcc_checking_assert (code != TRANSLATION_UNIT_DECL);
1101
1102 if (!TYPE_P (t1))
1103 {
1104 compare_values (TREE_SIDE_EFFECTS);
1105 compare_values (TREE_CONSTANT);
1106 compare_values (TREE_READONLY);
1107 compare_values (TREE_PUBLIC);
1108 }
1109 compare_values (TREE_ADDRESSABLE);
1110 compare_values (TREE_THIS_VOLATILE);
1111 if (DECL_P (t1))
1112 compare_values (DECL_UNSIGNED);
1113 else if (TYPE_P (t1))
1114 compare_values (TYPE_UNSIGNED);
1115 if (TYPE_P (t1))
1116 compare_values (TYPE_ARTIFICIAL);
1117 else
1118 compare_values (TREE_NO_WARNING);
1119 compare_values (TREE_NOTHROW);
1120 compare_values (TREE_STATIC);
1121 if (code != TREE_BINFO)
1122 compare_values (TREE_PRIVATE);
1123 compare_values (TREE_PROTECTED);
1124 compare_values (TREE_DEPRECATED);
1125 if (TYPE_P (t1))
1126 {
1127 if (AGGREGATE_TYPE_P (t1))
1128 compare_values (TYPE_REVERSE_STORAGE_ORDER);
1129 else
1130 compare_values (TYPE_SATURATING);
1131 compare_values (TYPE_ADDR_SPACE);
1132 }
1133 else if (code == SSA_NAME)
1134 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1135
1136 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1137 {
1138 if (wi::to_wide (t1) != wi::to_wide (t2))
1139 return false;
1140 }
1141
1142 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1143 {
1144 /* ??? No suitable compare routine available. */
1145 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1146 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1147 if (r1.cl != r2.cl
1148 || r1.decimal != r2.decimal
1149 || r1.sign != r2.sign
1150 || r1.signalling != r2.signalling
1151 || r1.canonical != r2.canonical
1152 || r1.uexp != r2.uexp)
1153 return false;
1154 for (unsigned i = 0; i < SIGSZ; ++i)
1155 if (r1.sig[i] != r2.sig[i])
1156 return false;
1157 }
1158
1159 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1160 if (!fixed_compare (EQ_EXPR,
1161 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1162 return false;
1163
1164 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1165 {
1166 compare_values (VECTOR_CST_LOG2_NPATTERNS);
1167 compare_values (VECTOR_CST_NELTS_PER_PATTERN);
1168 }
1169
1170 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1171 {
1172 compare_values (DECL_MODE);
1173 compare_values (DECL_NONLOCAL);
1174 compare_values (DECL_VIRTUAL_P);
1175 compare_values (DECL_IGNORED_P);
1176 compare_values (DECL_ABSTRACT_P);
1177 compare_values (DECL_ARTIFICIAL);
1178 compare_values (DECL_USER_ALIGN);
1179 compare_values (DECL_PRESERVE_P);
1180 compare_values (DECL_EXTERNAL);
1181 compare_values (DECL_NOT_GIMPLE_REG_P);
1182 compare_values (DECL_ALIGN);
1183 if (code == LABEL_DECL)
1184 {
1185 compare_values (EH_LANDING_PAD_NR);
1186 compare_values (LABEL_DECL_UID);
1187 }
1188 else if (code == FIELD_DECL)
1189 {
1190 compare_values (DECL_PACKED);
1191 compare_values (DECL_NONADDRESSABLE_P);
1192 compare_values (DECL_PADDING_P);
1193 compare_values (DECL_FIELD_ABI_IGNORED);
1194 compare_values (DECL_FIELD_CXX_ZERO_WIDTH_BIT_FIELD);
1195 compare_values (DECL_OFFSET_ALIGN);
1196 }
1197 else if (code == VAR_DECL)
1198 {
1199 compare_values (DECL_HAS_DEBUG_EXPR_P);
1200 compare_values (DECL_NONLOCAL_FRAME);
1201 }
1202 if (code == RESULT_DECL
1203 || code == PARM_DECL
1204 || code == VAR_DECL)
1205 {
1206 compare_values (DECL_BY_REFERENCE);
1207 if (code == VAR_DECL
1208 || code == PARM_DECL)
1209 compare_values (DECL_HAS_VALUE_EXPR_P);
1210 }
1211 }
1212
1213 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1214 compare_values (DECL_REGISTER);
1215
1216 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1217 {
1218 compare_values (DECL_COMMON);
1219 compare_values (DECL_DLLIMPORT_P);
1220 compare_values (DECL_WEAK);
1221 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1222 compare_values (DECL_COMDAT);
1223 compare_values (DECL_VISIBILITY);
1224 compare_values (DECL_VISIBILITY_SPECIFIED);
1225 if (code == VAR_DECL)
1226 {
1227 compare_values (DECL_HARD_REGISTER);
1228 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1229 compare_values (DECL_IN_CONSTANT_POOL);
1230 }
1231 }
1232
1233 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1234 {
1235 compare_values (DECL_BUILT_IN_CLASS);
1236 compare_values (DECL_STATIC_CONSTRUCTOR);
1237 compare_values (DECL_STATIC_DESTRUCTOR);
1238 compare_values (DECL_UNINLINABLE);
1239 compare_values (DECL_POSSIBLY_INLINED);
1240 compare_values (DECL_IS_NOVOPS);
1241 compare_values (DECL_IS_RETURNS_TWICE);
1242 compare_values (DECL_IS_MALLOC);
1243 compare_values (FUNCTION_DECL_DECL_TYPE);
1244 compare_values (DECL_DECLARED_INLINE_P);
1245 compare_values (DECL_STATIC_CHAIN);
1246 compare_values (DECL_NO_INLINE_WARNING_P);
1247 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1248 compare_values (DECL_NO_LIMIT_STACK);
1249 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1250 compare_values (DECL_PURE_P);
1251 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1252 compare_values (DECL_IS_REPLACEABLE_OPERATOR);
1253 compare_values (DECL_FINAL_P);
1254 compare_values (DECL_CXX_CONSTRUCTOR_P);
1255 compare_values (DECL_CXX_DESTRUCTOR_P);
1256 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1257 compare_values (DECL_UNCHECKED_FUNCTION_CODE);
1258 }
1259
1260 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1261 {
1262 compare_values (TYPE_MODE);
1263 compare_values (TYPE_NEEDS_CONSTRUCTING);
1264 if (RECORD_OR_UNION_TYPE_P (t1))
1265 {
1266 compare_values (TYPE_TRANSPARENT_AGGR);
1267 compare_values (TYPE_FINAL_P);
1268 compare_values (TYPE_CXX_ODR_P);
1269 }
1270 else if (code == ARRAY_TYPE)
1271 compare_values (TYPE_NONALIASED_COMPONENT);
1272 if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1273 compare_values (TYPE_STRING_FLAG);
1274 if (AGGREGATE_TYPE_P (t1))
1275 compare_values (TYPE_TYPELESS_STORAGE);
1276 compare_values (TYPE_EMPTY_P);
1277 compare_values (TYPE_PACKED);
1278 compare_values (TYPE_RESTRICT);
1279 compare_values (TYPE_USER_ALIGN);
1280 compare_values (TYPE_READONLY);
1281 compare_values (TYPE_PRECISION);
1282 compare_values (TYPE_ALIGN);
1283 /* Do not compare TYPE_ALIAS_SET. Doing so introduces ordering issues
1284 with calls to get_alias_set which may initialize it for streamed
1285 in types. */
1286 }
1287
1288 /* We don't want to compare locations, so there is nothing to compare
1289 for TS_EXP. */
1290
1291 /* BLOCKs are function local and we don't merge anything there, so
1292 simply refuse to merge. */
1293 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1294 return false;
1295
1296 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1297 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1298 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1299 return false;
1300
1301 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1302 if (!cl_target_option_eq (TREE_TARGET_OPTION (t1), TREE_TARGET_OPTION (t2)))
1303 return false;
1304
1305 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1306 if (!cl_optimization_option_eq (TREE_OPTIMIZATION (t1),
1307 TREE_OPTIMIZATION (t2)))
1308 return false;
1309
1310 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1311 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1312 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1313 return false;
1314
1315 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1316 {
1317 compare_values (CLOBBER_KIND);
1318 compare_values (CONSTRUCTOR_NELTS);
1319 }
1320
1321 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1322 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1323 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1324 IDENTIFIER_LENGTH (t1)) != 0)
1325 return false;
1326
1327 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1328 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1329 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1330 TREE_STRING_LENGTH (t1)) != 0)
1331 return false;
1332
1333 if (code == OMP_CLAUSE)
1334 {
1335 compare_values (OMP_CLAUSE_CODE);
1336 switch (OMP_CLAUSE_CODE (t1))
1337 {
1338 case OMP_CLAUSE_DEFAULT:
1339 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1340 break;
1341 case OMP_CLAUSE_SCHEDULE:
1342 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1343 break;
1344 case OMP_CLAUSE_DEPEND:
1345 compare_values (OMP_CLAUSE_DEPEND_KIND);
1346 break;
1347 case OMP_CLAUSE_MAP:
1348 compare_values (OMP_CLAUSE_MAP_KIND);
1349 break;
1350 case OMP_CLAUSE_PROC_BIND:
1351 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1352 break;
1353 case OMP_CLAUSE_REDUCTION:
1354 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1355 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1356 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1357 break;
1358 default:
1359 break;
1360 }
1361 }
1362
1363 #undef compare_values
1364
1365
1366 /* Compare pointer fields. */
1367
1368 /* Recurse. Search & Replaced from DFS_write_tree_body.
1369 Folding the early checks into the compare_tree_edges recursion
1370 macro makes debugging way quicker as you are able to break on
1371 compare_tree_sccs_1 and simply finish until a call returns false
1372 to spot the SCC members with the difference. */
1373 #define compare_tree_edges(E1, E2) \
1374 do { \
1375 tree t1_ = (E1), t2_ = (E2); \
1376 if (t1_ != t2_ \
1377 && (!t1_ || !t2_ \
1378 || !TREE_VISITED (t2_) \
1379 || (!TREE_ASM_WRITTEN (t2_) \
1380 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1381 return false; \
1382 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1383 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1384 } while (0)
1385
1386 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1387 {
1388 if (code != IDENTIFIER_NODE)
1389 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1390 }
1391
1392 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1393 {
1394 /* Note that the number of elements for EXPR has already been emitted
1395 in EXPR's header (see streamer_write_tree_header). */
1396 unsigned int count = vector_cst_encoded_nelts (t1);
1397 for (unsigned int i = 0; i < count; ++i)
1398 compare_tree_edges (VECTOR_CST_ENCODED_ELT (t1, i),
1399 VECTOR_CST_ENCODED_ELT (t2, i));
1400 }
1401
1402 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1403 {
1404 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1405 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1406 }
1407
1408 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1409 {
1410 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1411 /* ??? Global decls from different TUs have non-matching
1412 TRANSLATION_UNIT_DECLs. Only consider a small set of
1413 decls equivalent, we should not end up merging others. */
1414 if ((code == TYPE_DECL
1415 || code == NAMESPACE_DECL
1416 || code == IMPORTED_DECL
1417 || code == CONST_DECL
1418 || (VAR_OR_FUNCTION_DECL_P (t1)
1419 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1420 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1421 ;
1422 else
1423 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1424 }
1425
1426 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1427 {
1428 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1429 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1430 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1431 compare_tree_edges (DECL_ABSTRACT_ORIGIN (t1), DECL_ABSTRACT_ORIGIN (t2));
1432 if ((code == VAR_DECL
1433 || code == PARM_DECL)
1434 && DECL_HAS_VALUE_EXPR_P (t1))
1435 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1436 if (code == VAR_DECL
1437 && DECL_HAS_DEBUG_EXPR_P (t1))
1438 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1439 /* LTO specific edges. */
1440 if (code != FUNCTION_DECL
1441 && code != TRANSLATION_UNIT_DECL)
1442 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1443 }
1444
1445 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1446 {
1447 if (code == FUNCTION_DECL)
1448 {
1449 tree a1, a2;
1450 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1451 a1 || a2;
1452 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1453 compare_tree_edges (a1, a2);
1454 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1455 }
1456 else if (code == TYPE_DECL)
1457 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1458 }
1459
1460 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1461 {
1462 /* Make sure we don't inadvertently set the assembler name. */
1463 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1464 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1465 DECL_ASSEMBLER_NAME (t2));
1466 }
1467
1468 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1469 {
1470 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1471 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1472 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1473 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1474 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1475 DECL_FIELD_BIT_OFFSET (t2));
1476 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1477 }
1478
1479 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1480 {
1481 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1482 DECL_FUNCTION_PERSONALITY (t2));
1483 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1484 compare_tree_edges (DECL_FUNCTION_SPECIFIC_TARGET (t1),
1485 DECL_FUNCTION_SPECIFIC_TARGET (t2));
1486 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1487 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1488 }
1489
1490 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1491 {
1492 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1493 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1494 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1495 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1496 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1497 reconstructed during fixup. */
1498 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1499 during fixup. */
1500 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1501 /* ??? Global types from different TUs have non-matching
1502 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1503 equal. */
1504 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1505 ;
1506 else
1507 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1508 /* TYPE_CANONICAL is re-computed during type merging, so do not
1509 compare it here. */
1510 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1511 }
1512
1513 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1514 {
1515 if (code == ARRAY_TYPE)
1516 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1517 else if (RECORD_OR_UNION_TYPE_P (t1))
1518 {
1519 tree f1, f2;
1520 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1521 f1 || f2;
1522 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1523 compare_tree_edges (f1, f2);
1524 }
1525 else if (code == FUNCTION_TYPE
1526 || code == METHOD_TYPE)
1527 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1528
1529 if (!POINTER_TYPE_P (t1))
1530 compare_tree_edges (TYPE_MIN_VALUE_RAW (t1), TYPE_MIN_VALUE_RAW (t2));
1531 compare_tree_edges (TYPE_MAX_VALUE_RAW (t1), TYPE_MAX_VALUE_RAW (t2));
1532 }
1533
1534 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1535 {
1536 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1537 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1538 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1539 }
1540
1541 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1542 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1543 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1544
1545 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1546 {
1547 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1548 compare_tree_edges (TREE_OPERAND (t1, i),
1549 TREE_OPERAND (t2, i));
1550
1551 /* BLOCKs are function local and we don't merge anything there. */
1552 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1553 return false;
1554 }
1555
1556 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1557 {
1558 unsigned i;
1559 tree t;
1560 /* Lengths have already been compared above. */
1561 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1562 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1563 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1564 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1565 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1566 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1567 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1568 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1569 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1570 }
1571
1572 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1573 {
1574 unsigned i;
1575 tree index, value;
1576 /* Lengths have already been compared above. */
1577 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1578 {
1579 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1580 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1581 }
1582 }
1583
1584 if (code == OMP_CLAUSE)
1585 {
1586 int i;
1587
1588 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1589 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1590 OMP_CLAUSE_OPERAND (t2, i));
1591 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1592 }
1593
1594 #undef compare_tree_edges
1595
1596 return true;
1597 }
1598
1599 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1600 out MAP if they are equal. */
1601
1602 static bool
1603 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1604 tree *map)
1605 {
1606 /* Assume SCC entry hashes are sorted by their cardinality, which
1607 means we can simply take the first n-tuple of equal hashes
1608 (which is recorded as entry_len) and do n SCC entry candidate
1609 comparisons. */
1610 for (unsigned i = 0; i < pscc->entry_len; ++i)
1611 {
1612 tree *mapp = map;
1613 num_scc_compare_collisions++;
1614 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1615 {
1616 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1617 on the scc as all trees will be freed. */
1618 return true;
1619 }
1620 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1621 the SCC prevails. */
1622 for (unsigned j = 0; j < scc->len; ++j)
1623 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1624 }
1625
1626 return false;
1627 }
1628
1629 /* qsort comparison function to sort an array of pointer pairs by the
1630 second pointer. */
1631
1632 static int
1633 cmp_tree (const void *p1_, const void *p2_)
1634 {
1635 tree *p1 = (tree *)(const_cast<void *>(p1_));
1636 tree *p2 = (tree *)(const_cast<void *>(p2_));
1637 if (p1[1] == p2[1])
1638 return 0;
1639 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1640 }
1641
1642 /* New scc of size 1 containing T was streamed in from DATA_IN and not merged.
1643 Register it to reader cache at index FROM. */
1644
1645 static void
1646 process_dref (class data_in *data_in, tree t, unsigned from)
1647 {
1648 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1649 /* If we got a debug reference queued, see if the prevailing
1650 tree has a debug reference and if not, register the one
1651 for the tree we are about to throw away. */
1652 if (dref_queue.length () == 1)
1653 {
1654 dref_entry e = dref_queue.pop ();
1655 gcc_assert (e.decl
1656 == streamer_tree_cache_get_tree (cache, from));
1657 const char *sym;
1658 unsigned HOST_WIDE_INT off;
1659 if (!debug_hooks->die_ref_for_decl (t, &sym, &off))
1660 debug_hooks->register_external_die (t, e.sym, e.off);
1661 }
1662 }
1663
1664 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
1665 hash value SCC_HASH with an already recorded SCC. Return true if
1666 that was successful, otherwise return false. */
1667
1668 static bool
1669 unify_scc (class data_in *data_in, unsigned from,
1670 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1671 {
1672 bool unified_p = false;
1673 struct streamer_tree_cache_d *cache = data_in->reader_cache;
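/* tree_scc ends in a one-element array, so over-allocate to make room
for all LEN entries of this SCC on the stack. */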
1674 tree_scc *scc
1675 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1676 scc->next = NULL;
1677 scc->hash = scc_hash;
1678 scc->len = len;
1679 scc->entry_len = scc_entry_len;
1680 for (unsigned i = 0; i < len; ++i)
1681 {
1682 tree t = streamer_tree_cache_get_tree (cache, from + i);
1683 scc->entries[i] = t;
1684 /* These types should be streamed as unshared. */
1685 gcc_checking_assert
1686 (!(TREE_CODE (t) == TRANSLATION_UNIT_DECL
1687 || (VAR_OR_FUNCTION_DECL_P (t)
1688 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1689 || TREE_CODE (t) == LABEL_DECL
1690 || (TREE_CODE (t) == NAMESPACE_DECL && !DECL_NAME (t))
1691 || (TYPE_P (t)
1692 && type_with_linkage_p (TYPE_MAIN_VARIANT (t))
1693 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t)))));
1694 }
1695
1696 /* Look for the list of candidate SCCs to compare against. */
1697 tree_scc **slot;
1698 slot = tree_scc_hash->find_slot_with_hash (scc, scc_hash, INSERT);
1699 if (*slot)
1700 {
1701 /* Try unifying against each candidate. */
1702 num_scc_compares++;
1703
1704 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1705 outside of the scc when following tree edges. Make sure
1706 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1707 to track whether we visited the SCC member during the compare.
1708 We cannot use TREE_VISITED on the pscc members as the extended
1709 scc and pscc can overlap. */
1710 for (unsigned i = 0; i < scc->len; ++i)
1711 {
1712 TREE_VISITED (scc->entries[i]) = 1;
1713 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1714 }
1715
1716 tree *map = XALLOCAVEC (tree, 2 * len);
1717 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1718 {
1719 if (!compare_tree_sccs (pscc, scc, map))
1720 continue;
1721
1722 /* Found an equal SCC. */
1723 unified_p = true;
1724 num_scc_compare_collisions--;
1725 num_sccs_merged++;
1726 total_scc_size_merged += len;
1727
1728 if (flag_checking)
1729 for (unsigned i = 0; i < len; ++i)
1730 {
1731 tree t = map[2*i+1];
1732 enum tree_code code = TREE_CODE (t);
1733 /* IDENTIFIER_NODEs should be singletons and are merged by the
1734 streamer. The others should be singletons, too, and we
1735 should not merge them in any way. */
1736 gcc_assert (code != TRANSLATION_UNIT_DECL
1737 && code != IDENTIFIER_NODE);
1738 }
1739
1740 /* Fixup the streamer cache with the prevailing nodes according
1741 to the tree node mapping computed by compare_tree_sccs. */
1742 if (len == 1)
1743 {
1744 process_dref (data_in, pscc->entries[0], from);
1745 lto_maybe_register_decl (data_in, pscc->entries[0], from);
1746 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1747 }
1748 else
1749 {
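/* Pair each original cache index FROM + I with the tree streamed into it,
   then sort both this array and the mapping computed by compare_tree_sccs
   by the streamed tree, so the prevailing node in MAP lines up with the
   cache slot recorded in MAP2.  */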
1750 tree *map2 = XALLOCAVEC (tree, 2 * len);
1751 for (unsigned i = 0; i < len; ++i)
1752 {
1753 map2[i*2] = (tree)(uintptr_t)(from + i);
1754 map2[i*2+1] = scc->entries[i];
1755 }
1756 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1757 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1758 for (unsigned i = 0; i < len; ++i)
1759 {
1760 lto_maybe_register_decl (data_in, map[2*i],
1761 (uintptr_t)map2[2*i]);
1762 streamer_tree_cache_replace_tree (cache, map[2*i],
1763 (uintptr_t)map2[2*i]);
1764 }
1765 }
1766
1767 /* Free the tree nodes from the read SCC. */
1768 data_in->location_cache.revert_location_cache ();
1769 for (unsigned i = 0; i < len; ++i)
1770 {
1771 if (TYPE_P (scc->entries[i]))
1772 num_merged_types++;
1773 free_node (scc->entries[i]);
1774 }
1775
1776 /* Drop DIE references.
1777 ??? Do as in the size-one SCC case which involves sorting
1778 the queue. */
1779 dref_queue.truncate (0);
1780
1781 break;
1782 }
1783
1784 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1785 if (!unified_p)
1786 for (unsigned i = 0; i < scc->len; ++i)
1787 TREE_VISITED (scc->entries[i]) = 0;
1788 }
1789
1790 /* If we didn't unify it to any candidate duplicate the relevant
1791 pieces to permanent storage and link it into the chain. */
1792 if (!unified_p)
1793 {
1794 tree_scc *pscc
1795 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1796 memcpy (pscc, scc, sizeof (tree_scc));
1797 pscc->next = (*slot);
1798 *slot = pscc;
1799 }
1800 return unified_p;
1801 }
1802
1803 typedef int_hash<unsigned, 0, UINT_MAX> code_id_hash;
1804
1805 /* Do the registering necessary once a new tree is fully streamed in
1806 (including all trees it refers to). */
1807
1808 static void
1809 process_new_tree (tree t, hash_map <code_id_hash, unsigned> *hm,
1810 unsigned index, unsigned *total, class data_in *data_in)
1811 {
1812 /* Reconstruct the type variant and pointer-to/reference-to
1813 chains. */
1814 if (TYPE_P (t))
1815 {
1816 /* Map the tree types to their frequencies. */
1817 if (flag_lto_dump_type_stats)
1818 {
1819 unsigned key = (unsigned) TREE_CODE (t);
1820 unsigned *countp = hm->get (key);
1821 hm->put (key, countp ? (*countp) + 1 : 1);
1822 (*total)++;
1823 }
1824
1825 num_prevailing_types++;
1826 lto_fixup_prevailing_type (t);
1827
1828 /* Compute the canonical type of all non-ODR types.
1829 Delay ODR types until the end of the merging process - the canonical
1830 type for those can be computed using the (unique) name; however,
1831 we want to do this only if units in other languages do not
1832 contain a structurally equivalent type.
1833 
1834 Because SCC components are streamed in random (hash) order
1835 we may have encountered the type before while registering
1836 the type canonical of a derived type in the same SCC. */
1837 if (!TYPE_CANONICAL (t))
1838 {
1839 if (!RECORD_OR_UNION_TYPE_P (t)
1840 || !TYPE_CXX_ODR_P (t))
1841 gimple_register_canonical_type (t);
1842 else if (COMPLETE_TYPE_P (t))
1843 vec_safe_push (types_to_register, t);
1844 }
1845 if (TYPE_MAIN_VARIANT (t) == t && odr_type_p (t))
1846 register_odr_type (t);
1847 }
1848 /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUEs of their
1849 type, which is also a member of this SCC. */
1850 if (TREE_CODE (t) == INTEGER_CST
1851 && !TREE_OVERFLOW (t))
1852 cache_integer_cst (t);
1853 if (!flag_ltrans)
1854 {
1855 lto_maybe_register_decl (data_in, t, index);
1856 /* Scan the tree for references to global functions or
1857 variables and record those for later fixup. */
1858 if (mentions_vars_p (t))
1859 vec_safe_push (tree_with_vars, t);
1860 }
1861 }
1862
1863 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1864 RESOLUTIONS is the set of symbols picked by the linker (read from the
1865 resolution file when the linker plugin is being used). */
1866
1867 static void
1868 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1869 vec<ld_plugin_symbol_resolution_t> resolutions)
1870 {
1871 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1872 const int decl_offset = sizeof (struct lto_decl_header);
1873 const int main_offset = decl_offset + header->decl_state_size;
1874 const int string_offset = main_offset + header->main_size;
1875 class data_in *data_in;
1876 unsigned int i;
1877 const uint32_t *data_ptr, *data_end;
1878 uint32_t num_decl_states;
1879
1880 lto_input_block ib_main ((const char *) data + main_offset,
1881 header->main_size, decl_data->mode_table);
1882
1883 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1884 header->string_size, resolutions);
1885
1886 /* We do not uniquify the pre-loaded cache entries, those are middle-end
1887 internal types that should not be merged. */
1888
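/* Accumulators used when dumping type statistics: HM maps a type's
   TREE_CODE to the number of times it was seen and TOTAL counts all
   types, filled in by process_new_tree when flag_lto_dump_type_stats
   is set.  */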
1889 hash_map <code_id_hash, unsigned> hm;
1890 unsigned total = 0;
1891
1892 /* Read the global declarations and types. */
1893 while (ib_main.p < ib_main.len)
1894 {
1895 tree t;
1896 unsigned from = data_in->reader_cache->nodes.length ();
1897 /* Read and uniquify SCCs as in the input stream. */
1898 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1899 if (tag == LTO_tree_scc || tag == LTO_trees)
1900 {
1901 unsigned len_;
1902 unsigned scc_entry_len;
1903
1904 /* Because we stream in SCC order we know that all unshared trees
1905 are now fully streamed. Process them. */
1906 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1907 &scc_entry_len,
1908 tag == LTO_tree_scc);
1909 unsigned len = data_in->reader_cache->nodes.length () - from;
1910 gcc_assert (len == len_);
1911
1912 if (tag == LTO_tree_scc)
1913 {
1914 total_scc_size += len;
1915 num_sccs_read++;
1916 }
1917 else
1918 num_unshared_trees_read += len;
1919
1920 /* We have the special case of size-1 SCCs that are pre-merged
1921 by means of identifier and string sharing for example.
1922 ??? Maybe we should avoid streaming those as SCCs. */
1923 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1924 from);
1925 /* Identifiers and integers are shared specially; they should never
1926 go through the tree merging path. */
1927 gcc_checking_assert ((TREE_CODE (first) != IDENTIFIER_NODE
1928 && (TREE_CODE (first) != INTEGER_CST
1929 || TREE_OVERFLOW (first)))
1930 || len != 1);
1931
1932 /* Try to unify the SCC with already existing ones. */
1933 if (!flag_ltrans && tag != LTO_trees
1934 && unify_scc (data_in, from,
1935 len, scc_entry_len, scc_hash))
1936 continue;
1937
1938 /* Tree merging failed, mark entries in location cache as
1939 permanent. */
1940 data_in->location_cache.accept_location_cache ();
1941
1942 bool seen_type = false;
1943 for (unsigned i = 0; i < len; ++i)
1944 {
1945 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1946 from + i);
1947 process_new_tree (t, &hm, from + i, &total, data_in);
1948 if (TYPE_P (t))
1949 seen_type = true;
1950 }
1951
1952 /* Register DECLs with the debuginfo machinery. */
1953 while (!dref_queue.is_empty ())
1954 {
1955 dref_entry e = dref_queue.pop ();
1956 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1957 }
1958
1959 if (seen_type)
1960 num_type_scc_trees += len;
1961 }
1962 else
1963 {
1964 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1965 gcc_assert (data_in->reader_cache->nodes.length () == from + 1);
1966 num_unshared_trees_read++;
1967 data_in->location_cache.accept_location_cache ();
1968 process_dref (data_in, t, from);
1969 if (TREE_CODE (t) == IDENTIFIER_NODE
1970 || (TREE_CODE (t) == INTEGER_CST
1971 && !TREE_OVERFLOW (t)))
1972 ;
1973 else
1974 {
1975 lto_maybe_register_decl (data_in, t, from);
1976 process_new_tree (t, &hm, from, &total, data_in);
1977 }
1978 }
1979 }
1980
1981 /* Dump type statistics. */
1982 if (flag_lto_dump_type_stats)
1983 {
1984 fprintf (stdout, " Type Frequency Percentage\n\n");
1985 for (hash_map<code_id_hash, unsigned>::iterator itr = hm.begin ();
1986 itr != hm.end ();
1987 ++itr)
1988 {
1989 std::pair<unsigned, unsigned> p = *itr;
1990 enum tree_code code = (enum tree_code) p.first;
1991 fprintf (stdout, "%14s %6d %12.2f\n", get_tree_code_name (code),
1992 p.second, float (p.second)/total*100);
1993 }
1994 }
1995
1996 data_in->location_cache.apply_location_cache ();
1997
1998 /* Read in lto_in_decl_state objects. */
1999 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
2000 data_end
2001 = (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
2002 num_decl_states = *data_ptr++;
2003
2004 gcc_assert (num_decl_states > 0);
2005 decl_data->global_decl_state = lto_new_in_decl_state ();
2006 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
2007 decl_data->global_decl_state);
2008
2009 /* Read in per-function decl states and enter them in hash table. */
2010 decl_data->function_decl_states
2011 = hash_table<decl_state_hasher>::create_ggc (37);
2012
2013 for (i = 1; i < num_decl_states; i++)
2014 {
2015 struct lto_in_decl_state *state = lto_new_in_decl_state ();
2016
2017 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
2018 lto_in_decl_state **slot
2019 = decl_data->function_decl_states->find_slot (state, INSERT);
2020 gcc_assert (*slot == NULL);
2021 *slot = state;
2022 }
2023
2024 if (data_ptr != data_end)
2025 internal_error ("bytecode stream: garbage at the end of symbols section");
2026
2027 /* Set the current decl state to be the global state. */
2028 decl_data->current_decl_state = decl_data->global_decl_state;
2029
2030 lto_data_in_delete (data_in);
2031 }
2032
2033 /* Custom hex parser used instead of strtoll, which is not portable. */
2034
2035 static int64_t
2036 lto_parse_hex (const char *p)
2037 {
2038 int64_t ret = 0;
2039
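/* Accumulate one hex digit per character; for example the string "1aF"
   parses to 0x1af.  Any non-hex character is an internal error.  */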
2040 for (; *p != '\0'; ++p)
2041 {
2042 char c = *p;
2043 unsigned char part;
2044 ret <<= 4;
2045 if (c >= '0' && c <= '9')
2046 part = c - '0';
2047 else if (c >= 'a' && c <= 'f')
2048 part = c - 'a' + 10;
2049 else if (c >= 'A' && c <= 'F')
2050 part = c - 'A' + 10;
2051 else
2052 internal_error ("could not parse hex number");
2053 ret |= part;
2054 }
2055
2056 return ret;
2057 }
2058
2059 /* Read the resolution data for FILE from RESOLUTION and record it
2060 against the sub-file ids in FILE_IDS. */
2061
2062 static void
2063 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2064 {
2065 /* We require that objects in the resolution file are in the same
2066 order as the lto1 command line. */
2067 unsigned int name_len;
2068 char *obj_name;
2069 unsigned int num_symbols;
2070 unsigned int i;
2071 struct lto_file_decl_data *file_data;
2072 splay_tree_node nd = NULL;
2073
2074 if (!resolution)
2075 return;
2076
2077 name_len = strlen (file->filename);
2078 obj_name = XNEWVEC (char, name_len + 1);
2079 fscanf (resolution, " "); /* Read white space. */
2080
2081 fread (obj_name, sizeof (char), name_len, resolution);
2082 obj_name[name_len] = '\0';
2083 if (filename_cmp (obj_name, file->filename) != 0)
2084 internal_error ("unexpected file name %s in linker resolution file. "
2085 "Expected %s", obj_name, file->filename);
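/* For an object that lives at a non-zero offset (e.g. an archive member)
   the resolution entry carries an "@0x<offset>" suffix; check that it
   matches the file we are reading.  */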
2086 if (file->offset != 0)
2087 {
2088 int t;
2089 char offset_p[17];
2090 int64_t offset;
2091 t = fscanf (resolution, "@0x%16s", offset_p);
2092 if (t != 1)
2093 internal_error ("could not parse file offset");
2094 offset = lto_parse_hex (offset_p);
2095 if (offset != file->offset)
2096 internal_error ("unexpected offset");
2097 }
2098
2099 free (obj_name);
2100
2101 fscanf (resolution, "%u", &num_symbols);
2102
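/* Each symbol entry has the form
     <symbol-index> <sub-file-id in hex> <resolution-name> <rest of line>
   where <resolution-name> is one of the strings in lto_resolution_str,
   for example PREVAILING_DEF.  */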
2103 for (i = 0; i < num_symbols; i++)
2104 {
2105 int t;
2106 unsigned index;
2107 unsigned HOST_WIDE_INT id;
2108 char r_str[27];
2109 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2110 unsigned int j;
2111 unsigned int lto_resolution_str_len
2112 = sizeof (lto_resolution_str) / sizeof (char *);
2113 res_pair rp;
2114
2115 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE
2116 " %26s %*[^\n]\n", &index, &id, r_str);
2117 if (t != 3)
2118 internal_error ("invalid line in the resolution file");
2119
2120 for (j = 0; j < lto_resolution_str_len; j++)
2121 {
2122 if (strcmp (lto_resolution_str[j], r_str) == 0)
2123 {
2124 r = (enum ld_plugin_symbol_resolution) j;
2125 break;
2126 }
2127 }
2128 if (j == lto_resolution_str_len)
2129 internal_error ("invalid resolution in the resolution file");
2130
2131 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2132 {
2133 nd = lto_splay_tree_lookup (file_ids, id);
2134 if (nd == NULL)
2135 internal_error ("resolution sub id %wx not in object file", id);
2136 }
2137
2138 file_data = (struct lto_file_decl_data *)nd->value;
2139 /* The indexes are very sparse. To save memory, store them in a compact
2140 format that is only unpacked later when the subfile is processed. */
2141 rp.res = r;
2142 rp.index = index;
2143 file_data->respairs.safe_push (rp);
2144 if (file_data->max_index < index)
2145 file_data->max_index = index;
2146 }
2147 }
2148
2149 /* List of file_decl_datas. */
2150 struct file_data_list
2151 {
2152 struct lto_file_decl_data *first, *last;
2153 };
2154
2155 /* Is NAME the name of an id'ed LTO section? If so, store the id in *ID. */
2156
2157 static int
2158 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2159 {
2160 const char *s;
2161
2162 if (strncmp (name, section_name_prefix, strlen (section_name_prefix)))
2163 return 0;
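/* An id'ed section name has the form "<section_name_prefix><name>.<hex-id>";
   the id, if present, follows the last '.'.  */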
2164 s = strrchr (name, '.');
2165 if (!s)
2166 return 0;
2167 /* If the section is not suffixed with an ID return. */
2168 if ((size_t)(s - name) == strlen (section_name_prefix))
2169 return 0;
2170 return sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2171 }
2172
2173 /* Create file_data of each sub file id. */
2174
2175 static int
2176 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2177 struct file_data_list *list)
2178 {
2179 struct lto_section_slot s_slot, *new_slot;
2180 unsigned HOST_WIDE_INT id;
2181 splay_tree_node nd;
2182 void **hash_slot;
2183 char *new_name;
2184 struct lto_file_decl_data *file_data;
2185
2186 if (!lto_section_with_id (ls->name, &id))
2187 return 1;
2188
2189 /* Find hash table of sub module id. */
2190 nd = lto_splay_tree_lookup (file_ids, id);
2191 if (nd != NULL)
2192 {
2193 file_data = (struct lto_file_decl_data *)nd->value;
2194 }
2195 else
2196 {
2197 file_data = ggc_alloc<lto_file_decl_data> ();
2198 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2199 file_data->id = id;
2200 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2201 lto_splay_tree_insert (file_ids, id, file_data);
2202
2203 /* Maintain list in linker order. */
2204 if (!list->first)
2205 list->first = file_data;
2206 if (list->last)
2207 list->last->next = file_data;
2208
2209 list->last = file_data;
2210 }
2211
2212 /* Copy section into sub module hash table. */
2213 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2214 s_slot.name = new_name;
2215 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2216 gcc_assert (*hash_slot == NULL);
2217
2218 new_slot = XDUP (struct lto_section_slot, ls);
2219 new_slot->name = new_name;
2220 *hash_slot = new_slot;
2221 return 1;
2222 }
2223
2224 /* Read declarations and other initializations for a FILE_DATA. */
2225
2226 static void
2227 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file,
2228 int order)
2229 {
2230 const char *data;
2231 size_t len;
2232 vec<ld_plugin_symbol_resolution_t>
2233 resolutions = vNULL;
2234 int i;
2235 res_pair *rp;
2236
2237 /* Create vector for fast access of resolution. We do this lazily
2238 to save memory. */
2239 resolutions.safe_grow_cleared (file_data->max_index + 1, true);
2240 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2241 resolutions[rp->index] = rp->res;
2242 file_data->respairs.release ();
2243
2244 file_data->renaming_hash_table = lto_create_renaming_table ();
2245 file_data->file_name = file->filename;
2246 file_data->order = order;
2247
2248 /* Read and verify LTO section. */
2249 data = lto_get_summary_section_data (file_data, LTO_section_lto, &len);
2250 if (data == NULL)
2251 {
2252 fatal_error (input_location, "bytecode stream in file %qs generated "
2253 "with GCC compiler older than 10.0", file_data->file_name);
2254 return;
2255 }
2256
2257 memcpy (&file_data->lto_section_header, data, sizeof (lto_section));
2258 lto_check_version (file_data->lto_section_header.major_version,
2259 file_data->lto_section_header.minor_version,
2260 file_data->file_name);
2261
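/* The offload compiler streams a mode table so target machine modes can be
   remapped; the host compiler uses the identity mapping built in
   lto_fe_init.  */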
2262 #ifdef ACCEL_COMPILER
2263 lto_input_mode_table (file_data);
2264 #else
2265 file_data->mode_table = lto_mode_identity_table;
2266 #endif
2267
2268 data = lto_get_summary_section_data (file_data, LTO_section_decls, &len);
2269 if (data == NULL)
2270 {
2271 internal_error ("cannot read %<LTO_section_decls%> from %s",
2272 file_data->file_name);
2273 return;
2274 }
2275 /* Frees resolutions. */
2276 lto_read_decls (file_data, data, resolutions);
2277 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2278 }
2279
2280 /* Finalize FILE_DATA in FILE and increase COUNT. */
2281
2282 static int
2283 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2284 int *count, int order)
2285 {
2286 lto_file_finalize (file_data, file, order);
2287 if (symtab->dump_file)
2288 fprintf (symtab->dump_file,
2289 "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2290 file_data->file_name, file_data->id);
2291 (*count)++;
2292 return 0;
2293 }
2294
2295 /* Generate a TREE representation for all types and external decl
2296 entities in FILE.
2297
2298 Read all of the globals out of the file. Then read the cgraph
2299 and process the .o index into the cgraph nodes so that it can open
2300 the .o file to load the functions and ipa information. */
2301
2302 static struct lto_file_decl_data *
2303 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2304 {
2305 struct lto_file_decl_data *file_data = NULL;
2306 splay_tree file_ids;
2307 htab_t section_hash_table;
2308 struct lto_section_slot *section;
2309 struct file_data_list file_list;
2310 struct lto_section_list section_list;
2311
2312 memset (&section_list, 0, sizeof (struct lto_section_list));
2313 section_hash_table = lto_obj_build_section_table (file, &section_list);
2314
2315 /* Dump the details of LTO objects. */
2316 if (flag_lto_dump_objects)
2317 {
2318 int i=0;
2319 fprintf (stdout, "\n LTO Object Name: %s\n", file->filename);
2320 fprintf (stdout, "\nNo. Offset Size Section Name\n\n");
2321 for (section = section_list.first; section != NULL; section = section->next)
2322 fprintf (stdout, "%2d %8" PRId64 " %8" PRIu64 " %s\n",
2323 ++i, (int64_t) section->start, (uint64_t) section->len,
2324 section->name);
2325 }
2326
2327 /* Find all sub modules in the object and put their sections into new hash
2328 tables in a splay tree. */
2329 file_ids = lto_splay_tree_new ();
2330 memset (&file_list, 0, sizeof (struct file_data_list));
2331 for (section = section_list.first; section != NULL; section = section->next)
2332 create_subid_section_table (section, file_ids, &file_list);
2333
2334 /* Add resolutions to file ids. */
2335 lto_resolution_read (file_ids, resolution_file, file);
2336
2337 /* Finalize each lto file for each submodule in the merged object. */
2338 int order = 0;
2339 for (file_data = file_list.first; file_data != NULL;
2340 file_data = file_data->next)
2341 lto_create_files_from_ids (file, file_data, count, order++);
2342
2343 splay_tree_delete (file_ids);
2344 htab_delete (section_hash_table);
2345
2346 return file_list.first;
2347 }
2348
2349 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2350 #define LTO_MMAP_IO 1
2351 #endif
2352
2353 #if LTO_MMAP_IO
2354 /* Page size of machine is used for mmap and munmap calls. */
2355 static size_t page_mask;
2356 #endif
2357
2358 /* Get the section data of length LEN from FILENAME starting at
2359 OFFSET. The data segment must be freed by the caller when the
2360 caller is finished. Returns NULL if all was not well. */
2361
2362 static char *
2363 lto_read_section_data (struct lto_file_decl_data *file_data,
2364 intptr_t offset, size_t len)
2365 {
2366 char *result;
2367 static int fd = -1;
2368 static char *fd_name;
2369 #if LTO_MMAP_IO
2370 intptr_t computed_len;
2371 intptr_t computed_offset;
2372 intptr_t diff;
2373 #endif
2374
2375 /* Keep a single-entry file-descriptor cache. The last file we
2376 touched will get closed at exit.
2377 ??? Eventually we want to add a more sophisticated larger cache
2378 or rather fix function body streaming to not stream them in
2379 practically random order. */
2380 if (fd != -1
2381 && filename_cmp (fd_name, file_data->file_name) != 0)
2382 {
2383 free (fd_name);
2384 close (fd);
2385 fd = -1;
2386 }
2387 if (fd == -1)
2388 {
2389 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2390 if (fd == -1)
2391 {
2392 fatal_error (input_location, "Cannot open %s", file_data->file_name);
2393 return NULL;
2394 }
2395 fd_name = xstrdup (file_data->file_name);
2396 }
2397
2398 #if LTO_MMAP_IO
2399 if (!page_mask)
2400 {
2401 size_t page_size = sysconf (_SC_PAGE_SIZE);
2402 page_mask = ~(page_size - 1);
2403 }
2404
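/* mmap requires a page-aligned file offset; map from the enclosing page
   boundary and return a pointer advanced by the difference.  */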
2405 computed_offset = offset & page_mask;
2406 diff = offset - computed_offset;
2407 computed_len = len + diff;
2408
2409 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2410 fd, computed_offset);
2411 if (result == MAP_FAILED)
2412 {
2413 fatal_error (input_location, "Cannot map %s", file_data->file_name);
2414 return NULL;
2415 }
2416
2417 return result + diff;
2418 #else
2419 result = (char *) xmalloc (len);
2420 if (lseek (fd, offset, SEEK_SET) != offset
2421 || read (fd, result, len) != (ssize_t) len)
2422 {
2423 free (result);
2424 fatal_error (input_location, "Cannot read %s", file_data->file_name);
2425 result = NULL;
2426 }
2427 #ifdef __MINGW32__
2428 /* Native Windows doesn't support delayed unlink of an opened file, so we
2429 close the file here again. This produces higher I/O load, but at least
2430 it avoids dangling file handles that would prevent the unlink. */
2431 free (fd_name);
2432 fd_name = NULL;
2433 close (fd);
2434 fd = -1;
2435 #endif
2436 return result;
2437 #endif
2438 }
2439
2440
2441 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2442 NAME will be NULL unless the section type is for a function
2443 body. */
2444
2445 static const char *
2446 get_section_data (struct lto_file_decl_data *file_data,
2447 enum lto_section_type section_type,
2448 const char *name, int order,
2449 size_t *len)
2450 {
2451 htab_t section_hash_table = file_data->section_hash_table;
2452 struct lto_section_slot *f_slot;
2453 struct lto_section_slot s_slot;
2454 const char *section_name = lto_get_section_name (section_type, name,
2455 order, file_data);
2456 char *data = NULL;
2457
2458 *len = 0;
2459 s_slot.name = section_name;
2460 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2461 if (f_slot)
2462 {
2463 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2464 *len = f_slot->len;
2465 }
2466
2467 free (CONST_CAST (char *, section_name));
2468 return data;
2469 }
2470
2471
2472 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2473 starts at OFFSET and has LEN bytes. */
2474
2475 static void
2476 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2477 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2478 const char *name ATTRIBUTE_UNUSED,
2479 const char *offset, size_t len ATTRIBUTE_UNUSED)
2480 {
2481 #if LTO_MMAP_IO
2482 intptr_t computed_len;
2483 intptr_t computed_offset;
2484 intptr_t diff;
2485 #endif
2486
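/* Reconstruct the page-aligned address and length that were mapped by
   lto_read_section_data and unmap them.  */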
2487 #if LTO_MMAP_IO
2488 computed_offset = ((intptr_t) offset) & page_mask;
2489 diff = (intptr_t) offset - computed_offset;
2490 computed_len = len + diff;
2491
2492 munmap ((caddr_t) computed_offset, computed_len);
2493 #else
2494 free (CONST_CAST(char *, offset));
2495 #endif
2496 }
2497
2498 static lto_file *current_lto_file;
2499
2500 /* If TT is a variable or function decl replace it with its
2501 prevailing variant. */
2502 #define LTO_SET_PREVAIL(tt) \
2503 do {\
2504 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2505 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2506 { \
2507 tt = lto_symtab_prevailing_decl (tt); \
2508 fixed = true; \
2509 } \
2510 } while (0)
2511
2512 /* Ensure that TT isn't a replaceable var or function decl. */
2513 #define LTO_NO_PREVAIL(tt) \
2514 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2515
2516 /* Given a tree T, replace all fields referring to variables or functions
2517 with their prevailing variant. */
2518 static void
2519 lto_fixup_prevailing_decls (tree t)
2520 {
2521 enum tree_code code = TREE_CODE (t);
2522 bool fixed = false;
2523
2524 gcc_checking_assert (code != TREE_BINFO);
2525 LTO_NO_PREVAIL (TREE_TYPE (t));
2526 if (CODE_CONTAINS_STRUCT (code, TS_COMMON)
2527 /* lto_symtab_prevail_decl uses TREE_CHAIN to link to the prevailing decl;
2528 in case T is a prevailed declaration we would ICE here. */
2529 && !VAR_OR_FUNCTION_DECL_P (t))
2530 LTO_NO_PREVAIL (TREE_CHAIN (t));
2531 if (DECL_P (t))
2532 {
2533 LTO_NO_PREVAIL (DECL_NAME (t));
2534 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2535 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2536 {
2537 LTO_SET_PREVAIL (DECL_SIZE (t));
2538 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2539 LTO_SET_PREVAIL (DECL_INITIAL (t));
2540 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2541 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2542 }
2543 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2544 {
2545 LTO_NO_PREVAIL (DECL_ASSEMBLER_NAME_RAW (t));
2546 }
2547 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2548 {
2549 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2550 }
2551 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2552 {
2553 LTO_NO_PREVAIL (DECL_ARGUMENTS (t));
2554 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2555 LTO_NO_PREVAIL (DECL_VINDEX (t));
2556 }
2557 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2558 {
2559 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2560 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2561 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2562 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2563 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2564 }
2565 }
2566 else if (TYPE_P (t))
2567 {
2568 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2569 LTO_SET_PREVAIL (TYPE_SIZE (t));
2570 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2571 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2572 LTO_NO_PREVAIL (TYPE_NAME (t));
2573
2574 LTO_SET_PREVAIL (TYPE_MIN_VALUE_RAW (t));
2575 LTO_SET_PREVAIL (TYPE_MAX_VALUE_RAW (t));
2576 LTO_NO_PREVAIL (TYPE_LANG_SLOT_1 (t));
2577
2578 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2579
2580 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2581 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2582 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2583 }
2584 else if (EXPR_P (t))
2585 {
2586 int i;
2587 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2588 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2589 }
2590 else if (TREE_CODE (t) == CONSTRUCTOR)
2591 {
2592 unsigned i;
2593 tree val;
2594 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2595 LTO_SET_PREVAIL (val);
2596 }
2597 else
2598 {
2599 switch (code)
2600 {
2601 case TREE_LIST:
2602 LTO_SET_PREVAIL (TREE_VALUE (t));
2603 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2604 break;
2605 default:
2606 gcc_unreachable ();
2607 }
2608 }
2609 /* If we fixed nothing, then we missed something seen by
2610 mentions_vars_p. */
2611 gcc_checking_assert (fixed);
2612 }
2613 #undef LTO_SET_PREVAIL
2614 #undef LTO_NO_PREVAIL
2615
2616 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2617 replaces var and function decls with the corresponding prevailing def. */
2618
2619 static void
2620 lto_fixup_state (struct lto_in_decl_state *state)
2621 {
2622 unsigned i, si;
2623
2624 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2625 we still need to walk from all DECLs to find the reachable
2626 FUNCTION_DECLs and VAR_DECLs. */
2627 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2628 {
2629 vec<tree, va_gc> *trees = state->streams[si];
2630 for (i = 0; i < vec_safe_length (trees); i++)
2631 {
2632 tree t = (*trees)[i];
2633 if (flag_checking && TYPE_P (t))
2634 verify_type (t);
2635 if (VAR_OR_FUNCTION_DECL_P (t)
2636 && (TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
2637 (*trees)[i] = lto_symtab_prevailing_decl (t);
2638 }
2639 }
2640 }
2641
2642 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2643 prevailing one. */
2644
2645 static void
2646 lto_fixup_decls (struct lto_file_decl_data **files)
2647 {
2648 unsigned int i;
2649 tree t;
2650
2651 if (tree_with_vars)
2652 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2653 lto_fixup_prevailing_decls (t);
2654
2655 for (i = 0; files[i]; i++)
2656 {
2657 struct lto_file_decl_data *file = files[i];
2658 struct lto_in_decl_state *state = file->global_decl_state;
2659 lto_fixup_state (state);
2660
2661 hash_table<decl_state_hasher>::iterator iter;
2662 lto_in_decl_state *elt;
2663 FOR_EACH_HASH_TABLE_ELEMENT (*file->function_decl_states, elt,
2664 lto_in_decl_state *, iter)
2665 lto_fixup_state (elt);
2666 }
2667 }
2668
2669 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2670
2671 /* Turn the file data for sub-files into a single array, so that they look
2672 like separate files to further passes. */
2673
2674 static void
2675 lto_flatten_files (struct lto_file_decl_data **orig, int count,
2676 int last_file_ix)
2677 {
2678 struct lto_file_decl_data *n, *next;
2679 int i, k;
2680
2681 lto_stats.num_input_files = count;
2682 all_file_decl_data
2683 = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (count + 1);
2684 /* Set the hooks so that all of the ipa passes can read in their data. */
2685 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2686 for (i = 0, k = 0; i < last_file_ix; i++)
2687 {
2688 for (n = orig[i]; n != NULL; n = next)
2689 {
2690 all_file_decl_data[k++] = n;
2691 next = n->next;
2692 n->next = NULL;
2693 }
2694 }
2695 all_file_decl_data[k] = NULL;
2696 gcc_assert (k == count);
2697 }
2698
2699 /* Input file data before flattening (i.e. splitting them into subfiles to
2700 support incremental linking). */
2701 static int real_file_count;
2702 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2703
2704 /* Read all the symbols from the input files FNAMES. NFILES is the
2705 number of files requested in the command line. Instantiate a
2706 global call graph by aggregating all the sub-graphs found in each
2707 file. */
2708
2709 void
2710 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2711 {
2712 unsigned int i, last_file_ix;
2713 FILE *resolution;
2714 unsigned resolution_objects = 0;
2715 int count = 0;
2716 struct lto_file_decl_data **decl_data;
2717 symtab_node *snode;
2718
2719 symtab->initialize ();
2720
2721 timevar_push (TV_IPA_LTO_DECL_IN);
2722
2723 #ifdef ACCEL_COMPILER
2724 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2725 lto_stream_offload_p = true;
2726 #endif
2727
2728 real_file_decl_data
2729 = decl_data = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (nfiles + 1);
2730 real_file_count = nfiles;
2731
2732 /* Read the resolution file. */
2733 resolution = NULL;
2734 if (resolution_file_name)
2735 {
2736 int t;
2737
2738 resolution = fopen (resolution_file_name, "r");
2739 if (resolution == NULL)
2740 fatal_error (input_location,
2741 "could not open symbol resolution file: %m");
2742
2743 t = fscanf (resolution, "%u", &resolution_objects);
2744 gcc_assert (t == 1);
2745 }
2746 symtab->state = LTO_STREAMING;
2747
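/* Set up the tables used for canonical type computation and for merging
   tree SCCs while streaming in the global declarations.  */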
2748 canonical_type_hash_cache = new hash_map<const_tree, hashval_t> (251);
2749 gimple_canonical_types = htab_create (16381, gimple_canonical_type_hash,
2750 gimple_canonical_type_eq, NULL);
2751 gcc_obstack_init (&tree_scc_hash_obstack);
2752 tree_scc_hash = new hash_table<tree_scc_hasher> (4096);
2753
2754 /* Register the common node types with the canonical type machinery so
2755 we properly share alias-sets across languages and TUs. Do not
2756 expose the common nodes as type merge targets - those that should be
2757 are already exposed by pre-loading the LTO streamer caches.
2758 Do two passes - first clear TYPE_CANONICAL and then re-compute it. */
2759 for (i = 0; i < itk_none; ++i)
2760 lto_register_canonical_types (integer_types[i], true);
2761 for (i = 0; i < stk_type_kind_last; ++i)
2762 lto_register_canonical_types (sizetype_tab[i], true);
2763 for (i = 0; i < TI_MAX; ++i)
2764 lto_register_canonical_types (global_trees[i], true);
2765 for (i = 0; i < itk_none; ++i)
2766 lto_register_canonical_types (integer_types[i], false);
2767 for (i = 0; i < stk_type_kind_last; ++i)
2768 lto_register_canonical_types (sizetype_tab[i], false);
2769 for (i = 0; i < TI_MAX; ++i)
2770 lto_register_canonical_types (global_trees[i], false);
2771
2772 if (!quiet_flag)
2773 fprintf (stderr, "Reading object files:");
2774
2775 /* Read all of the object files specified on the command line. */
2776 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2777 {
2778 struct lto_file_decl_data *file_data = NULL;
2779 if (!quiet_flag)
2780 {
2781 fprintf (stderr, " %s", fnames[i]);
2782 fflush (stderr);
2783 }
2784
2785 current_lto_file = lto_obj_file_open (fnames[i], false);
2786 if (!current_lto_file)
2787 break;
2788
2789 file_data = lto_file_read (current_lto_file, resolution, &count);
2790 if (!file_data)
2791 {
2792 lto_obj_file_close (current_lto_file);
2793 free (current_lto_file);
2794 current_lto_file = NULL;
2795 break;
2796 }
2797
2798 decl_data[last_file_ix++] = file_data;
2799
2800 lto_obj_file_close (current_lto_file);
2801 free (current_lto_file);
2802 current_lto_file = NULL;
2803 }
2804
2805 lto_flatten_files (decl_data, count, last_file_ix);
2806 lto_stats.num_input_files = count;
2807 ggc_free(decl_data);
2808 real_file_decl_data = NULL;
2809
2810 lto_register_canonical_types_for_odr_types ();
2811
2812 if (resolution_file_name)
2813 {
2814 /* True, since the plugin splits the archives. */
2815 gcc_assert (resolution_objects == nfiles);
2816 fclose (resolution);
2817 }
2818
2819 /* Show the LTO report before launching LTRANS. */
2820 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2821 print_lto_report_1 ();
2822
2823 /* Free gimple type merging datastructures. */
2824 delete tree_scc_hash;
2825 tree_scc_hash = NULL;
2826 obstack_free (&tree_scc_hash_obstack, NULL);
2827 htab_delete (gimple_canonical_types);
2828 gimple_canonical_types = NULL;
2829 delete canonical_type_hash_cache;
2830 canonical_type_hash_cache = NULL;
2831
2832 /* At this stage we know that the majority of GGC memory is reachable.
2833 Growing the limits prevents unnecessary invocations of GGC. */
2834 ggc_grow ();
2835 report_heap_memory_use ();
2836
2837 /* Set the hooks so that all of the ipa passes can read in their data. */
2838 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2839
2840 timevar_pop (TV_IPA_LTO_DECL_IN);
2841
2842 if (!quiet_flag)
2843 fprintf (stderr, "\nReading the symbol table:");
2844
2845 timevar_push (TV_IPA_LTO_CGRAPH_IO);
2846 /* Read the symtab. */
2847 input_symtab ();
2848
2849 input_offload_tables (!flag_ltrans);
2850
2851 /* Store resolutions into the symbol table. */
2852
2853 FOR_EACH_SYMBOL (snode)
2854 if (snode->externally_visible && snode->real_symbol_p ()
2855 && snode->lto_file_data && snode->lto_file_data->resolution_map
2856 && !(TREE_CODE (snode->decl) == FUNCTION_DECL
2857 && fndecl_built_in_p (snode->decl))
2858 && !(VAR_P (snode->decl) && DECL_HARD_REGISTER (snode->decl)))
2859 {
2860 ld_plugin_symbol_resolution_t *res;
2861
2862 res = snode->lto_file_data->resolution_map->get (snode->decl);
2863 if (!res || *res == LDPR_UNKNOWN)
2864 {
2865 if (snode->output_to_lto_symbol_table_p ())
2866 fatal_error (input_location, "missing resolution data for %s",
2867 IDENTIFIER_POINTER
2868 (DECL_ASSEMBLER_NAME (snode->decl)));
2869 }
2870 /* Symbol versions are always used externally, but the linker does not
2871 report that correctly.
2872 This is binutils PR25924. */
2873 else if (snode->symver && *res == LDPR_PREVAILING_DEF_IRONLY)
2874 snode->resolution = LDPR_PREVAILING_DEF_IRONLY_EXP;
2875 else
2876 snode->resolution = *res;
2877 }
2878 for (i = 0; all_file_decl_data[i]; i++)
2879 if (all_file_decl_data[i]->resolution_map)
2880 {
2881 delete all_file_decl_data[i]->resolution_map;
2882 all_file_decl_data[i]->resolution_map = NULL;
2883 }
2884
2885 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
2886
2887 if (!quiet_flag)
2888 fprintf (stderr, "\nMerging declarations:");
2889
2890 timevar_push (TV_IPA_LTO_DECL_MERGE);
2891 /* Merge global decls. In ltrans mode we read the merged cgraph, so we
2892 do not need to care about resolving symbols again; we only need to
2893 replace duplicated declarations read from the callgraph and from
2894 function sections. */
2895 if (!flag_ltrans)
2896 {
2897 lto_symtab_merge_decls ();
2898
2899 /* If there were errors during symbol merging bail out, we have no
2900 good way to recover here. */
2901 if (seen_error ())
2902 fatal_error (input_location,
2903 "errors during merging of translation units");
2904
2905 /* Fixup all decls. */
2906 lto_fixup_decls (all_file_decl_data);
2907 }
2908 if (tree_with_vars)
2909 ggc_free (tree_with_vars);
2910 tree_with_vars = NULL;
2911 /* During WPA we want to prevent ggc collecting by default. Grow limits
2912 until after the IPA summaries are streamed in. Basically all IPA memory
2913 is explicitly managed by ggc_free and ggc collect is not useful.
2914 The exception is the merged declarations. */
2915 ggc_grow ();
2916 report_heap_memory_use ();
2917
2918 timevar_pop (TV_IPA_LTO_DECL_MERGE);
2919 /* Each pass will set the appropriate timer. */
2920
2921 if (!quiet_flag)
2922 fprintf (stderr, "\nReading summaries:");
2923
2924 /* Read the IPA summary data. */
2925 if (flag_ltrans)
2926 ipa_read_optimization_summaries ();
2927 else
2928 ipa_read_summaries ();
2929
2930 ggc_grow ();
2931
2932 for (i = 0; all_file_decl_data[i]; i++)
2933 {
2934 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
2935 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
2936 all_file_decl_data[i]->symtab_node_encoder = NULL;
2937 lto_in_decl_state *global_decl_state
2938 = all_file_decl_data[i]->global_decl_state;
2939 lto_free_function_in_decl_state (global_decl_state);
2940 all_file_decl_data[i]->global_decl_state = NULL;
2941 all_file_decl_data[i]->current_decl_state = NULL;
2942 }
2943
2944 if (!flag_ltrans)
2945 {
2946 /* Finally merge the cgraph according to the decl merging decisions. */
2947 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
2948
2949 if (!quiet_flag)
2950 fprintf (stderr, "\nMerging symbols:");
2951
2952 gcc_assert (!dump_file);
2953 dump_file = dump_begin (lto_link_dump_id, NULL);
2954
2955 if (dump_file)
2956 {
2957 fprintf (dump_file, "Before merging:\n");
2958 symtab->dump (dump_file);
2959 }
2960 lto_symtab_merge_symbols ();
2961 /* Removal of unreachable symbols is needed to make verify_symtab pass;
2962 we still have duplicated comdat groups containing local statics.
2963 We could also just remove them while merging. */
2964 symtab->remove_unreachable_nodes (dump_file);
2965 ggc_collect ();
2966 report_heap_memory_use ();
2967
2968 if (dump_file)
2969 dump_end (lto_link_dump_id, dump_file);
2970 dump_file = NULL;
2971 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
2972 }
2973 symtab->state = IPA_SSA;
2974 /* All node removals happening here are useless, because
2975 WPA should not stream them. Still always perform remove_unreachable_nodes
2976 because we may reshape the clone tree, get rid of dead masters of inline
2977 clones and remove symbol entries for read-only variables we keep around
2978 only to be able to constant fold them. */
2979 if (flag_ltrans)
2980 {
2981 if (symtab->dump_file)
2982 symtab->dump (symtab->dump_file);
2983 symtab->remove_unreachable_nodes (symtab->dump_file);
2984 }
2985
2986 /* Indicate that the cgraph is built and ready. */
2987 symtab->function_flags_ready = true;
2988
2989 ggc_free (all_file_decl_data);
2990 all_file_decl_data = NULL;
2991 }
2992
2993
2994
2995 /* Show various memory usage statistics related to LTO. */
2996 void
2997 print_lto_report_1 (void)
2998 {
2999 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3000 fprintf (stderr, "%s statistics\n", pfx);
3001
3002 fprintf (stderr, "[%s] read %lu unshared trees\n",
3003 pfx, num_unshared_trees_read);
3004 fprintf (stderr, "[%s] read %lu mergeable SCCs of average size %f\n",
3005 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3006 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx,
3007 total_scc_size + num_unshared_trees_read);
3008 if (flag_wpa && tree_scc_hash && num_sccs_read)
3009 {
3010 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3011 "collision ratio: %f\n", pfx,
3012 (long) tree_scc_hash->size (),
3013 (long) tree_scc_hash->elements (),
3014 tree_scc_hash->collisions ());
3015 hash_table<tree_scc_hasher>::iterator hiter;
3016 tree_scc *scc, *max_scc = NULL;
3017 unsigned max_length = 0;
3018 FOR_EACH_HASH_TABLE_ELEMENT (*tree_scc_hash, scc, x, hiter)
3019 {
3020 unsigned length = 0;
3021 tree_scc *s = scc;
3022 for (; s; s = s->next)
3023 length++;
3024 if (length > max_length)
3025 {
3026 max_length = length;
3027 max_scc = scc;
3028 }
3029 }
3030 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3031 pfx, max_length, max_scc->len);
3032 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3033 num_scc_compares, num_scc_compare_collisions,
3034 num_scc_compare_collisions / (double) num_scc_compares);
3035 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3036 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3037 total_scc_size_merged);
3038 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3039 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3040 pfx, num_prevailing_types, num_type_scc_trees);
3041 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3042 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3043 (long) htab_size (gimple_canonical_types),
3044 (long) htab_elements (gimple_canonical_types),
3045 (long) gimple_canonical_types->searches,
3046 (long) gimple_canonical_types->collisions,
3047 htab_collisions (gimple_canonical_types));
3048 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
3049 "%lu elements, %ld searches\n", pfx,
3050 num_canonical_type_hash_entries,
3051 num_canonical_type_hash_queries);
3052 }
3053
3054 print_lto_report (pfx);
3055 }
3056
3057 GTY(()) tree lto_eh_personality_decl;
3058
3059 /* Return the LTO personality function decl. */
3060
3061 tree
3062 lto_eh_personality (void)
3063 {
3064 if (!lto_eh_personality_decl)
3065 {
3066 /* Use the first personality DECL for our personality if we don't
3067 support multiple ones. This ensures that we don't artificially
3068 create the need for them in a single-language program. */
3069 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3070 lto_eh_personality_decl = first_personality_decl;
3071 else
3072 lto_eh_personality_decl = lhd_gcc_personality ();
3073 }
3074
3075 return lto_eh_personality_decl;
3076 }
3077
3078 /* Set the process name based on the LTO mode. */
3079
3080 static void
3081 lto_process_name (void)
3082 {
3083 if (flag_lto)
3084 setproctitle (flag_incremental_link == INCREMENTAL_LINK_LTO
3085 ? "lto1-inclink" : "lto1-lto");
3086 if (flag_wpa)
3087 setproctitle ("lto1-wpa");
3088 if (flag_ltrans)
3089 setproctitle ("lto1-ltrans");
3090 }
3091
3092
3093 /* Initialize the LTO front end. */
3094
3095 void
3096 lto_fe_init (void)
3097 {
3098 lto_process_name ();
3099 lto_streamer_hooks_init ();
3100 lto_reader_init ();
3101 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3102 memset (&lto_stats, 0, sizeof (lto_stats));
3103 bitmap_obstack_initialize (NULL);
3104 gimple_register_cfg_hooks ();
3105 #ifndef ACCEL_COMPILER
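/* In the host compiler every machine mode maps to itself; build the
   identity table used when reading sections.  */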
3106 unsigned char *table
3107 = ggc_vec_alloc<unsigned char> (MAX_MACHINE_MODE);
3108 for (int m = 0; m < MAX_MACHINE_MODE; m++)
3109 table[m] = m;
3110 lto_mode_identity_table = table;
3111 #endif
3112 }
3113
3114 #include "gt-lto-lto-common.h"
3115