xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/ipa-devirt.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* Basic IPA utilities for type inheritance graph construction and
2    devirtualization.
3    Copyright (C) 2013-2017 Free Software Foundation, Inc.
4    Contributed by Jan Hubicka
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 /* Brief vocabulary:
23      ODR = One Definition Rule
24         In short, the ODR states that:
25 	1 In any translation unit, a template, type, function, or object can
26 	  have no more than one definition. Some of these can have any number
27 	  of declarations. A definition provides an instance.
28         2 In the entire program, an object or non-inline function cannot have
29 	  more than one definition; if an object or function is used, it must
30 	  have exactly one definition. You can declare an object or function
31 	  that is never used, in which case you don't have to provide
32 	  a definition. In no event can there be more than one definition.
33         3 Some things, like types, templates, and extern inline functions, can
34 	  be defined in more than one translation unit. For a given entity,
35 	  each definition must be the same. Non-extern objects and functions
36 	  in different translation units are different entities, even if their
37 	  names and types are the same.
38 
39      OTR = OBJ_TYPE_REF
40        This is the Gimple representation of type information of a polymorphic call.
41        It contains two parameters:
42 	 otr_type is a type of class whose method is called.
43 	 otr_token is the index into virtual table where address is taken.
44 
45      BINFO
46        This is the type inheritance information attached to each tree
47        RECORD_TYPE by the C++ frontend.  It provides information about base
48        types and virtual tables.
49 
50        BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51        BINFO also links to its type by BINFO_TYPE and to the virtual table by
52        BINFO_VTABLE.
53 
54        Base types of a given type are enumerated by BINFO_BASE_BINFO
55        vector.  Members of this vectors are not BINFOs associated
56        with a base type.  Rather they are new copies of BINFOs
57        (base BINFOs). Their virtual tables may differ from
58        virtual table of the base type.  Also BINFO_OFFSET specifies
59        offset of the base within the type.
60 
61        In the case of single inheritance, the virtual table is shared
62        and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
63        inheritance the individual virtual tables are pointed to by
64        BINFO_VTABLE of base binfos (which differ from BINFO_VTABLE of the
65        binfo associated with the base type).
66 
67        BINFO lookup for a given base type and offset can be done by
68        get_binfo_at_offset.  It returns proper BINFO whose virtual table
69        can be used for lookup of virtual methods associated with the
70        base type.
71 
72      token
73        This is an index of virtual method in virtual table associated
74        to the type defining it. Token can be looked up from OBJ_TYPE_REF
75        or from DECL_VINDEX of a given virtual table.
76 
77      polymorphic (indirect) call
78        This is callgraph representation of virtual method call.  Every
79        polymorphic call contains otr_type and otr_token taken from
80        original OBJ_TYPE_REF at callgraph construction time.
81 
82    What we do here:
83 
84    build_type_inheritance_graph triggers a construction of the type inheritance
85    graph.
86 
87      We reconstruct it based on types of methods we see in the unit.
88      This means that the graph is not complete. Types with no methods are not
89      inserted into the graph.  Also types without virtual methods are not
90      represented at all, though it may be easy to add this.
91 
92      The inheritance graph is represented as follows:
93 
94        Vertices are structures odr_type.  Every odr_type may correspond
95        to one or more tree type nodes that are equivalent by ODR rule.
96        (the multiple type nodes appear only with linktime optimization)
97 
98        Edges are represented by odr_type->base and odr_type->derived_types.
99        At the moment we do not track offsets of types for multiple inheritance.
100        Adding this is easy.
101 
102   possible_polymorphic_call_targets returns, given the parameters found in an
103   indirect polymorphic edge, all possible polymorphic call targets of the call.
104 
105   pass_ipa_devirt performs simple speculative devirtualization.
106 */
107 
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "backend.h"
112 #include "rtl.h"
113 #include "tree.h"
114 #include "gimple.h"
115 #include "alloc-pool.h"
116 #include "tree-pass.h"
117 #include "cgraph.h"
118 #include "lto-streamer.h"
119 #include "fold-const.h"
120 #include "print-tree.h"
121 #include "calls.h"
122 #include "ipa-utils.h"
123 #include "gimple-fold.h"
124 #include "symbol-summary.h"
125 #include "tree-vrp.h"
126 #include "ipa-prop.h"
127 #include "ipa-inline.h"
128 #include "demangle.h"
129 #include "dbgcnt.h"
130 #include "gimple-pretty-print.h"
131 #include "intl.h"
132 
/* Hash based set of pairs of types.  Used as a "visited" set to break
   recursion while structurally comparing two types for ODR equivalence
   (see odr_subtypes_equivalent_p below).  */
struct type_pair
{
  /* The two types being compared; kept ordered by TYPE_UID by callers.  */
  tree first;
  tree second;
};
139 
/* Hash traits for type_pair.  Identity is pointer identity of both
   members; the empty slot is marked by a NULL FIRST member and deletion
   is never used (typed_noop_remove).  */
template <>
struct default_hash_traits <type_pair> : typed_noop_remove <type_pair>
{
  typedef type_pair value_type;
  typedef type_pair compare_type;
  /* Combine the two type UIDs into a hash value.  */
  static hashval_t
  hash (type_pair p)
  {
    return TYPE_UID (p.first) ^ TYPE_UID (p.second);
  }
  /* An empty slot is marked by a NULL first type.  */
  static bool
  is_empty (type_pair p)
  {
    return p.first == NULL;
  }
  /* Entries are never deleted from the set.  */
  static bool
  is_deleted (type_pair p ATTRIBUTE_UNUSED)
    {
      return false;
    }
  /* Pairs are equal when both members match exactly.  */
  static bool
  equal (const type_pair &a, const type_pair &b)
    {
      return a.first==b.first && a.second == b.second;
    }
  static void
  mark_empty (type_pair &e)
    {
      e.first = NULL;
    }
};
171 
/* Forward declaration; defined later in this file.  Structural ODR
   equivalence test used by odr_subtypes_equivalent_p.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair> *,
				    location_t, location_t);

/* Set to true once any ODR violation has been diagnosed in this run.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
181 
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type: the canonical tree node representing this ODR type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  Mirrors TYPES for fast
     membership tests.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
  /* Set when a virtual table without RTTI prevailed over a table with it.  */
  bool rtti_broken;
};
212 
213 /* Return TRUE if all derived types of T are known and thus
214    we may consider the walk of derived type complete.
215 
216    This is typically true only for final anonymous namespace types and types
217    defined within functions (that may be COMDAT and thus shared across units,
218    but with the same set of derived types).  */
219 
220 bool
221 type_all_derivations_known_p (const_tree t)
222 {
223   if (TYPE_FINAL_P (t))
224     return true;
225   if (flag_ltrans)
226     return false;
227   /* Non-C++ types may have IDENTIFIER_NODE here, do not crash.  */
228   if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
229     return true;
230   if (type_in_anonymous_namespace_p (t))
231     return true;
232   return (decl_function_context (TYPE_NAME (t)) != NULL);
233 }
234 
235 /* Return TRUE if type's constructors are all visible.  */
236 
237 static bool
238 type_all_ctors_visible_p (tree t)
239 {
240   return !flag_ltrans
241 	 && symtab->state >= CONSTRUCTION
242 	 /* We can not always use type_all_derivations_known_p.
243 	    For function local types we must assume case where
244 	    the function is COMDAT and shared in between units.
245 
246 	    TODO: These cases are quite easy to get, but we need
247 	    to keep track of C++ privatizing via -Wno-weak
248 	    as well as the  IPA privatizing.  */
249 	 && type_in_anonymous_namespace_p (t);
250 }
251 
252 /* Return TRUE if type may have instance.  */
253 
254 static bool
255 type_possibly_instantiated_p (tree t)
256 {
257   tree vtable;
258   varpool_node *vnode;
259 
260   /* TODO: Add abstract types here.  */
261   if (!type_all_ctors_visible_p (t))
262     return true;
263 
264   vtable = BINFO_VTABLE (TYPE_BINFO (t));
265   if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
266     vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
267   vnode = varpool_node::get (vtable);
268   return vnode && vnode->definition;
269 }
270 
/* Hash used to unify ODR types based on their mangled name and for anonymous
   namespace types.  */

struct odr_name_hasher : pointer_hash <odr_type_d>
{
  /* Lookups are keyed by a tree type node rather than another
     odr_type_d.  */
  typedef union tree_node *compare_type;
  static inline hashval_t hash (const odr_type_d *);
  static inline bool equal (const odr_type_d *, const tree_node *);
  static inline void remove (odr_type_d *);
};
281 
/* Hash used to unify ODR types based on their associated virtual table.
   This hash is needed to keep -fno-lto-odr-type-merging to work and contains
   only polymorphic types.  Types with mangled names are inserted to both.  */

struct odr_vtable_hasher:odr_name_hasher
{
  static inline hashval_t hash (const odr_type_d *);
  static inline bool equal (const odr_type_d *, const tree_node *);
};
291 
292 /* Return type that was declared with T's name so that T is an
293    qualified variant of it.  */
294 
295 static inline tree
296 main_odr_variant (const_tree t)
297 {
298   if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
299     return TREE_TYPE (TYPE_NAME (t));
300   /* Unnamed types and non-C++ produced types can be compared by variants.  */
301   else
302     return TYPE_MAIN_VARIANT (t);
303 }
304 
305 static bool
306 can_be_name_hashed_p (tree t)
307 {
308   return (!in_lto_p || odr_type_p (t));
309 }
310 
/* Hash type by its ODR name.  */

static hashval_t
hash_odr_name (const_tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* In LTO, hash by the mangled name stored on the TYPE_DECL; the front
     end must have computed it.  */
  gcc_checking_assert (TYPE_NAME (t)
		       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
  return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
}
331 
/* Return the computed hashcode for ODR_TYPE, delegating to the
   name-based hash of its leader type.  */

inline hashval_t
odr_name_hasher::hash (const odr_type_d *odr_type)
{
  return hash_odr_name (odr_type->type);
}
339 
340 static bool
341 can_be_vtable_hashed_p (tree t)
342 {
343   /* vtable hashing can distinguish only main variants.  */
344   if (TYPE_MAIN_VARIANT (t) != t)
345     return false;
346   /* Anonymous namespace types are always handled by name hash.  */
347   if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
348     return false;
349   return (TREE_CODE (t) == RECORD_TYPE
350 	  && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
351 }
352 
/* Hash type by assembler name of its vtable.  */

static hashval_t
hash_odr_vtable (const_tree t)
{
  tree v = BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (t)));
  inchash::hash hstate;

  /* Only LTO, non-anonymous, polymorphic RECORD_TYPEs that are their own
     main odr variant may be hashed this way.  */
  gcc_checking_assert (in_lto_p);
  gcc_checking_assert (!type_in_anonymous_namespace_p (t));
  gcc_checking_assert (TREE_CODE (t) == RECORD_TYPE
		       && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
  gcc_checking_assert (main_odr_variant (t) == t);

  /* BINFO_VTABLE may be an offsetted reference into the table; mix the
     offset into the hash and strip down to the vtable VAR_DECL.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      add_expr (TREE_OPERAND (v, 1), hstate);
      v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
    }

  hstate.add_wide_int (IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (v)));
  return hstate.end ();
}
376 
/* Return the computed hashcode for ODR_TYPE, delegating to the
   vtable-based hash of its leader type.  */

inline hashval_t
odr_vtable_hasher::hash (const odr_type_d *odr_type)
{
  return hash_odr_vtable (odr_type->type);
}
384 
/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   to merge two copies of otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.

   When STRICT is true, we compare types by their names for purposes of
   ODR violation warnings.  When strict is false, we consider variants
   equivalent, because it is all that matters for devirtualization machinery.
*/

bool
types_same_for_odr (const_tree type1, const_tree type2, bool strict)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = main_odr_variant (type1);
  type2 = main_odr_variant (type2);
  if (!strict)
    {
      type1 = TYPE_MAIN_VARIANT (type1);
      type2 = TYPE_MAIN_VARIANT (type2);
    }

  if (type1 == type2)
    return true;

  /* Outside of LTO the pointer comparison above is decisive.  */
  if (!in_lto_p)
    return false;

  /* Check for anonymous namespaces. Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
      || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
    return false;


  /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.

     Ideally we should never need types without ODR names here.  It can however
     happen in two cases:

       1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
          Here testing for equivalence is safe, since their MAIN_VARIANTs are
          unique.
       2) for units streamed with -fno-lto-odr-type-merging.  Here we can't
	  establish precise ODR equivalency, but for correctness we care only
	  about equivalency on complete polymorphic types.  For these we can
	  compare assembler names of their virtual tables.  */
  if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
      || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
    {
      /* See if types are obviously different (i.e. different codes
	 or polymorphic wrt non-polymorphic).  This is not strictly correct
	 for ODR violating programs, but we can't do better without streaming
	 ODR names.  */
      if (TREE_CODE (type1) != TREE_CODE (type2))
	return false;
      if (TREE_CODE (type1) == RECORD_TYPE
	  && (TYPE_BINFO (type1) == NULL_TREE)
	      != (TYPE_BINFO (type2) == NULL_TREE))
	return false;
      if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
	  && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
	     != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
	return false;

      /* At the moment we have no way to establish ODR equivalence at LTO
	 other than comparing virtual table pointers of polymorphic types.
	 Eventually we should start saving mangled names in TYPE_NAME.
	 Then this condition will become non-trivial.  */

      if (TREE_CODE (type1) == RECORD_TYPE
	  && TYPE_BINFO (type1) && TYPE_BINFO (type2)
	  && BINFO_VTABLE (TYPE_BINFO (type1))
	  && BINFO_VTABLE (TYPE_BINFO (type2)))
	{
	  /* Compare the vtable offsets and the assembler names of the
	     underlying vtable VAR_DECLs.  */
	  tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
	  gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
		      && TREE_CODE (v2) == POINTER_PLUS_EXPR);
	  return (operand_equal_p (TREE_OPERAND (v1, 1),
				   TREE_OPERAND (v2, 1), 0)
		  && DECL_ASSEMBLER_NAME
			 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
		     == DECL_ASSEMBLER_NAME
			 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
	}
      /* Any remaining combination should have been ruled out above.  */
      gcc_unreachable ();
    }
  /* Both types have ODR names: compare them directly.  */
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
	  == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
}
482 
483 /* Return true if we can decide on ODR equivalency.
484 
485    In non-LTO it is always decide, in LTO however it depends in the type has
486    ODR info attached.
487 
488    When STRICT is false, compare main variants.  */
489 
490 bool
491 types_odr_comparable (tree t1, tree t2, bool strict)
492 {
493   return (!in_lto_p
494 	  || (strict ? (main_odr_variant (t1) == main_odr_variant (t2)
495 			&& main_odr_variant (t1))
496 	      : TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
497 	  || (odr_type_p (t1) && odr_type_p (t2))
498 	  || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
499 	      && TYPE_BINFO (t1) && TYPE_BINFO (t2)
500 	      && polymorphic_type_binfo_p (TYPE_BINFO (t1))
501 	      && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
502 }
503 
504 /* Return true if T1 and T2 are ODR equivalent.  If ODR equivalency is not
505    known, be conservative and return false.  */
506 
507 bool
508 types_must_be_same_for_odr (tree t1, tree t2)
509 {
510   if (types_odr_comparable (t1, t2))
511     return types_same_for_odr (t1, t2);
512   else
513     return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
514 }
515 
516 /* If T is compound type, return type it is based on.  */
517 
518 static tree
519 compound_type_base (const_tree t)
520 {
521   if (TREE_CODE (t) == ARRAY_TYPE
522       || POINTER_TYPE_P (t)
523       || TREE_CODE (t) == COMPLEX_TYPE
524       || VECTOR_TYPE_P (t))
525     return TREE_TYPE (t);
526   if (TREE_CODE (t) == METHOD_TYPE)
527     return TYPE_METHOD_BASETYPE (t);
528   if (TREE_CODE (t) == OFFSET_TYPE)
529     return TYPE_OFFSET_BASETYPE (t);
530   return NULL_TREE;
531 }
532 
/* Return true if T is either ODR type or compound type based from it.
   If the function return true, we know that T is a type originating from C++
   source even at link-time.  */

bool
odr_or_derived_type_p (const_tree t)
{
  /* Walk down the chain of compound types until an ODR type is found
     or the chain runs out.  */
  do
    {
      if (odr_type_p (t))
	return true;
      /* Function type is a tricky one. Basically we can consider it
	 ODR derived if return type or any of the parameters is.
	 We need to check all parameters because LTO streaming merges
	 common types (such as void) and they are not considered ODR then.  */
      if (TREE_CODE (t) == FUNCTION_TYPE)
	{
	  if (TYPE_METHOD_BASETYPE (t))
	    t = TYPE_METHOD_BASETYPE (t);
	  else
	   {
	     if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
	       return true;
	     for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
	       if (odr_or_derived_type_p (TREE_VALUE (t)))
		 return true;
	     return false;
	   }
	}
      else
	t = compound_type_base (t);
    }
  while (t);
  /* T is necessarily NULL here, so this converts to false: no ODR type
     was found anywhere in the chain.  */
  return t;
}
568 
/* Compare types T1 and T2 and return true if they are
   equivalent by their ODR (mangled) name.  */

inline bool
odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (main_odr_variant (t2) == t2);
  gcc_checking_assert (main_odr_variant (t1) == t1);
  if (t1 == t2)
    return true;
  /* Outside LTO the pointer comparison above is decisive.  */
  if (!in_lto_p)
    return false;
  /* Check for anonymous namespaces. Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
      || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
    return false;
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
  /* Mangled names are interned, so pointer identity suffices.  */
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
	  == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
}
593 
/* Compare types T1 and T2 and return true if they are
   equivalent by the assembler name of their virtual table.  */

inline bool
odr_vtable_hasher::equal (const odr_type_d *o1, const tree_node *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (main_odr_variant (t2) == t2);
  gcc_checking_assert (main_odr_variant (t1) == t1);
  gcc_checking_assert (in_lto_p);
  t1 = TYPE_MAIN_VARIANT (t1);
  t2 = TYPE_MAIN_VARIANT (t2);
  if (t1 == t2)
    return true;
  /* Both BINFO_VTABLEs are accessed as POINTER_PLUS_EXPR below: compare
     the offsets and the assembler names of the underlying vtable
     VAR_DECLs.  NOTE(review): presumably only types satisfying
     can_be_vtable_hashed_p reach this hasher -- confirm at the insertion
     sites.  */
  tree v1 = BINFO_VTABLE (TYPE_BINFO (t1));
  tree v2 = BINFO_VTABLE (TYPE_BINFO (t2));
  return (operand_equal_p (TREE_OPERAND (v1, 1),
			   TREE_OPERAND (v2, 1), 0)
	  && DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
	     == DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
}
618 
619 /* Free ODR type V.  */
620 
621 inline void
622 odr_name_hasher::remove (odr_type_d *v)
623 {
624   v->bases.release ();
625   v->derived_types.release ();
626   if (v->types_set)
627     delete v->types_set;
628   ggc_free (v);
629 }
630 
/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_name_hasher> odr_hash_type;
static odr_hash_type *odr_hash;
/* Secondary hash keyed by vtable assembler name; keeps
   -fno-lto-odr-type-merging working (see odr_vtable_hasher).  */
typedef hash_table<odr_vtable_hasher> odr_vtable_hash_type;
static odr_vtable_hash_type *odr_vtable_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
644 
645 /* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
646 void
647 set_type_binfo (tree type, tree binfo)
648 {
649   for (; type; type = TYPE_NEXT_VARIANT (type))
650     if (COMPLETE_TYPE_P (type))
651       TYPE_BINFO (type) = binfo;
652     else
653       gcc_assert (!TYPE_BINFO (type));
654 }
655 
/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2,
			   hash_set<type_pair> *visited,
			   location_t loc1, location_t loc2)
{

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
      || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
    return false;

  /* For ODR types be sure to compare their names.
     To support -fno-lto-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2, true))
    {
      if (!types_same_for_odr (t1, t2, true))
        return false;
      /* Limit recursion: If subtypes are ODR types and we know
         that they are same, be happy.  */
      if (!odr_type_p (t1) || !get_odr_type (t1, true)->odr_violated)
        return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if (AGGREGATE_TYPE_P (t1)
      && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;

  /* Canonicalize the pair by TYPE_UID so (A,B) and (B,A) share one
     entry in VISITED.  */
  type_pair pair={t1,t2};
  if (TYPE_UID (t1) > TYPE_UID (t2))
    {
      pair.first = t2;
      pair.second = t1;
    }
  /* If the pair is already being compared, assume equivalence to break
     the recursion; a genuine mismatch is diagnosed on another path.  */
  if (visited->add (pair))
    return true;
  return odr_types_equivalent_p (t1, t2, false, NULL, visited, loc1, loc2);
}
708 
709 /* Return true if DECL1 and DECL2 are identical methods.  Consider
710    name equivalent to name.localalias.xyz.  */
711 
712 static bool
713 methods_equal_p (tree decl1, tree decl2)
714 {
715   if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
716     return true;
717   const char sep = symbol_table::symbol_suffix_separator ();
718 
719   const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
720   const char *ptr1 = strchr (name1, sep);
721   int len1 = ptr1 ? ptr1 - name1 : strlen (name1);
722 
723   const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
724   const char *ptr2 = strchr (name2, sep);
725   int len2 = ptr2 ? ptr2 - name2 : strlen (name2);
726 
727   if (len1 != len2)
728     return false;
729   return !strncmp (name1, name2, len1);
730 }
731 
732 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
733    violation warnings.  */
734 
735 void
736 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
737 {
738   int n1, n2;
739 
740   if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
741     {
742       odr_violation_reported = true;
743       if (DECL_VIRTUAL_P (prevailing->decl))
744 	{
745 	  varpool_node *tmp = prevailing;
746 	  prevailing = vtable;
747 	  vtable = tmp;
748 	}
749       if (warning_at (DECL_SOURCE_LOCATION
750 			(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
751 		      OPT_Wodr,
752 		      "virtual table of type %qD violates one definition rule",
753 		      DECL_CONTEXT (vtable->decl)))
754 	inform (DECL_SOURCE_LOCATION (prevailing->decl),
755 		"variable of same assembler name as the virtual table is "
756 		"defined in another translation unit");
757       return;
758     }
759   if (!prevailing->definition || !vtable->definition)
760     return;
761 
762   /* If we do not stream ODR type info, do not bother to do useful compare.  */
763   if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
764       || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
765     return;
766 
767   odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
768 
769   if (class_type->odr_violated)
770     return;
771 
772   for (n1 = 0, n2 = 0; true; n1++, n2++)
773     {
774       struct ipa_ref *ref1, *ref2;
775       bool end1, end2;
776 
777       end1 = !prevailing->iterate_reference (n1, ref1);
778       end2 = !vtable->iterate_reference (n2, ref2);
779 
780       /* !DECL_VIRTUAL_P means RTTI entry;
781 	 We warn when RTTI is lost because non-RTTI previals; we silently
782 	 accept the other case.  */
783       while (!end2
784 	     && (end1
785 	         || (methods_equal_p (ref1->referred->decl,
786 				      ref2->referred->decl)
787 	             && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
788 	     && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
789 	{
790 	  if (!class_type->rtti_broken
791 	      && warning_at (DECL_SOURCE_LOCATION
792 			      (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
793 			     OPT_Wodr,
794 			     "virtual table of type %qD contains RTTI "
795 			     "information",
796 			     DECL_CONTEXT (vtable->decl)))
797 	    {
798 	      inform (DECL_SOURCE_LOCATION
799 			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
800 		      "but is prevailed by one without from other translation "
801 		      "unit");
802 	      inform (DECL_SOURCE_LOCATION
803 			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
804 		      "RTTI will not work on this type");
805 	      class_type->rtti_broken = true;
806 	    }
807 	  n2++;
808           end2 = !vtable->iterate_reference (n2, ref2);
809 	}
810       while (!end1
811 	     && (end2
812 	         || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
813 	             && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
814 	     && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
815 	{
816 	  n1++;
817           end1 = !prevailing->iterate_reference (n1, ref1);
818 	}
819 
820       /* Finished?  */
821       if (end1 && end2)
822 	{
823 	  /* Extra paranoia; compare the sizes.  We do not have information
824 	     about virtual inheritance offsets, so just be sure that these
825 	     match.
826 	     Do this as very last check so the not very informative error
827 	     is not output too often.  */
828 	  if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
829 	    {
830 	      class_type->odr_violated = true;
831 	      if (warning_at (DECL_SOURCE_LOCATION
832 				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
833 			      OPT_Wodr,
834 			      "virtual table of type %qD violates "
835 			      "one definition rule  ",
836 			      DECL_CONTEXT (vtable->decl)))
837 		{
838 		  inform (DECL_SOURCE_LOCATION
839 			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
840 			  "the conflicting type defined in another translation "
841 			  "unit has virtual table of different size");
842 		}
843 	    }
844 	  return;
845 	}
846 
847       if (!end1 && !end2)
848 	{
849 	  if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
850 	    continue;
851 
852 	  class_type->odr_violated = true;
853 
854 	  /* If the loops above stopped on non-virtual pointer, we have
855 	     mismatch in RTTI information mangling.  */
856 	  if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
857 	      && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
858 	    {
859 	      if (warning_at (DECL_SOURCE_LOCATION
860 				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
861 			      OPT_Wodr,
862 			      "virtual table of type %qD violates "
863 			      "one definition rule  ",
864 			      DECL_CONTEXT (vtable->decl)))
865 		{
866 		  inform (DECL_SOURCE_LOCATION
867 			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
868 			  "the conflicting type defined in another translation "
869 			  "unit with different RTTI information");
870 		}
871 	      return;
872 	    }
873 	  /* At this point both REF1 and REF2 points either to virtual table
874 	     or virtual method.  If one points to virtual table and other to
875 	     method we can complain the same way as if one table was shorter
876 	     than other pointing out the extra method.  */
877 	  if (TREE_CODE (ref1->referred->decl)
878 	      != TREE_CODE (ref2->referred->decl))
879 	    {
880 	      if (VAR_P (ref1->referred->decl))
881 		end1 = true;
882 	      else if (VAR_P (ref2->referred->decl))
883 		end2 = true;
884 	    }
885 	}
886 
887       class_type->odr_violated = true;
888 
889       /* Complain about size mismatch.  Either we have too many virutal
890  	 functions or too many virtual table pointers.  */
891       if (end1 || end2)
892 	{
893 	  if (end1)
894 	    {
895 	      varpool_node *tmp = prevailing;
896 	      prevailing = vtable;
897 	      vtable = tmp;
898 	      ref1 = ref2;
899 	    }
900 	  if (warning_at (DECL_SOURCE_LOCATION
901 			    (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
902 			  OPT_Wodr,
903 			  "virtual table of type %qD violates "
904 			  "one definition rule",
905 			  DECL_CONTEXT (vtable->decl)))
906 	    {
907 	      if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
908 		{
909 		  inform (DECL_SOURCE_LOCATION
910 			   (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
911 			  "the conflicting type defined in another translation "
912 			  "unit");
913 		  inform (DECL_SOURCE_LOCATION
914 			    (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
915 			  "contains additional virtual method %qD",
916 			  ref1->referred->decl);
917 		}
918 	      else
919 		{
920 		  inform (DECL_SOURCE_LOCATION
921 			   (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
922 			  "the conflicting type defined in another translation "
923 			  "unit has virtual table with more entries");
924 		}
925 	    }
926 	  return;
927 	}
928 
929       /* And in the last case we have either mistmatch in between two virtual
930 	 methods or two virtual table pointers.  */
931       if (warning_at (DECL_SOURCE_LOCATION
932 			(TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
933 		      "virtual table of type %qD violates "
934 		      "one definition rule  ",
935 		      DECL_CONTEXT (vtable->decl)))
936 	{
937 	  if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
938 	    {
939 	      inform (DECL_SOURCE_LOCATION
940 			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
941 		      "the conflicting type defined in another translation "
942 		      "unit");
943 	      gcc_assert (TREE_CODE (ref2->referred->decl)
944 			  == FUNCTION_DECL);
945 	      inform (DECL_SOURCE_LOCATION
946 			 (ref1->referred->ultimate_alias_target ()->decl),
947 		      "virtual method %qD",
948 		      ref1->referred->ultimate_alias_target ()->decl);
949 	      inform (DECL_SOURCE_LOCATION
950 			 (ref2->referred->ultimate_alias_target ()->decl),
951 		      "ought to match virtual method %qD but does not",
952 		      ref2->referred->ultimate_alias_target ()->decl);
953 	    }
954 	  else
955 	    inform (DECL_SOURCE_LOCATION
956 		      (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
957 		    "the conflicting type defined in another translation "
958 		    "unit has virtual table with different contents");
959 	  return;
960 	}
961     }
962 }
963 
964 /* Output ODR violation warning about T1 and T2 with REASON.
965    Display location of ST1 and ST2 if REASON speaks about field or
966    method of the type.
967    If WARN is false, do nothing. Set WARNED if warning was indeed
968    output.  */
969 
970 void
971 warn_odr (tree t1, tree t2, tree st1, tree st2,
972 	  bool warn, bool *warned, const char *reason)
973 {
974   tree decl2 = TYPE_NAME (t2);
975   if (warned)
976     *warned = false;
977 
978   if (!warn || !TYPE_NAME(t1))
979     return;
980 
981   /* ODR warnings are output druing LTO streaming; we must apply location
982      cache for potential warnings to be output correctly.  */
983   if (lto_location_cache::current_cache)
984     lto_location_cache::current_cache->apply_location_cache ();
985 
986   if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
987 		   "type %qT violates the C++ One Definition Rule",
988 		   t1))
989     return;
990   if (!st1 && !st2)
991     ;
992   /* For FIELD_DECL support also case where one of fields is
993      NULL - this is used when the structures have mismatching number of
994      elements.  */
995   else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
996     {
997       inform (DECL_SOURCE_LOCATION (decl2),
998 	      "a different type is defined in another translation unit");
999       if (!st1)
1000 	{
1001 	  st1 = st2;
1002 	  st2 = NULL;
1003 	}
1004       inform (DECL_SOURCE_LOCATION (st1),
1005 	      "the first difference of corresponding definitions is field %qD",
1006 	      st1);
1007       if (st2)
1008         decl2 = st2;
1009     }
1010   else if (TREE_CODE (st1) == FUNCTION_DECL)
1011     {
1012       inform (DECL_SOURCE_LOCATION (decl2),
1013 	      "a different type is defined in another translation unit");
1014       inform (DECL_SOURCE_LOCATION (st1),
1015 	      "the first difference of corresponding definitions is method %qD",
1016 	      st1);
1017       decl2 = st2;
1018     }
1019   else
1020     return;
1021   inform (DECL_SOURCE_LOCATION (decl2), reason);
1022 
1023   if (warned)
1024     *warned = true;
1025 }
1026 
/* Return true if T1 and T2 are incompatible and we want to recursively
   dive into them from warn_types_mismatch to give a sensible answer.  */
1029 
1030 static bool
1031 type_mismatch_p (tree t1, tree t2)
1032 {
1033   if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
1034       && !odr_types_equivalent_p (t1, t2))
1035     return true;
1036   return !types_compatible_p (t1, t2);
1037 }
1038 
1039 
/* Types T1 and T2 were found to be incompatible in a context where they
   cannot be (they were either used to declare a symbol of the same
   assembler name or unified by the ODR rule).  We have already output a
   warning about this, but if possible, output extra information on how
   the types mismatch.

   This is hard to do in general.  We basically handle the common cases.

   If LOC1 and LOC2 are meaningful locations, use them in the case the
   types themselves do not have one.  */
1049 
1050 void
1051 warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
1052 {
1053   /* Location of type is known only if it has TYPE_NAME and the name is
1054      TYPE_DECL.  */
1055   location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1056 		      ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
1057 		      : UNKNOWN_LOCATION;
1058   location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1059 		      ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
1060 		      : UNKNOWN_LOCATION;
1061   bool loc_t2_useful = false;
1062 
1063   /* With LTO it is a common case that the location of both types match.
1064      See if T2 has a location that is different from T1. If so, we will
1065      inform user about the location.
1066      Do not consider the location passed to us in LOC1/LOC2 as those are
1067      already output.  */
1068   if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
1069     {
1070       if (loc_t1 <= BUILTINS_LOCATION)
1071 	loc_t2_useful = true;
1072       else
1073 	{
1074 	  expanded_location xloc1 = expand_location (loc_t1);
1075 	  expanded_location xloc2 = expand_location (loc_t2);
1076 
1077 	  if (strcmp (xloc1.file, xloc2.file)
1078 	      || xloc1.line != xloc2.line
1079 	      || xloc1.column != xloc2.column)
1080 	    loc_t2_useful = true;
1081 	}
1082     }
1083 
1084   if (loc_t1 <= BUILTINS_LOCATION)
1085     loc_t1 = loc1;
1086   if (loc_t2 <= BUILTINS_LOCATION)
1087     loc_t2 = loc2;
1088 
1089   location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
1090 
1091   /* It is a quite common bug to reference anonymous namespace type in
1092      non-anonymous namespace class.  */
1093   if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1094       || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1095     {
1096       if (type_with_linkage_p (t1) && !type_in_anonymous_namespace_p (t1))
1097 	{
1098 	  std::swap (t1, t2);
1099 	  std::swap (loc_t1, loc_t2);
1100 	}
1101       gcc_assert (TYPE_NAME (t1) && TYPE_NAME (t2)
1102 		  && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1103 		  && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL);
1104       /* Most of the time, the type names will match, do not be unnecesarily
1105          verbose.  */
1106       if (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t1)))
1107 	  != IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t2))))
1108         inform (loc_t1,
1109 	        "type %qT defined in anonymous namespace can not match "
1110 	        "type %qT across the translation unit boundary",
1111 	        t1, t2);
1112       else
1113         inform (loc_t1,
1114 	        "type %qT defined in anonymous namespace can not match "
1115 	        "across the translation unit boundary",
1116 	        t1);
1117       if (loc_t2_useful)
1118         inform (loc_t2,
1119 	        "the incompatible type defined in another translation unit");
1120       return;
1121     }
1122   /* If types have mangled ODR names and they are different, it is most
1123      informative to output those.
1124      This also covers types defined in different namespaces.  */
1125   if (TYPE_NAME (t1) && TYPE_NAME (t2)
1126       && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1127       && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1128       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t1))
1129       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t2))
1130       && DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
1131 	 != DECL_ASSEMBLER_NAME (TYPE_NAME (t2)))
1132     {
1133       char *name1 = xstrdup (cplus_demangle
1134 	 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))),
1135 	  DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES));
1136       char *name2 = cplus_demangle
1137 	 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t2))),
1138 	  DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES);
1139       if (name1 && name2 && strcmp (name1, name2))
1140 	{
1141 	  inform (loc_t1,
1142 		  "type name %qs should match type name %qs",
1143 		  name1, name2);
1144 	  if (loc_t2_useful)
1145 	    inform (loc_t2,
1146 		    "the incompatible type is defined here");
1147 	  free (name1);
1148 	  return;
1149 	}
1150       free (name1);
1151     }
1152   /* A tricky case are compound types.  Often they appear the same in source
1153      code and the mismatch is dragged in by type they are build from.
1154      Look for those differences in subtypes and try to be informative.  In other
1155      cases just output nothing because the source code is probably different
1156      and in this case we already output a all necessary info.  */
1157   if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1158     {
1159       if (TREE_CODE (t1) == TREE_CODE (t2))
1160 	{
1161 	  if (TREE_CODE (t1) == ARRAY_TYPE
1162 	      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1163 	    {
1164 	      tree i1 = TYPE_DOMAIN (t1);
1165 	      tree i2 = TYPE_DOMAIN (t2);
1166 
1167 	      if (i1 && i2
1168 		  && TYPE_MAX_VALUE (i1)
1169 		  && TYPE_MAX_VALUE (i2)
1170 		  && !operand_equal_p (TYPE_MAX_VALUE (i1),
1171 				       TYPE_MAX_VALUE (i2), 0))
1172 		{
1173 		  inform (loc,
1174 			  "array types have different bounds");
1175 		  return;
1176 		}
1177 	    }
1178 	  if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1179 	      && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1180 	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1181 	  else if (TREE_CODE (t1) == METHOD_TYPE
1182 		   || TREE_CODE (t1) == FUNCTION_TYPE)
1183 	    {
1184 	      tree parms1 = NULL, parms2 = NULL;
1185 	      int count = 1;
1186 
1187 	      if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1188 		{
1189 		  inform (loc, "return value type mismatch");
1190 		  warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1191 				       loc_t2);
1192 		  return;
1193 		}
1194 	      if (prototype_p (t1) && prototype_p (t2))
1195 		for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1196 		     parms1 && parms2;
1197 		     parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1198 		     count++)
1199 		  {
1200 		    if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1201 		      {
1202 			if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1203 			  inform (loc,
1204 				  "implicit this pointer type mismatch");
1205 			else
1206 			  inform (loc,
1207 				  "type mismatch in parameter %i",
1208 				  count - (TREE_CODE (t1) == METHOD_TYPE));
1209 			warn_types_mismatch (TREE_VALUE (parms1),
1210 					     TREE_VALUE (parms2),
1211 					     loc_t1, loc_t2);
1212 			return;
1213 		      }
1214 		  }
1215 	      if (parms1 || parms2)
1216 		{
1217 		  inform (loc,
1218 			  "types have different parameter counts");
1219 		  return;
1220 		}
1221 	    }
1222 	}
1223       return;
1224     }
1225 
1226   if (types_odr_comparable (t1, t2, true)
1227       && types_same_for_odr (t1, t2, true))
1228     inform (loc_t1,
1229 	    "type %qT itself violates the C++ One Definition Rule", t1);
1230   /* Prevent pointless warnings like "struct aa" should match "struct aa".  */
1231   else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1232 	   && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1233     return;
1234   else
1235     inform (loc_t1, "type %qT should match type %qT",
1236 	    t1, t2);
1237   if (loc_t2_useful)
1238     inform (loc_t2, "the incompatible type is defined here");
1239 }
1240 
/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.
   If LOC1 and LOC2 are not UNKNOWN_LOCATION they may be used to output a
   warning about the type if the type itself does not have a location.  */
1247 
1248 static bool
1249 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
1250 			hash_set<type_pair> *visited,
1251 			location_t loc1, location_t loc2)
1252 {
1253   /* Check first for the obvious case of pointer identity.  */
1254   if (t1 == t2)
1255     return true;
1256   gcc_assert (!type_with_linkage_p (t1) || !type_in_anonymous_namespace_p (t1));
1257   gcc_assert (!type_with_linkage_p (t2) || !type_in_anonymous_namespace_p (t2));
1258 
1259   /* Can't be the same type if the types don't have the same code.  */
1260   if (TREE_CODE (t1) != TREE_CODE (t2))
1261     {
1262       warn_odr (t1, t2, NULL, NULL, warn, warned,
1263 	        G_("a different type is defined in another translation unit"));
1264       return false;
1265     }
1266 
1267   if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
1268     {
1269       warn_odr (t1, t2, NULL, NULL, warn, warned,
1270 	        G_("a type with different qualifiers is defined in another "
1271 		   "translation unit"));
1272       return false;
1273     }
1274 
1275   if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1276       || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1277     {
1278       /* We can not trip this when comparing ODR types, only when trying to
1279 	 match different ODR derivations from different declarations.
1280 	 So WARN should be always false.  */
1281       gcc_assert (!warn);
1282       return false;
1283     }
1284 
1285   if (comp_type_attributes (t1, t2) != 1)
1286     {
1287       warn_odr (t1, t2, NULL, NULL, warn, warned,
1288 	        G_("a type with different attributes "
1289 		   "is defined in another translation unit"));
1290       return false;
1291     }
1292 
1293   if (TREE_CODE (t1) == ENUMERAL_TYPE
1294       && TYPE_VALUES (t1) && TYPE_VALUES (t2))
1295     {
1296       tree v1, v2;
1297       for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
1298 	   v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
1299 	{
1300 	  if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
1301 	    {
1302 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1303 			G_("an enum with different value name"
1304 			   " is defined in another translation unit"));
1305 	      return false;
1306 	    }
1307 	  if (TREE_VALUE (v1) != TREE_VALUE (v2)
1308 	      && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
1309 				   DECL_INITIAL (TREE_VALUE (v2)), 0))
1310 	    {
1311 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1312 			G_("an enum with different values is defined"
1313 			   " in another translation unit"));
1314 	      return false;
1315 	    }
1316 	}
1317       if (v1 || v2)
1318 	{
1319 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1320 		    G_("an enum with mismatching number of values "
1321 		       "is defined in another translation unit"));
1322 	  return false;
1323 	}
1324     }
1325 
1326   /* Non-aggregate types can be handled cheaply.  */
1327   if (INTEGRAL_TYPE_P (t1)
1328       || SCALAR_FLOAT_TYPE_P (t1)
1329       || FIXED_POINT_TYPE_P (t1)
1330       || TREE_CODE (t1) == VECTOR_TYPE
1331       || TREE_CODE (t1) == COMPLEX_TYPE
1332       || TREE_CODE (t1) == OFFSET_TYPE
1333       || POINTER_TYPE_P (t1))
1334     {
1335       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
1336 	{
1337 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1338 		    G_("a type with different precision is defined "
1339 		       "in another translation unit"));
1340 	  return false;
1341 	}
1342       if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
1343 	{
1344 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1345 		    G_("a type with different signedness is defined "
1346 		       "in another translation unit"));
1347 	  return false;
1348 	}
1349 
1350       if (TREE_CODE (t1) == INTEGER_TYPE
1351 	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1352 	{
1353 	  /* char WRT uint_8?  */
1354 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1355 		    G_("a different type is defined in another "
1356 		       "translation unit"));
1357 	  return false;
1358 	}
1359 
1360       /* For canonical type comparisons we do not want to build SCCs
1361 	 so we cannot compare pointed-to types.  But we can, for now,
1362 	 require the same pointed-to type kind and match what
1363 	 useless_type_conversion_p would do.  */
1364       if (POINTER_TYPE_P (t1))
1365 	{
1366 	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1367 	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1368 	    {
1369 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1370 			G_("it is defined as a pointer in different address "
1371 			   "space in another translation unit"));
1372 	      return false;
1373 	    }
1374 
1375 	  if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1376 					  visited, loc1, loc2))
1377 	    {
1378 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1379 			G_("it is defined as a pointer to different type "
1380 			   "in another translation unit"));
1381 	      if (warn && warned)
1382 	        warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
1383 				     loc1, loc2);
1384 	      return false;
1385 	    }
1386 	}
1387 
1388       if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
1389 	  && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1390 					 visited, loc1, loc2))
1391 	{
1392 	  /* Probably specific enough.  */
1393 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1394 		    G_("a different type is defined "
1395 		       "in another translation unit"));
1396 	  if (warn && warned)
1397 	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1398 	  return false;
1399 	}
1400     }
1401   /* Do type-specific comparisons.  */
1402   else switch (TREE_CODE (t1))
1403     {
1404     case ARRAY_TYPE:
1405       {
1406 	/* Array types are the same if the element types are the same and
1407 	   the number of elements are the same.  */
1408 	if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1409 					visited, loc1, loc2))
1410 	  {
1411 	    warn_odr (t1, t2, NULL, NULL, warn, warned,
1412 		      G_("a different type is defined in another "
1413 			 "translation unit"));
1414 	    if (warn && warned)
1415 	      warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1416 	  }
1417 	gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1418 	gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1419 		    == TYPE_NONALIASED_COMPONENT (t2));
1420 
1421 	tree i1 = TYPE_DOMAIN (t1);
1422 	tree i2 = TYPE_DOMAIN (t2);
1423 
1424 	/* For an incomplete external array, the type domain can be
1425 	   NULL_TREE.  Check this condition also.  */
1426 	if (i1 == NULL_TREE || i2 == NULL_TREE)
1427 	  return true;
1428 
1429 	tree min1 = TYPE_MIN_VALUE (i1);
1430 	tree min2 = TYPE_MIN_VALUE (i2);
1431 	tree max1 = TYPE_MAX_VALUE (i1);
1432 	tree max2 = TYPE_MAX_VALUE (i2);
1433 
1434 	/* In C++, minimums should be always 0.  */
1435 	gcc_assert (min1 == min2);
1436 	if (!operand_equal_p (max1, max2, 0))
1437 	  {
1438 	    warn_odr (t1, t2, NULL, NULL, warn, warned,
1439 		      G_("an array of different size is defined "
1440 			 "in another translation unit"));
1441 	    return false;
1442 	  }
1443       }
1444     break;
1445 
1446     case METHOD_TYPE:
1447     case FUNCTION_TYPE:
1448       /* Function types are the same if the return type and arguments types
1449 	 are the same.  */
1450       if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1451 				      visited, loc1, loc2))
1452 	{
1453 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1454 		    G_("has different return value "
1455 		       "in another translation unit"));
1456 	  if (warn && warned)
1457 	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1458 	  return false;
1459 	}
1460 
1461       if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
1462 	  || !prototype_p (t1) || !prototype_p (t2))
1463 	return true;
1464       else
1465 	{
1466 	  tree parms1, parms2;
1467 
1468 	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1469 	       parms1 && parms2;
1470 	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1471 	    {
1472 	      if (!odr_subtypes_equivalent_p
1473 		     (TREE_VALUE (parms1), TREE_VALUE (parms2), visited,
1474 		      loc1, loc2))
1475 		{
1476 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1477 			    G_("has different parameters in another "
1478 			       "translation unit"));
1479 		  if (warn && warned)
1480 		    warn_types_mismatch (TREE_VALUE (parms1),
1481 					 TREE_VALUE (parms2), loc1, loc2);
1482 		  return false;
1483 		}
1484 	    }
1485 
1486 	  if (parms1 || parms2)
1487 	    {
1488 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1489 			G_("has different parameters "
1490 			   "in another translation unit"));
1491 	      return false;
1492 	    }
1493 
1494 	  return true;
1495 	}
1496 
1497     case RECORD_TYPE:
1498     case UNION_TYPE:
1499     case QUAL_UNION_TYPE:
1500       {
1501 	tree f1, f2;
1502 
1503 	/* For aggregate types, all the fields must be the same.  */
1504 	if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1505 	  {
1506 	    if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
1507 	        && polymorphic_type_binfo_p (TYPE_BINFO (t1))
1508 		   != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
1509 	      {
1510 		if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
1511 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1512 			    G_("a type defined in another translation unit "
1513 			       "is not polymorphic"));
1514 		else
1515 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1516 			    G_("a type defined in another translation unit "
1517 			       "is polymorphic"));
1518 		return false;
1519 	      }
1520 	    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1521 		 f1 || f2;
1522 		 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1523 	      {
1524 		/* Skip non-fields.  */
1525 		while (f1 && TREE_CODE (f1) != FIELD_DECL)
1526 		  f1 = TREE_CHAIN (f1);
1527 		while (f2 && TREE_CODE (f2) != FIELD_DECL)
1528 		  f2 = TREE_CHAIN (f2);
1529 		if (!f1 || !f2)
1530 		  break;
1531 		if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1532 		  {
1533 		    warn_odr (t1, t2, NULL, NULL, warn, warned,
1534 			      G_("a type with different virtual table pointers"
1535 			         " is defined in another translation unit"));
1536 		    return false;
1537 		  }
1538 		if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1539 		  {
1540 		    warn_odr (t1, t2, NULL, NULL, warn, warned,
1541 			      G_("a type with different bases is defined "
1542 				 "in another translation unit"));
1543 		    return false;
1544 		  }
1545 		if (DECL_NAME (f1) != DECL_NAME (f2)
1546 		    && !DECL_ARTIFICIAL (f1))
1547 		  {
1548 		    warn_odr (t1, t2, f1, f2, warn, warned,
1549 			      G_("a field with different name is defined "
1550 				 "in another translation unit"));
1551 		    return false;
1552 		  }
1553 		if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1554 						TREE_TYPE (f2), visited,
1555 						loc1, loc2))
1556 		  {
1557 		    /* Do not warn about artificial fields and just go into
1558  		       generic field mismatch warning.  */
1559 		    if (DECL_ARTIFICIAL (f1))
1560 		      break;
1561 
1562 		    warn_odr (t1, t2, f1, f2, warn, warned,
1563 			      G_("a field of same name but different type "
1564 				 "is defined in another translation unit"));
1565 		    if (warn && warned)
1566 		      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
1567 		    return false;
1568 		  }
1569 		if (!gimple_compare_field_offset (f1, f2))
1570 		  {
1571 		    /* Do not warn about artificial fields and just go into
1572 		       generic field mismatch warning.  */
1573 		    if (DECL_ARTIFICIAL (f1))
1574 		      break;
1575 		    warn_odr (t1, t2, f1, f2, warn, warned,
1576 			      G_("fields have different layout "
1577 				 "in another translation unit"));
1578 		    return false;
1579 		  }
1580 		if (DECL_BIT_FIELD (f1) != DECL_BIT_FIELD (f2))
1581 		  {
1582 		    warn_odr (t1, t2, f1, f2, warn, warned,
1583 			      G_("one field is bitfield while other is not"));
1584 		    return false;
1585 		  }
1586 		else
1587 		  gcc_assert (DECL_NONADDRESSABLE_P (f1)
1588 			      == DECL_NONADDRESSABLE_P (f2));
1589 	      }
1590 
1591 	    /* If one aggregate has more fields than the other, they
1592 	       are not the same.  */
1593 	    if (f1 || f2)
1594 	      {
1595 		if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1596 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1597 			    G_("a type with different virtual table pointers"
1598 			       " is defined in another translation unit"));
1599 		else if ((f1 && DECL_ARTIFICIAL (f1))
1600 		         || (f2 && DECL_ARTIFICIAL (f2)))
1601 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1602 			    G_("a type with different bases is defined "
1603 			       "in another translation unit"));
1604 		else
1605 		  warn_odr (t1, t2, f1, f2, warn, warned,
1606 			    G_("a type with different number of fields "
1607 			       "is defined in another translation unit"));
1608 
1609 		return false;
1610 	      }
1611 	    if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1612 		&& COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t1))
1613 		&& COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t2))
1614 		&& odr_type_p (TYPE_MAIN_VARIANT (t1))
1615 		&& odr_type_p (TYPE_MAIN_VARIANT (t2))
1616 		&& (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1617 		    != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1618 	      {
1619 		/* Currently free_lang_data sets TYPE_METHODS to error_mark_node
1620 		   if it is non-NULL so this loop will never realy execute.  */
1621 		if (TYPE_METHODS (TYPE_MAIN_VARIANT (t1)) != error_mark_node
1622 		    && TYPE_METHODS (TYPE_MAIN_VARIANT (t2)) != error_mark_node)
1623 		  for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1624 		       f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1625 		       f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1626 		    {
1627 		      if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1628 			{
1629 			  warn_odr (t1, t2, f1, f2, warn, warned,
1630 				    G_("a different method of same type "
1631 				       "is defined in another "
1632 				       "translation unit"));
1633 			  return false;
1634 			}
1635 		      if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1636 			{
1637 			  warn_odr (t1, t2, f1, f2, warn, warned,
1638 				    G_("a definition that differs by virtual "
1639 				       "keyword in another translation unit"));
1640 			  return false;
1641 			}
1642 		      if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1643 			{
1644 			  warn_odr (t1, t2, f1, f2, warn, warned,
1645 				    G_("virtual table layout differs "
1646 				       "in another translation unit"));
1647 			  return false;
1648 			}
1649 		      if (odr_subtypes_equivalent_p (TREE_TYPE (f1),
1650 						     TREE_TYPE (f2), visited,
1651 						     loc1, loc2))
1652 			{
1653 			  warn_odr (t1, t2, f1, f2, warn, warned,
1654 				    G_("method with incompatible type is "
1655 				       "defined in another translation unit"));
1656 			  return false;
1657 			}
1658 		    }
1659 		if ((f1 == NULL) != (f2 == NULL))
1660 		  {
1661 		    warn_odr (t1, t2, NULL, NULL, warn, warned,
1662 			      G_("a type with different number of methods "
1663 				 "is defined in another translation unit"));
1664 		    return false;
1665 		  }
1666 	      }
1667 	  }
1668 	break;
1669       }
1670     case VOID_TYPE:
1671     case NULLPTR_TYPE:
1672       break;
1673 
1674     default:
1675       debug_tree (t1);
1676       gcc_unreachable ();
1677     }
1678 
1679   /* Those are better to come last as they are utterly uninformative.  */
1680   if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1681       && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1682     {
1683       warn_odr (t1, t2, NULL, NULL, warn, warned,
1684 		G_("a type with different size "
1685 		   "is defined in another translation unit"));
1686       return false;
1687     }
1688   if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1689       && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1690     {
1691       warn_odr (t1, t2, NULL, NULL, warn, warned,
1692 		G_("a type with different alignment "
1693 		   "is defined in another translation unit"));
1694       return false;
1695     }
1696   gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1697 	      || operand_equal_p (TYPE_SIZE_UNIT (t1),
1698 				  TYPE_SIZE_UNIT (t2), 0));
1699   return true;
1700 }
1701 
1702 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule.  */
1703 
1704 bool
1705 odr_types_equivalent_p (tree type1, tree type2)
1706 {
1707   gcc_checking_assert (odr_or_derived_type_p (type1)
1708 		       && odr_or_derived_type_p (type2));
1709 
1710   hash_set<type_pair> visited;
1711   return odr_types_equivalent_p (type1, type2, false, NULL,
1712 			         &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1713 }
1714 
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type or due to ODR violation.

   Analyze and report ODR violations and add the type to the duplicate
   list.  If TYPE is more specified than VAL->type, make TYPE the new
   prevailing VAL->type.  Also return true if this is the first time we
   see a definition of the class, so the base types are analyzed.  */
1723 
static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  bool prevail = false;
  bool odr_must_violate = false;

  /* Lazily allocate the set used to detect already-recorded duplicates.  */
  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Choose polymorphic type as leader (this happens only in case of ODR
     violations).  */
  if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
       && polymorphic_type_binfo_p (TYPE_BINFO (type)))
      && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
          || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
    {
      prevail = true;
      build_bases = true;
    }
  /* Always prefer complete type to be the leader.  */
  else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
    {
      prevail = true;
      /* Bases only need (re)building when the new leader carries binfo.  */
      build_bases = TYPE_BINFO (type);
    }
  else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
    ;
  /* For enums prefer the variant with values over a bare declaration.  */
  else if (TREE_CODE (val->type) == ENUMERAL_TYPE
	   && TREE_CODE (type) == ENUMERAL_TYPE
	   && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
    prevail = true;
  /* For records prefer the variant carrying inheritance info (binfo).  */
  else if (TREE_CODE (val->type) == RECORD_TYPE
	   && TREE_CODE (type) == RECORD_TYPE
	   && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
    {
      gcc_assert (!val->bases.length ());
      build_bases = true;
      prevail = true;
    }

  /* Swap so the preferred variant becomes the leader; the old leader
     continues below as the duplicate TYPE.  */
  if (prevail)
    std::swap (val->type, type);

  val->types_set->add (type);

  /* If we now have a mangled name, be sure to record it to val->type
     so ODR hash can work.  */

  if (can_be_name_hashed_p (type) && !can_be_name_hashed_p (val->type))
    SET_DECL_ASSEMBLER_NAME (TYPE_NAME (val->type),
			     DECL_ASSEMBLER_NAME (TYPE_NAME (type)));

  bool merge = true;
  bool base_mismatch = false;
  unsigned int i;
  bool warned = false;
  hash_set<type_pair> visited;

  /* Duplicates can only appear while streaming multiple units in LTO.  */
  gcc_assert (in_lto_p);
  vec_safe_push (val->types, type);

  /* If both are class types, compare the bases.  */
  if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type))
    {
      if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
	  != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
	{
	  if (!flag_ltrans && !warned && !val->odr_violated)
	    {
	      tree extra_base;
	      warn_odr (type, val->type, NULL, NULL, !warned, &warned,
			"a type with the same name but different "
			"number of polymorphic bases is "
			"defined in another translation unit");
	      /* Point the user at the base present in only one variant.  */
	      if (warned)
		{
		  if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
		      > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
		  else
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (val->type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
		  tree extra_base_type = BINFO_TYPE (extra_base);
		  inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
			  "the extra base is defined here");
		}
	    }
	  base_mismatch = true;
	}
      else
	/* Same number of bases: compare them pairwise.  */
	for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	  {
	    tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
	    tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
	    tree type1 = BINFO_TYPE (base1);
	    tree type2 = BINFO_TYPE (base2);

	    if (types_odr_comparable (type1, type2))
	      {
		if (!types_same_for_odr (type1, type2))
		  base_mismatch = true;
	      }
	    else
	      if (!odr_types_equivalent_p (type1, type2))
		base_mismatch = true;
	    if (base_mismatch)
	      {
		if (!warned && !val->odr_violated)
		  {
		    warn_odr (type, val->type, NULL, NULL,
			      !warned, &warned,
			      "a type with the same name but different base "
			      "type is defined in another translation unit");
		    if (warned)
		      warn_types_mismatch (type1, type2,
					    UNKNOWN_LOCATION, UNKNOWN_LOCATION);
		  }
		break;
	      }
	    if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
	      {
		base_mismatch = true;
		if (!warned && !val->odr_violated)
		  warn_odr (type, val->type, NULL, NULL,
			    !warned, &warned,
			    "a type with the same name but different base "
			    "layout is defined in another translation unit");
		break;
	      }
	    /* One of bases is not of complete type.  */
	    if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
	      {
		/* If we have a polymorphic type info specified for TYPE1
		   but not for TYPE2 we possibly missed a base when recording
		   VAL->type earlier.
		   Be sure this does not happen.  */
		if (TYPE_BINFO (type1)
		    && polymorphic_type_binfo_p (TYPE_BINFO (type1))
		    && !build_bases)
		  odr_must_violate = true;
	        break;
	      }
	    /* One base is polymorphic and the other not.
	       This ought to be diagnosed earlier, but do not ICE in the
	       checking below.  */
	    else if (TYPE_BINFO (type1)
		     && polymorphic_type_binfo_p (TYPE_BINFO (type1))
		        != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
	      {
		if (!warned && !val->odr_violated)
		  warn_odr (type, val->type, NULL, NULL,
			    !warned, &warned,
			    "a base of the type is polymorphic only in one "
			    "translation unit");
		base_mismatch = true;
		break;
	      }
	  }
      if (base_mismatch)
	{
	  merge = false;
	  odr_violation_reported = true;
	  val->odr_violated = true;

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file, "ODR base violation\n");

	      print_node (symtab->dump_file, "", val->type, 0);
	      putc ('\n',symtab->dump_file);
	      print_node (symtab->dump_file, "", type, 0);
	      putc ('\n',symtab->dump_file);
	    }
	}
    }

  /* Next compare memory layout.
     Warn only once per ODR pair and only when no violation was seen yet.  */
  if (!odr_types_equivalent_p (val->type, type,
			       !flag_ltrans && !val->odr_violated && !warned,
			       &warned, &visited,
			       DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
			       DECL_SOURCE_LOCATION (TYPE_NAME (type))))
    {
      merge = false;
      odr_violation_reported = true;
      val->odr_violated = true;
    }
  gcc_assert (val->odr_violated || !odr_must_violate);
  /* Sanity check that all bases will be build same way again.  */
  if (flag_checking
      && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type)
      && !val->odr_violated
      && !base_mismatch && val->bases.length ())
    {
      unsigned int num_poly_bases = 0;
      unsigned int j;

      for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
					 (TYPE_BINFO (type), i)))
	  num_poly_bases++;
      gcc_assert (num_poly_bases == val->bases.length ());
      /* Walk polymorphic bases in order and verify each matches the
	 odr_type recorded in VAL->bases.  */
      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
	   i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
				       (TYPE_BINFO (type), i)))
	  {
	    odr_type base = get_odr_type
			       (BINFO_TYPE
				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
						     i)),
				true);
	    gcc_assert (val->bases[j] == base);
	    j++;
	  }
    }


  /* Regularize things a little.  During LTO same types may come with
     different BINFOs.  Either because their virtual table was
     not merged by tree merging and only later at decl merging or
     because one type comes with external vtable, while other
     with internal.  We want to merge equivalent binfos to conserve
     memory and streaming overhead.

     The external vtables are more harmful: they contain references
     to external declarations of methods that may be defined in the
     merged LTO unit.  For this reason we absolutely need to remove
     them and replace by internal variants. Not doing so will lead
     to incomplete answers from possible_polymorphic_call_targets.

     FIXME: disable for now; because ODR types are now build during
     streaming in, the variants do not need to be linked to the type,
     yet.  We need to do the merging in cleanup pass to be implemented
     soon.  */
  if (!flag_ltrans && merge
      && 0
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type)
      && TYPE_MAIN_VARIANT (type) == type
      && TYPE_MAIN_VARIANT (val->type) == val->type
      && BINFO_VTABLE (TYPE_BINFO (val->type))
      && BINFO_VTABLE (TYPE_BINFO (type)))
    {
      tree master_binfo = TYPE_BINFO (val->type);
      tree v1 = BINFO_VTABLE (master_binfo);
      tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

      /* Strip the POINTER_PLUS_EXPR wrapper down to the vtable decl.  */
      if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	{
	  gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
		      && operand_equal_p (TREE_OPERAND (v1, 1),
					  TREE_OPERAND (v2, 1), 0));
	  v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	  v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	}
      gcc_assert (DECL_ASSEMBLER_NAME (v1)
		  == DECL_ASSEMBLER_NAME (v2));

      /* Prefer the variant whose vtable is defined locally.  */
      if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	{
	  unsigned int i;

	  set_type_binfo (val->type, TYPE_BINFO (type));
	  for (i = 0; i < val->types->length (); i++)
	    {
	      if (TYPE_BINFO ((*val->types)[i])
		  == master_binfo)
		set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
	    }
	  BINFO_TYPE (TYPE_BINFO (type)) = val->type;
	}
      else
	set_type_binfo (type, master_binfo);
    }
  return build_bases;
}
2012 
2013 /* Get ODR type hash entry for TYPE.  If INSERT is true, create
2014    possibly new entry.  */
2015 
odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type_d **vtable_slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  gcc_checking_assert (can_be_name_hashed_p (type)
		       || can_be_vtable_hashed_p (type));

  /* Lookup entry, first try name hash, fallback to vtable hash.  */
  if (can_be_name_hashed_p (type))
    {
      hash = hash_odr_name (type);
      slot = odr_hash->find_slot_with_hash (type, hash,
					    insert ? INSERT : NO_INSERT);
    }
  if ((!slot || !*slot) && in_lto_p && can_be_vtable_hashed_p (type))
    {
      hash = hash_odr_vtable (type);
      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
					           insert ? INSERT : NO_INSERT);
    }

  /* Neither hash applies (NO_INSERT lookups failed); nothing to return.  */
  if (!slot && !vtable_slot)
    return NULL;

  /* See if we already have entry for type.  */
  if ((slot && *slot) || (vtable_slot && *vtable_slot))
    {
      if (slot && *slot)
	{
	  val = *slot;
	  /* With checking, verify both hashes agree on the same entry.  */
	  if (flag_checking
	      && in_lto_p && can_be_vtable_hashed_p (type))
	    {
	      hash = hash_odr_vtable (type);
	      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
						                  NO_INSERT);
	      gcc_assert (!vtable_slot || *vtable_slot == *slot);
	      vtable_slot = NULL;
	    }
	}
      else if (*vtable_slot)
	val = *vtable_slot;

      /* New tree variant of an already-known ODR type; record it as a
	 duplicate (types_set->add returns false when not seen before).  */
      if (val->type != type
	  && (!val->types_set || !val->types_set->add (type)))
	{
	  gcc_assert (insert);
	  /* We have type duplicate, but it may introduce vtable name or
 	     mangled name; be sure to keep hashes in sync.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type)
	      && (!vtable_slot || !*vtable_slot))
	    {
	      if (!vtable_slot)
		{
		  hash = hash_odr_vtable (type);
		  vtable_slot = odr_vtable_hash->find_slot_with_hash
			     (type, hash, INSERT);
		  gcc_checking_assert (!*vtable_slot || *vtable_slot == val);
		}
	      *vtable_slot = val;
	    }
	  if (slot && !*slot)
	    *slot = val;
	  build_bases = add_type_duplicate (val, type);
	}
    }
  else
    {
      /* First time we see this ODR type: allocate a fresh entry.  */
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      if (type_with_linkage_p (type))
        val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      else
	val->anonymous_namespace = 0;
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      if (slot)
        *slot = val;
      if (vtable_slot)
	*vtable_slot = val;
    }

  /* Link VAL into the inheritance graph via its polymorphic bases.  */
  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type_with_linkage_p (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types.  Others are
	   pointless for devirtualization and we cannot precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    /* Track the highest base id so we can renumber below.  */
	    if (base->id > base_id)
	      base_id = base->id;
	  }
      }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
2152 
/* Add TYPE to the ODR type hash.  */
2154 
2155 void
2156 register_odr_type (tree type)
2157 {
2158   if (!odr_hash)
2159     {
2160       odr_hash = new odr_hash_type (23);
2161       if (in_lto_p)
2162         odr_vtable_hash = new odr_vtable_hash_type (23);
2163     }
2164   /* Arrange things to be nicer and insert main variants first.
2165      ??? fundamental prerecorded types do not have mangled names; this
2166      makes it possible that non-ODR type is main_odr_variant of ODR type.
2167      Things may get smoother if LTO FE set mangled name of those types same
2168      way as C++ FE does.  */
2169   if (odr_type_p (main_odr_variant (TYPE_MAIN_VARIANT (type)))
2170       && odr_type_p (TYPE_MAIN_VARIANT (type)))
2171     get_odr_type (TYPE_MAIN_VARIANT (type), true);
2172   if (TYPE_MAIN_VARIANT (type) != type && odr_type_p (main_odr_variant (type)))
2173     get_odr_type (type, true);
2174 }
2175 
2176 /* Return true if type is known to have no derivations.  */
2177 
2178 bool
2179 type_known_to_have_no_derivations_p (tree t)
2180 {
2181   return (type_all_derivations_known_p (t)
2182 	  && (TYPE_FINAL_P (t)
2183 	      || (odr_hash
2184 		  && !get_odr_type (t, true)->derived_types.length())));
2185 }
2186 
2187 /* Dump ODR type T and all its derived types.  INDENT specifies indentation for
2188    recursive printing.  */
2189 
2190 static void
2191 dump_odr_type (FILE *f, odr_type t, int indent=0)
2192 {
2193   unsigned int i;
2194   fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2195   print_generic_expr (f, t->type, TDF_SLIM);
2196   fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2197   fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2198   if (TYPE_NAME (t->type))
2199     {
2200       /*fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
2201 	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
2202 	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));*/
2203       if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2204         fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2205 		 IDENTIFIER_POINTER
2206 		   (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2207     }
2208   if (t->bases.length ())
2209     {
2210       fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2211       for (i = 0; i < t->bases.length (); i++)
2212 	fprintf (f, " %i", t->bases[i]->id);
2213       fprintf (f, "\n");
2214     }
2215   if (t->derived_types.length ())
2216     {
2217       fprintf (f, "%*s derived types:\n", indent * 2, "");
2218       for (i = 0; i < t->derived_types.length (); i++)
2219         dump_odr_type (f, t->derived_types[i], indent + 1);
2220     }
2221   fprintf (f, "\n");
2222 }
2223 
2224 /* Dump the type inheritance graph.  */
2225 
2226 static void
2227 dump_type_inheritance_graph (FILE *f)
2228 {
2229   unsigned int i;
2230   if (!odr_types_ptr)
2231     return;
2232   fprintf (f, "\n\nType inheritance graph:\n");
2233   for (i = 0; i < odr_types.length (); i++)
2234     {
2235       if (odr_types[i] && odr_types[i]->bases.length () == 0)
2236 	dump_odr_type (f, odr_types[i]);
2237     }
2238   for (i = 0; i < odr_types.length (); i++)
2239     {
2240       if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
2241 	{
2242 	  unsigned int j;
2243 	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
2244 	  print_node (f, "", odr_types[i]->type, 0);
2245 	  for (j = 0; j < odr_types[i]->types->length (); j++)
2246 	    {
2247 	      tree t;
2248 	      fprintf (f, "duplicate #%i\n", j);
2249 	      print_node (f, "", (*odr_types[i]->types)[j], 0);
2250 	      t = (*odr_types[i]->types)[j];
2251 	      while (TYPE_P (t) && TYPE_CONTEXT (t))
2252 		{
2253 		  t = TYPE_CONTEXT (t);
2254 	          print_node (f, "", t, 0);
2255 		}
2256 	      putc ('\n',f);
2257 	    }
2258 	}
2259     }
2260 }
2261 
2262 /* Initialize IPA devirt and build inheritance tree graph.  */
2263 
void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  /* The graph is built only once; odr_hash doubles as the "done" flag.  */
  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);
  if (in_lto_p)
    odr_vtable_hash = new odr_vtable_hash_type (23);

  /* We reconstruct the graph starting from the types of all methods seen
     in the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
2319 
2320 /* Return true if N has reference from live virtual table
2321    (and thus can be a destination of polymorphic call).
2322    Be conservatively correct when callgraph is not built or
2323    if the method may be referred externally.  */
2324 
2325 static bool
2326 referenced_from_vtable_p (struct cgraph_node *node)
2327 {
2328   int i;
2329   struct ipa_ref *ref;
2330   bool found = false;
2331 
2332   if (node->externally_visible
2333       || DECL_EXTERNAL (node->decl)
2334       || node->used_from_other_partition)
2335     return true;
2336 
2337   /* Keep this test constant time.
2338      It is unlikely this can happen except for the case where speculative
2339      devirtualization introduced many speculative edges to this node.
2340      In this case the target is very likely alive anyway.  */
2341   if (node->ref_list.referring.length () > 100)
2342     return true;
2343 
2344   /* We need references built.  */
2345   if (symtab->state <= CONSTRUCTION)
2346     return true;
2347 
2348   for (i = 0; node->iterate_referring (i, ref); i++)
2349     if ((ref->use == IPA_REF_ALIAS
2350 	 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2351 	|| (ref->use == IPA_REF_ADDR
2352 	    && VAR_P (ref->referring->decl)
2353 	    && DECL_VIRTUAL_P (ref->referring->decl)))
2354       {
2355 	found = true;
2356 	break;
2357       }
2358   return found;
2359 }
2360 
2361 /* Return if TARGET is cxa_pure_virtual.  */
2362 
2363 static bool
2364 is_cxa_pure_virtual_p (tree target)
2365 {
2366   return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
2367 	 && DECL_NAME (target)
2368 	 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (target)),
2369 		     "__cxa_pure_virtual");
2370 }
2371 
/* If TARGET has an associated node, record it in the NODES array.
   CAN_REFER specifies whether the program can refer to the target directly.
   If TARGET is unknown (NULL) or it cannot be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */
2377 
static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;
  bool pure_virtual = is_cxa_pure_virtual_p (target);

  /* __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  /* Unknown target: nothing to record.  */
  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && !pure_virtual
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* When sanitizing, do not assume that __cxa_pure_virtual is not called
	 by valid program.  */
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	;
      /* Only add pure virtual if it is the only possible target.  This way
	 we will preserve the diagnostics about pure virtual called in many
	 cases without disabling optimization in other.  */
      else if (pure_virtual)
	{
	  if (nodes.length ())
	    return;
	}
      /* If we found a real target, take away cxa_pure_virtual.  */
      else if (!pure_virtual && nodes.length () == 1
	       && is_cxa_pure_virtual_p (nodes[0]->decl))
	nodes.pop ();
      if (pure_virtual && nodes.length ())
	return;
      /* Record TARGET once; INSERTED de-duplicates across calls.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (!completep)
    ;
  /* We have definition of __cxa_pure_virtual that is not accessible (it is
     optimized out or partitioned to other unit) so we cannot add it.  When
     not sanitizing, there is nothing to do.
     Otherwise declare the list incomplete.  */
  else if (pure_virtual)
    {
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	*completep = false;
    }
  else if (flag_ltrans
	   || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
    *completep = false;
}
2487 
2488 /* See if BINFO's type matches OUTER_TYPE.  If so, look up
2489    BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2490    method in vtable and insert method to NODES array
2491    or BASES_TO_CONSIDER if this array is non-NULL.
2492    Otherwise recurse to base BINFOs.
2493    This matches what get_binfo_at_offset does, but with offset
2494    being unknown.
2495 
2496    TYPE_BINFOS is a stack of BINFOS of types with defined
2497    virtual table seen on way from class type to BINFO.
2498 
2499    MATCHED_VTABLES tracks virtual tables we already did lookup
2500    for virtual function in. INSERTED tracks nodes we already
2501    inserted.
2502 
2503    ANONYMOUS is true if BINFO is part of anonymous namespace.
2504 
2505    Clear COMPLETEP when we hit unreferable target.
2506   */
2507 
static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Maintain the stack of BINFOs with virtual tables seen on the path from
     the outermost class; it is consulted below to find the vtable applying
     at BINFO's offset.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  /* Lookup may fail only on ODR-violating input.  */
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive. If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  /* Strip the offset wrapper to get at the vtable decl itself.  */
	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* Look the method up only once per vtable; MATCHED_VTABLES tracks
	 which vtables we already processed.  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2598 
2599 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2600    of TYPE, insert them to NODES, recurse into derived nodes.
2601    INSERTED is used to avoid duplicate insertions of methods into NODES.
2602    MATCHED_VTABLES are used to avoid duplicate walking vtables.
2603    Clear COMPLETEP if unreferable target is found.
2604 
2605    If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2606    all cases where BASE_SKIPPED is true (because the base is abstract
2607    class).  */
2608 
static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  /* Scratch stack of vtables used by record_target_from_binfo when
     diving into non-virtual bases.  */
  auto_vec <tree, 8> type_binfos;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  /* Recurse into every type derived from TYPE.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
2651 
2652 /* Cache of queries for polymorphic call targets.
2653 
2654    Enumerating all call targets may get expensive when there are many
2655    polymorphic calls in the program, so we memoize all the previous
2656    queries and avoid duplicated work.  */
2657 
struct polymorphic_call_target_d
{
  /* Vtable slot (token) of the call being queried.  */
  HOST_WIDE_INT otr_token;
  /* Polymorphic call context the query was made with.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the OBJ_TYPE_REF.  */
  odr_type type;
  /* Computed list of possible call targets.  */
  vec <cgraph_node *> targets;
  /* Method decl recorded for -Wsuggest-final-methods statistics,
     or NULL.  */
  tree decl_warning;
  /* 1-based odr type id recorded for -Wsuggest-final-types statistics;
     0 when no warning is associated with this entry.  */
  int type_warning;
  /* True when TARGETS is known to contain all possible targets.  */
  bool complete;
  /* True when the entry was computed for a speculative query.  */
  bool speculative;
};
2669 
2670 /* Polymorphic call target cache helpers.  */
2671 
struct polymorphic_call_target_hasher
  : pointer_hash <polymorphic_call_target_d>
{
  /* Entries are keyed by their full lookup key (type, token, context
     and speculation flag) rather than by pointer identity; see the
     definitions of hash and equal below.  */
  static inline hashval_t hash (const polymorphic_call_target_d *);
  static inline bool equal (const polymorphic_call_target_d *,
			    const polymorphic_call_target_d *);
  static inline void remove (polymorphic_call_target_d *);
};
2680 
2681 /* Return the computed hashcode for ODR_QUERY.  */
2682 
inline hashval_t
polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
{
  inchash::hash hstate (odr_query->otr_token);

  /* Mix in the ODR type id and the outer type/offset of the context.  */
  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  /* The speculative part of the context contributes only when a
     speculative outer type is present.  */
  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  /* Pack the boolean parts of the key as individual flag bits.  */
  hstate.add_flag (odr_query->speculative);
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}
2704 
2705 /* Compare cache entries T1 and T2.  */
2706 
2707 inline bool
2708 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2709 				       const polymorphic_call_target_d *t2)
2710 {
2711   return (t1->type == t2->type && t1->otr_token == t2->otr_token
2712 	  && t1->speculative == t2->speculative
2713 	  && t1->context.offset == t2->context.offset
2714 	  && t1->context.speculative_offset == t2->context.speculative_offset
2715 	  && t1->context.outer_type == t2->context.outer_type
2716 	  && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2717 	  && t1->context.maybe_in_construction
2718 	      == t2->context.maybe_in_construction
2719 	  && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2720 	  && (t1->context.speculative_maybe_derived_type
2721 	      == t2->context.speculative_maybe_derived_type));
2722 }
2723 
2724 /* Remove entry in polymorphic call target cache hash.  */
2725 
inline void
polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
{
  /* An entry owns both its target vector and its own storage
     (allocated with XCNEW), so release both.  */
  v->targets.release ();
  free (v);
}
2732 
2733 /* Polymorphic call target query cache.  */
2734 
typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
/* Lazily allocated by possible_polymorphic_call_targets and destroyed
   by free_polymorphic_call_targets_hash.  */
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2738 
2739 /* Destroy polymorphic call target query cache.  */
2740 
static void
free_polymorphic_call_targets_hash ()
{
  /* Both caches are allocated together in
     possible_polymorphic_call_targets, so the presence of
     CACHED_POLYMORPHIC_CALL_TARGETS implies the hash exists too.  */
  if (cached_polymorphic_call_targets)
    {
      delete polymorphic_call_target_hash;
      polymorphic_call_target_hash = NULL;
      delete cached_polymorphic_call_targets;
      cached_polymorphic_call_targets = NULL;
    }
}
2752 
2753 /* When virtual function is removed, we may need to flush the cache.  */
2754 
static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  /* Cached target vectors may contain N; dropping the whole cache is
     simpler than hunting down and updating individual entries.  */
  if (cached_polymorphic_call_targets
      && cached_polymorphic_call_targets->contains (n))
    free_polymorphic_call_targets_hash ();
}
2762 
2763 /* Look up base of BINFO that has virtual table VTABLE with OFFSET.  */
2764 
2765 tree
2766 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2767 				tree vtable)
2768 {
2769   tree v = BINFO_VTABLE (binfo);
2770   int i;
2771   tree base_binfo;
2772   unsigned HOST_WIDE_INT this_offset;
2773 
2774   if (v)
2775     {
2776       if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2777 	gcc_unreachable ();
2778 
2779       if (offset == this_offset
2780 	  && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2781 	return binfo;
2782     }
2783 
2784   for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2785     if (polymorphic_type_binfo_p (base_binfo))
2786       {
2787 	base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2788 	if (base_binfo)
2789 	  return base_binfo;
2790       }
2791   return NULL;
2792 }
2793 
2794 /* T is known constant value of virtual table pointer.
2795    Store virtual table to V and its offset to OFFSET.
2796    Return false if T does not look like virtual table reference.  */
2797 
2798 bool
2799 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2800 				unsigned HOST_WIDE_INT *offset)
2801 {
2802   /* We expect &MEM[(void *)&virtual_table + 16B].
2803      We obtain object's BINFO from the context of the virtual table.
2804      This one contains pointer to virtual table represented via
2805      POINTER_PLUS_EXPR.  Verify that this pointer matches what
2806      we propagated through.
2807 
2808      In the case of virtual inheritance, the virtual tables may
2809      be nested, i.e. the offset may be different from 16 and we may
2810      need to dive into the type representation.  */
2811   if (TREE_CODE (t) == ADDR_EXPR
2812       && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2813       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2814       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2815       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2816 	  == VAR_DECL)
2817       && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2818 					 (TREE_OPERAND (t, 0), 0), 0)))
2819     {
2820       *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2821       *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2822       return true;
2823     }
2824 
2825   /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2826      We need to handle it when T comes from static variable initializer or
2827      BINFO. */
2828   if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2829     {
2830       *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2831       t = TREE_OPERAND (t, 0);
2832     }
2833   else
2834     *offset = 0;
2835 
2836   if (TREE_CODE (t) != ADDR_EXPR)
2837     return false;
2838   *v = TREE_OPERAND (t, 0);
2839   return true;
2840 }
2841 
2842 /* T is known constant value of virtual table pointer.  Return BINFO of the
2843    instance type.  */
2844 
tree
vtable_pointer_value_to_binfo (const_tree t)
{
  tree vtable;
  unsigned HOST_WIDE_INT offset;

  /* Decompose T into the vtable variable and the byte offset into it.  */
  if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
    return NULL_TREE;

  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have BINFO for those. Eventually we should fix
     our representation to allow this case to be handled, too.
     In the case we see store of BINFO we however may assume
     that standard folding will be able to cope with it.  */
  return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					 offset, vtable);
}
2862 
2863 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2864    Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2865    and insert them in NODES.
2866 
2867    MATCHED_VTABLES and INSERTED is used to avoid duplicated work.  */
2868 
2869 static void
2870 record_targets_from_bases (tree otr_type,
2871 			   HOST_WIDE_INT otr_token,
2872 			   tree outer_type,
2873 			   HOST_WIDE_INT offset,
2874 			   vec <cgraph_node *> &nodes,
2875 			   hash_set<tree> *inserted,
2876 			   hash_set<tree> *matched_vtables,
2877 			   bool *completep)
2878 {
2879   while (true)
2880     {
2881       HOST_WIDE_INT pos, size;
2882       tree base_binfo;
2883       tree fld;
2884 
2885       if (types_same_for_odr (outer_type, otr_type))
2886 	return;
2887 
2888       for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2889 	{
2890 	  if (TREE_CODE (fld) != FIELD_DECL)
2891 	    continue;
2892 
2893 	  pos = int_bit_position (fld);
2894 	  size = tree_to_shwi (DECL_SIZE (fld));
2895 	  if (pos <= offset && (pos + size) > offset
2896 	      /* Do not get confused by zero sized bases.  */
2897 	      && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2898 	    break;
2899 	}
2900       /* Within a class type we should always find corresponding fields.  */
2901       gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2902 
2903       /* Nonbase types should have been stripped by outer_class_type.  */
2904       gcc_assert (DECL_ARTIFICIAL (fld));
2905 
2906       outer_type = TREE_TYPE (fld);
2907       offset -= pos;
2908 
2909       base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2910 					offset, otr_type);
2911       if (!base_binfo)
2912 	{
2913 	  gcc_assert (odr_violation_reported);
2914 	  return;
2915 	}
2916       gcc_assert (base_binfo);
2917       if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2918 	{
2919 	  bool can_refer;
2920 	  tree target = gimple_get_virt_method_for_binfo (otr_token,
2921 							  base_binfo,
2922 							  &can_refer);
2923 	  if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2924 	    maybe_record_node (nodes, target, inserted, can_refer, completep);
2925 	  matched_vtables->add (BINFO_VTABLE (base_binfo));
2926 	}
2927     }
2928 }
2929 
2930 /* When virtual table is removed, we may need to flush the cache.  */
2931 
static void
devirt_variable_node_removal_hook (varpool_node *n,
				   void *d ATTRIBUTE_UNUSED)
{
  /* Only vtables of anonymous-namespace types trigger a flush here;
     presumably other vtables cannot invalidate cached answers —
     NOTE(review): confirm against how maybe_record_node uses vtable
     references.  */
  if (cached_polymorphic_call_targets
      && DECL_VIRTUAL_P (n->decl)
      && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
    free_polymorphic_call_targets_hash ();
}
2941 
2942 /* Record about how many calls would benefit from given type to be final.  */
2943 
struct odr_type_warn_count
{
  /* The polymorphic type in question.  */
  tree type;
  /* Number of polymorphic call sites that would benefit.  */
  int count;
  /* Accumulated profile count of those call sites.  */
  gcov_type dyn_count;
};
2950 
2951 /* Record about how many calls would benefit from given method to be final.  */
2952 
struct decl_warn_count
{
  /* The virtual method in question.  */
  tree decl;
  /* Number of polymorphic call sites that would benefit.  */
  int count;
  /* Accumulated profile count of those call sites.  */
  gcov_type dyn_count;
};
2959 
2960 /* Information about type and decl warnings.  */
2961 
struct final_warning_record
{
  /* Profile count attributed to the query in progress (set up by the
     user of this record); accumulated into the per-type and per-decl
     counters below.  */
  gcov_type dyn_count;
  /* Indexed by odr type id; statistics for -Wsuggest-final-types.  */
  auto_vec<odr_type_warn_count> type_warnings;
  /* Keyed by method decl; statistics for -Wsuggest-final-methods.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
/* Non-NULL only while a pass is collecting final-warning statistics.  */
struct final_warning_record *final_warning_records;
2969 
2970 /* Return vector containing possible targets of polymorphic call of type
2971    OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2972    If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
2973    OTR_TYPE and include their virtual method.  This is useful for types
2974    possibly in construction or destruction where the virtual table may
2975    temporarily change to one of base types.  INCLUDE_DERIVER_TYPES make
2976    us to walk the inheritance graph for all derivations.
2977 
2978    If COMPLETEP is non-NULL, store true if the list is complete.
2979    CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
2980    in the target cache.  If user needs to visit every target list
2981    just once, it can memoize them.
2982 
2983    If SPECULATIVE is set, the list will not contain targets that
2984    are not speculatively taken.
2985 
2986    Returned vector is placed into cache.  It is NOT caller's responsibility
2987    to free it.  The vector can be freed on cgraph_remove_node call if
2988    the particular node is a virtual function present in the cache.  */
2989 
vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
			           HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
			           bool *completep,
			           void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  /* Set when OUTER_TYPE itself cannot be instantiated and therefore its
     own method was not recorded; it may still need to be added when the
     object can be in construction.  */
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when the user does not
     ask for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
       	= new polymorphic_call_target_hash_type (23);
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					 NULL);
	}
    }

  /* With LTO, canonicalize the outer types to their ODR representatives
     so equivalent queries from different units share cache entries.  */
  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
        context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
        context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
   *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->complete;
      /* Even on a cache hit, keep the -Wsuggest-final-types and
	 -Wsuggest-final-methods statistics up to date.  */
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	    += final_warning_records->dyn_count;
	}
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	     final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  /* De-duplication sets: INSERTED tracks recorded target decls,
     MATCHED_VTABLES tracks vtables already walked.  */
  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  if (!speculative || !nodes.length ())
    {
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      if (target)
	{
	  /* In the case we get complete method, we don't need
	     to walk derivations.  */
	  if (DECL_FINAL_P (target))
	    context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  if (!outer_type->all_derivations_known)
	    {
	      /* Collect statistics for -Wsuggest-final-types and
		 -Wsuggest-final-methods: a unique target that would become
		 devirtualizable if the type or the method were final.  */
	      if (!speculative && final_warning_records
		  && nodes.length () == 1
		  && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
		{
		  if (complete
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
			final_warning_records->type_warnings.safe_grow_cleared
			  (odr_types.length ());
		      final_warning_records->type_warnings[outer_type->id].count++;
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			 final_warning_records->decl_warnings.get_or_insert
			    (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
3304 
/* Push a pointer to VALUE into VEC; KEY is unused.  Shaped as a
   hash_map traversal callback — returning true continues the
   traversal.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
3312 
3313 /* Dump target list TARGETS into FILE.  */
3314 
static void
dump_targets (FILE *f, vec <cgraph_node *> targets)
{
  unsigned int i;

  for (i = 0; i < targets.length (); i++)
    {
      char *name = NULL;
      /* In LTO mode symbol names are mangled; demangle for readability.  */
      if (in_lto_p)
	name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
      fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
      if (in_lto_p)
	free (name);
      /* Flag targets we only know as declarations.  */
      if (!targets[i]->definition)
	fprintf (f, " (no definition%s)",
		 DECL_DECLARED_INLINE_P (targets[i]->decl)
		 ? " inline" : "");
    }
  fprintf (f, "\n");
}
3335 
3336 /* Dump all possible targets of a polymorphic call.  */
3337 
void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int len;

  if (!type)
    return;
  /* First dump the full (non-speculative) target list.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, false);
  fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);

  ctx.dump (f);

  fprintf (f, "    %s%s%s%s\n      ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  len = targets.length ();
  dump_targets (f, targets);

  /* Then query again speculatively; dump the result only when it
     differs from the list above.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, true);
  if (targets.length () != len)
    {
      fprintf (f, "  Speculative targets:");
      dump_targets (f, targets);
    }
  /* Ugly: during callgraph construction the target cache may get populated
     before all targets are found.  While this is harmless (because all local
     types are discovered and only in those case we devirtualize fully and we
     don't do speculative devirtualization before IPA stage) it triggers
     assert here when dumping at that stage also populates the case with
     speculative targets.  Quietly ignore this.  */
  gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
  fprintf (f, "\n");
}
3386 
3387 
3388 /* Return true if N can be possibly target of a polymorphic call of
3389    OTR_TYPE/OTR_TOKEN.  */
3390 
bool
possible_polymorphic_call_target_p (tree otr_type,
				    HOST_WIDE_INT otr_token,
				    const ipa_polymorphic_call_context &ctx,
				    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  /* __builtin_unreachable and __builtin_trap are always accepted as
     possible targets.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl)) == BUILT_IN_UNREACHABLE
          || fcode == BUILT_IN_TRAP))
    return true;

  /* So is the pure virtual placeholder.  */
  if (is_cxa_pure_virtual_p (n->decl))
    return true;

  /* Without ODR info we cannot rule anything out.  */
  if (!odr_hash)
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (n->semantically_equivalent_p (targets[i]))
      return true;

  /* At a moment we allow middle end to dig out new external declarations
     as a targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}
3423 
3424 
3425 
3426 /* Return true if N can be possibly target of a polymorphic call of
3427    OBJ_TYPE_REF expression REF in STMT.  */
3428 
bool
possible_polymorphic_call_target_p (tree ref,
				    gimple *stmt,
				    struct cgraph_node *n)
{
  /* Reconstruct the polymorphic call context from the OBJ_TYPE_REF
     expression and the statement containing it, then defer to the
     type/token overload above.  */
  ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
  tree call_fn = gimple_call_fn (stmt);

  return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
					     tree_to_uhwi
					       (OBJ_TYPE_REF_TOKEN (call_fn)),
					     context,
					     n);
}
3443 
3444 
3445 /* After callgraph construction new external nodes may appear.
3446    Add them into the graph.  */
3447 
void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash)
    return;
  /* Cached answers may be invalidated by the newly added types.  */
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in the
     unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}
3466 
3467 
3468 /* Return true if N looks like likely target of a polymorphic call.
3469    Rule out cxa_pure_virtual, noreturns, function declared cold and
3470    other obvious cases.  */
3471 
bool
likely_target_p (struct cgraph_node *n)
{
  int flags;
  /* cxa_pure_virtual and similar things are not likely.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
    return false;
  flags = flags_from_decl_or_type (n->decl);
  /* Noreturn and cold methods make poor speculation candidates.  */
  if (flags & ECF_NORETURN)
    return false;
  if (lookup_attribute ("cold",
			DECL_ATTRIBUTES (n->decl)))
    return false;
  if (n->frequency < NODE_FREQUENCY_NORMAL)
    return false;
  /* If there are no live virtual tables referring the target,
     the only way the target can be called is an instance coming from other
     compilation unit; speculative devirtualization is built around an
     assumption that won't happen.  */
  if (!referenced_from_vtable_p (n))
    return false;
  return true;
}
3495 
3496 /* Compare type warning records P1 and P2 and choose one with larger count;
3497    helper for qsort.  */
3498 
3499 int
3500 type_warning_cmp (const void *p1, const void *p2)
3501 {
3502   const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3503   const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3504 
3505   if (t1->dyn_count < t2->dyn_count)
3506    return 1;
3507   if (t1->dyn_count > t2->dyn_count)
3508    return -1;
3509   return t2->count - t1->count;
3510 }
3511 
3512 /* Compare decl warning records P1 and P2 and choose one with larger count;
3513    helper for qsort.  */
3514 
3515 int
3516 decl_warning_cmp (const void *p1, const void *p2)
3517 {
3518   const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3519   const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3520 
3521   if (t1->dyn_count < t2->dyn_count)
3522    return 1;
3523   if (t1->dyn_count > t2->dyn_count)
3524    return -1;
3525   return t2->count - t1->count;
3526 }
3527 
3528 
3529 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3530    context CTX.  */
3531 
3532 struct cgraph_node *
3533 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3534 				  ipa_polymorphic_call_context ctx)
3535 {
3536   vec <cgraph_node *>targets
3537      = possible_polymorphic_call_targets
3538 	  (otr_type, otr_token, ctx, NULL, NULL, true);
3539   unsigned int i;
3540   struct cgraph_node *likely_target = NULL;
3541 
3542   for (i = 0; i < targets.length (); i++)
3543     if (likely_target_p (targets[i]))
3544       {
3545 	if (likely_target)
3546 	  return NULL;
3547 	likely_target = targets[i];
3548       }
3549   if (!likely_target
3550       ||!likely_target->definition
3551       || DECL_EXTERNAL (likely_target->decl))
3552     return NULL;
3553 
3554   /* Don't use an implicitly-declared destructor (c++/58678).  */
3555   struct cgraph_node *non_thunk_target
3556     = likely_target->function_symbol ();
3557   if (DECL_ARTIFICIAL (non_thunk_target->decl))
3558     return NULL;
3559   if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3560       && likely_target->can_be_discarded_p ())
3561     return NULL;
3562   return likely_target;
3563 }
3564 
3565 /* The ipa-devirt pass.
3566    When polymorphic call has only one likely target in the unit,
3567    turn it into a speculative call.  */
3568 
static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  /* Cache tokens of polymorphic calls already known to have no unique
     likely target, so we do not rescan identical target lists.  */
  hash_set<void *> bad_call_targets;
  struct cgraph_edge *e;

  /* Statistics reported at the end of the dump.  */
  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
  int ndropped = 0;

  if (!odr_types_ptr)
    return 0;

  if (dump_file)
    dump_type_inheritance_graph (dump_file);

  /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
     This is implemented by setting up final_warning_records that are updated
     by get_polymorphic_call_targets.
     We need to clear cache in this case to trigger recomputation of all
     entries.  */
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      final_warning_records = new (final_warning_record);
      final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
      free_polymorphic_call_targets_hash ();
    }

  /* Walk all functions with a body and inspect their polymorphic
     indirect calls.  */
  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (!opt_for_fn (n->decl, flag_devirtualize))
	continue;
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
		 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (e->indirect_info->polymorphic)
	  {
	    struct cgraph_node *likely_target = NULL;
	    void *cache_token;
	    bool final;

	    if (final_warning_records)
	      final_warning_records->dyn_count = e->count;

	    vec <cgraph_node *>targets
	       = possible_polymorphic_call_targets
		    (e, &final, &cache_token, true);
	    unsigned int i;

	    /* Trigger warnings by calculating non-speculative targets.  */
	    if (warn_suggest_final_methods || warn_suggest_final_types)
	      possible_polymorphic_call_targets (e);

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e);

	    npolymorphic++;

	    /* See if the call can be devirtualized by means of ipa-prop's
	       polymorphic call context propagation.  If not, we can just
	       forget about this call being polymorphic and avoid some heavy
	       lifting in remove_unreachable_nodes that will otherwise try to
	       keep all possible targets alive until inlining and in the inliner
	       itself.

	       This may need to be revisited once we add further ways to use
	       the may edges, but it is a reasonable thing to do right now.  */

	    if ((e->indirect_info->param_index == -1
		|| (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
		    && e->indirect_info->vptr_changed))
		&& !flag_ltrans_devirtualize)
	      {
		e->indirect_info->polymorphic = false;
		ndropped++;
	        if (dump_file)
		  fprintf (dump_file, "Dropping polymorphic call info;"
			   " it can not be used by ipa-prop\n");
	      }

	    if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
	      continue;

	    if (!e->maybe_hot_p ())
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is cold\n\n");
		ncold++;
		continue;
	      }
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is already speculated\n\n");
		nspeculated++;

		/* When dumping see if we agree with speculation.
		   Note that we deliberately fall through here when dumping,
		   so the agree/disagree check below can run.  */
		if (!dump_file)
		  continue;
	      }
	    if (bad_call_targets.contains (cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n\n");
		nmultiple++;
		continue;
	      }
	    /* Search for the single likely target; bail out of the scan as
	       soon as a second one is seen.  */
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if (likely_target)
		    {
		      likely_target = NULL;
		      if (dump_file)
			fprintf (dump_file, "More than one likely target\n\n");
		      nmultiple++;
		      break;
		    }
		  likely_target = targets[i];
		}
	    if (!likely_target)
	      {
		bad_call_targets.add (cache_token);
	        continue;
	      }
	    /* This is reached only when dumping; check if we agree or disagree
	       with the speculation.  */
	    if (e->speculative)
	      {
		struct cgraph_edge *e2;
		struct ipa_ref *ref;
		e->speculative_call_info (e2, e, ref);
		if (e2->callee->ultimate_alias_target ()
		    == likely_target->ultimate_alias_target ())
		  {
		    fprintf (dump_file, "We agree with speculation\n\n");
		    nok++;
		  }
		else
		  {
		    fprintf (dump_file, "We disagree with speculation\n\n");
		    nwrong++;
		  }
		continue;
	      }
	    if (!likely_target->definition)
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is not a definition\n\n");
		nnotdefined++;
		continue;
	      }
	    /* Do not introduce new references to external symbols.  While we
	       can handle these just well, it is common for programs to be
	       compiled with headers defining methods that are never actually
	       provided at link time.  */
	    if (DECL_EXTERNAL (likely_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is external\n\n");
		nexternal++;
		continue;
	      }
	    /* Don't use an implicitly-declared destructor (c++/58678).  */
	    struct cgraph_node *non_thunk_target
	      = likely_target->function_symbol ();
	    if (DECL_ARTIFICIAL (non_thunk_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is artificial\n\n");
		nartificial++;
		continue;
	      }
	    if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
		&& likely_target->can_be_discarded_p ())
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is overwritable\n\n");
		noverwritable++;
		continue;
	      }
	    else if (dbg_cnt (devirt))
	      {
		if (dump_enabled_p ())
                  {
                    location_t locus = gimple_location_safe (e->call_stmt);
                    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                                     "speculatively devirtualizing call in %s/%i to %s/%i\n",
                                     n->name (), n->order,
                                     likely_target->name (),
                                     likely_target->order);
                  }
		/* Prefer a noninterposable alias, if one exists, so the new
		   direct reference cannot be interposed.  */
		if (!likely_target->can_be_discarded_p ())
		  {
		    cgraph_node *alias;
		    alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
		    if (alias)
		      likely_target = alias;
		  }
		nconverted++;
		update = true;
		/* Attribute 80% of the profile count/frequency to the new
		   speculative direct call.  */
		e->make_speculative
		  (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
	      }
	  }
      if (update)
	inline_update_overall_summary (n);
    }
  /* Emit the accumulated -Wsuggest-final-types/-Wsuggest-final-methods
     warnings, most frequently executed candidates first.  */
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      if (warn_suggest_final_types)
	{
	  final_warning_records->type_warnings.qsort (type_warning_cmp);
	  for (unsigned int i = 0;
	       i < final_warning_records->type_warnings.length (); i++)
	    if (final_warning_records->type_warnings[i].count)
	      {
	        tree type = final_warning_records->type_warnings[i].type;
	        int count = final_warning_records->type_warnings[i].count;
	        long long dyn_count
		  = final_warning_records->type_warnings[i].dyn_count;

		/* Mention the profile count only when one is available.  */
		if (!dyn_count)
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls",
			     type,
			     count);
		else
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call "
			     "executed %lli times",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls "
			     "executed %lli times",
			     type,
			     count,
			     dyn_count);
	      }
	}

      if (warn_suggest_final_methods)
	{
	  auto_vec<const decl_warn_count*> decl_warnings_vec;

	  final_warning_records->decl_warnings.traverse
	    <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
	  decl_warnings_vec.qsort (decl_warning_cmp);
	  for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
	    {
	      tree decl = decl_warnings_vec[i]->decl;
	      int count = decl_warnings_vec[i]->count;
	      long long dyn_count = decl_warnings_vec[i]->dyn_count;

	      /* Four variants: with/without a profile count, and destructor
		 vs. ordinary method wording.  */
	      if (!dyn_count)
		if (DECL_CXX_DESTRUCTOR_P (decl))
		  warning_n (DECL_SOURCE_LOCATION (decl),
			      OPT_Wsuggest_final_methods, count,
			      "Declaring virtual destructor of %qD final "
			      "would enable devirtualization of %i call",
			      "Declaring virtual destructor of %qD final "
			      "would enable devirtualization of %i calls",
			      DECL_CONTEXT (decl), count);
		else
		  warning_n (DECL_SOURCE_LOCATION (decl),
			      OPT_Wsuggest_final_methods, count,
			      "Declaring method %qD final "
			      "would enable devirtualization of %i call",
			      "Declaring method %qD final "
			      "would enable devirtualization of %i calls",
			      decl, count);
	       else if (DECL_CXX_DESTRUCTOR_P (decl))
		  warning_n (DECL_SOURCE_LOCATION (decl),
			      OPT_Wsuggest_final_methods, count,
			      "Declaring virtual destructor of %qD final "
			      "would enable devirtualization of %i call "
			      "executed %lli times",
			      "Declaring virtual destructor of %qD final "
			      "would enable devirtualization of %i calls "
			      "executed %lli times",
			      DECL_CONTEXT (decl), count, dyn_count);
		else
		  warning_n (DECL_SOURCE_LOCATION (decl),
			      OPT_Wsuggest_final_methods, count,
			      "Declaring method %qD final "
			      "would enable devirtualization of %i call "
			      "executed %lli times",
			      "Declaring method %qD final "
			      "would enable devirtualization of %i calls "
			      "executed %lli times",
			      decl, count, dyn_count);
	    }
	}

      delete (final_warning_records);
      final_warning_records = 0;
    }

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined, %i artificial, %i infos dropped\n",
	     npolymorphic, ndevirtualized, nconverted, ncold,
	     nmultiple, noverwritable, nspeculated, nok, nwrong,
	     nexternal, nnotdefined, nartificial, ndropped);
  /* Removing polymorphic call info may have made some targets
     unreachable; ask the pass manager to clean them up.  */
  return ndevirtualized || ndropped ? TODO_remove_functions : 0;
}
3889 
namespace {

/* Pass-manager metadata for the ipa-devirt pass.  */

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

/* IPA pass wrapper around ipa_devirt; supplies no summary streaming
   hooks since the pass works purely on the in-memory callgraph.  */

class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* In LTO, always run the IPA passes and decide on function basis if the
	 pass is enabled.  */
      if (in_lto_p)
	return true;
      /* Otherwise run only when speculative devirtualization or one of the
	 suggest-final warnings was requested.  */
      return (flag_devirtualize
	      && (flag_devirtualize_speculatively
		  || (warn_suggest_final_methods
		      || warn_suggest_final_types))
	      && optimize);
    }

  virtual unsigned int execute (function *) { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
3940 
/* Create an instance of the ipa-devirt pass for the pass manager.  */

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}
3946 
3947 #include "gt-ipa-devirt.h"
3948