xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/ipa-devirt.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Basic IPA utilities for type inheritance graph construction and
2    devirtualization.
3    Copyright (C) 2013-2015 Free Software Foundation, Inc.
4    Contributed by Jan Hubicka
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 /* Brief vocabulary:
23      ODR = One Definition Rule
24         In short, the ODR states that:
25 	1 In any translation unit, a template, type, function, or object can
26 	  have no more than one definition. Some of these can have any number
27 	  of declarations. A definition provides an instance.
28         2 In the entire program, an object or non-inline function cannot have
29 	  more than one definition; if an object or function is used, it must
30 	  have exactly one definition. You can declare an object or function
31 	  that is never used, in which case you don't have to provide
32 	  a definition. In no event can there be more than one definition.
33         3 Some things, like types, templates, and extern inline functions, can
34 	  be defined in more than one translation unit. For a given entity,
35 	  each definition must be the same. Non-extern objects and functions
36 	  in different translation units are different entities, even if their
37 	  names and types are the same.
38 
39      OTR = OBJ_TYPE_REF
40        This is the Gimple representation of type information of a polymorphic call.
41        It contains two parameters:
42 	 otr_type is a type of class whose method is called.
43 	 otr_token is the index into virtual table where address is taken.
44 
45      BINFO
46        This is the type inheritance information attached to each tree
47        RECORD_TYPE by the C++ frontend.  It provides information about base
48        types and virtual tables.
49 
50        BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51        BINFO also links to its type by BINFO_TYPE and to the virtual table by
52        BINFO_VTABLE.
53 
54        Base types of a given type are enumerated by BINFO_BASE_BINFO
55        vector.  Members of this vectors are not BINFOs associated
56        with a base type.  Rather they are new copies of BINFOs
57        (base BINFOs). Their virtual tables may differ from
58        virtual table of the base type.  Also BINFO_OFFSET specifies
59        offset of the base within the type.
60 
61        In the case of single inheritance, the virtual table is shared
62        and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
63        inheritance the individual virtual tables are pointed to by
64        BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65        binfo associated to the base type).
66 
67        BINFO lookup for a given base type and offset can be done by
68        get_binfo_at_offset.  It returns proper BINFO whose virtual table
69        can be used for lookup of virtual methods associated with the
70        base type.
71 
72      token
73        This is an index of virtual method in virtual table associated
74        to the type defining it. Token can be looked up from OBJ_TYPE_REF
75        or from DECL_VINDEX of a given virtual table.
76 
77      polymorphic (indirect) call
78        This is callgraph representation of virtual method call.  Every
79        polymorphic call contains otr_type and otr_token taken from
80        original OBJ_TYPE_REF at callgraph construction time.
81 
82    What we do here:
83 
84    build_type_inheritance_graph triggers a construction of the type inheritance
85    graph.
86 
87      We reconstruct it based on types of methods we see in the unit.
88      This means that the graph is not complete. Types with no methods are not
89      inserted into the graph.  Also types without virtual methods are not
90      represented at all, though it may be easy to add this.
91 
92      The inheritance graph is represented as follows:
93 
94        Vertices are structures odr_type.  Every odr_type may correspond
95        to one or more tree type nodes that are equivalent by ODR rule.
96        (the multiple type nodes appear only with linktime optimization)
97 
98        Edges are represented by odr_type->base and odr_type->derived_types.
99        At the moment we do not track offsets of types for multiple inheritance.
100        Adding this is easy.
101 
102   possible_polymorphic_call_targets returns, given the parameters found in an
103   indirect polymorphic edge, all possible polymorphic call targets of the call.
104 
105   pass_ipa_devirt performs simple speculative devirtualization.
106 */
107 
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "hash-set.h"
113 #include "machmode.h"
114 #include "hash-map.h"
115 #include "vec.h"
116 #include "double-int.h"
117 #include "input.h"
118 #include "alias.h"
119 #include "symtab.h"
120 #include "wide-int.h"
121 #include "inchash.h"
122 #include "tree.h"
123 #include "fold-const.h"
124 #include "print-tree.h"
125 #include "calls.h"
126 #include "predict.h"
127 #include "basic-block.h"
128 #include "is-a.h"
129 #include "plugin-api.h"
130 #include "hard-reg-set.h"
131 #include "function.h"
132 #include "ipa-ref.h"
133 #include "cgraph.h"
134 #include "hashtab.h"
135 #include "rtl.h"
136 #include "flags.h"
137 #include "statistics.h"
138 #include "real.h"
139 #include "fixed-value.h"
140 #include "insn-config.h"
141 #include "expmed.h"
142 #include "dojump.h"
143 #include "explow.h"
144 #include "emit-rtl.h"
145 #include "varasm.h"
146 #include "stmt.h"
147 #include "expr.h"
148 #include "tree-pass.h"
149 #include "target.h"
150 #include "hash-table.h"
151 #include "tree-pretty-print.h"
152 #include "ipa-utils.h"
153 #include "tree-ssa-alias.h"
154 #include "internal-fn.h"
155 #include "gimple-fold.h"
156 #include "gimple-expr.h"
157 #include "gimple.h"
158 #include "alloc-pool.h"
159 #include "symbol-summary.h"
160 #include "ipa-prop.h"
161 #include "ipa-inline.h"
162 #include "diagnostic.h"
163 #include "tree-dfa.h"
164 #include "demangle.h"
165 #include "dbgcnt.h"
166 #include "gimple-pretty-print.h"
167 #include "stor-layout.h"
168 #include "intl.h"
169 #include "streamer-hooks.h"
170 #include "lto-streamer.h"
171 
/* Hash based set of pairs of types.  Used to break recursion when
   structurally comparing possibly mutually referential types.  */
typedef struct
{
  /* The two types being compared.  Callers canonicalize the order so
     that the type with the smaller TYPE_UID comes first.  */
  tree first;
  tree second;
} type_pair;
178 
179 struct pair_traits : default_hashset_traits
180 {
181   static hashval_t
182   hash (type_pair p)
183   {
184     return TYPE_UID (p.first) ^ TYPE_UID (p.second);
185   }
186   static bool
187   is_empty (type_pair p)
188   {
189     return p.first == NULL;
190   }
191   static bool
192   is_deleted (type_pair p ATTRIBUTE_UNUSED)
193     {
194       return false;
195     }
196   static bool
197   equal (const type_pair &a, const type_pair &b)
198     {
199       return a.first==b.first && a.second == b.second;
200     }
201   static void
202   mark_empty (type_pair &e)
203     {
204       e.first = NULL;
205     }
206 };
207 
/* Forward declaration; defined later in this file.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair,pair_traits> *);

/* Set to true once any ODR violation has been diagnosed.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
216 
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type: the representative tree node of the equivalence class.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
  /* Set when a virtual table without RTTI prevailed over a table with it.  */
  bool rtti_broken;
};
247 
248 /* Return TRUE if all derived types of T are known and thus
249    we may consider the walk of derived type complete.
250 
251    This is typically true only for final anonymous namespace types and types
252    defined within functions (that may be COMDAT and thus shared across units,
253    but with the same set of derived types).  */
254 
255 bool
256 type_all_derivations_known_p (const_tree t)
257 {
258   if (TYPE_FINAL_P (t))
259     return true;
260   if (flag_ltrans)
261     return false;
262   /* Non-C++ types may have IDENTIFIER_NODE here, do not crash.  */
263   if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
264     return true;
265   if (type_in_anonymous_namespace_p (t))
266     return true;
267   return (decl_function_context (TYPE_NAME (t)) != NULL);
268 }
269 
270 /* Return TRUE if type's constructors are all visible.  */
271 
272 static bool
273 type_all_ctors_visible_p (tree t)
274 {
275   return !flag_ltrans
276 	 && symtab->state >= CONSTRUCTION
277 	 /* We can not always use type_all_derivations_known_p.
278 	    For function local types we must assume case where
279 	    the function is COMDAT and shared in between units.
280 
281 	    TODO: These cases are quite easy to get, but we need
282 	    to keep track of C++ privatizing via -Wno-weak
283 	    as well as the  IPA privatizing.  */
284 	 && type_in_anonymous_namespace_p (t);
285 }
286 
287 /* Return TRUE if type may have instance.  */
288 
289 static bool
290 type_possibly_instantiated_p (tree t)
291 {
292   tree vtable;
293   varpool_node *vnode;
294 
295   /* TODO: Add abstract types here.  */
296   if (!type_all_ctors_visible_p (t))
297     return true;
298 
299   vtable = BINFO_VTABLE (TYPE_BINFO (t));
300   if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
301     vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
302   vnode = varpool_node::get (vtable);
303   return vnode && vnode->definition;
304 }
305 
/* Hash used to unify ODR types based on their mangled name and for anonymous
   namespace types.  */

struct odr_name_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Hash used to unify ODR types based on their associated virtual table.
   This hash is needed to keep -fno-lto-odr-type-merging to work and contains
   only polymorphic types.  Types with mangled names are inserted to both.  */

struct odr_vtable_hasher:odr_name_hasher
{
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
327 
328 /* Return type that was declared with T's name so that T is an
329    qualified variant of it.  */
330 
331 static inline tree
332 main_odr_variant (const_tree t)
333 {
334   if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
335     return TREE_TYPE (TYPE_NAME (t));
336   /* Unnamed types and non-C++ produced types can be compared by variants.  */
337   else
338     return TYPE_MAIN_VARIANT (t);
339 }
340 
341 static bool
342 can_be_name_hashed_p (tree t)
343 {
344   return (!in_lto_p || type_in_anonymous_namespace_p (t)
345 	  || (TYPE_NAME (t) && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t))));
346 }
347 
348 /* Hash type by its ODR name.  */
349 
350 static hashval_t
351 hash_odr_name (const_tree t)
352 {
353   gcc_checking_assert (main_odr_variant (t) == t);
354 
355   /* If not in LTO, all main variants are unique, so we can do
356      pointer hash.  */
357   if (!in_lto_p)
358     return htab_hash_pointer (t);
359 
360   /* Anonymous types are unique.  */
361   if (type_in_anonymous_namespace_p (t))
362     return htab_hash_pointer (t);
363 
364   gcc_checking_assert (TYPE_NAME (t)
365 		       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
366   return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
367 }
368 
/* Return the computed hashcode for ODR_TYPE: the ODR-name hash of
   its leader type.  */

inline hashval_t
odr_name_hasher::hash (const value_type *odr_type)
{
  return hash_odr_name (odr_type->type);
}
376 
377 static bool
378 can_be_vtable_hashed_p (tree t)
379 {
380   /* vtable hashing can distinguish only main variants.  */
381   if (TYPE_MAIN_VARIANT (t) != t)
382     return false;
383   /* Anonymous namespace types are always handled by name hash.  */
384   if (type_in_anonymous_namespace_p (t))
385     return false;
386   return (TREE_CODE (t) == RECORD_TYPE
387 	  && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
388 }
389 
/* Hash type by assembler name of its vtable.  */

static hashval_t
hash_odr_vtable (const_tree t)
{
  tree v = BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (t)));
  inchash::hash hstate;

  gcc_checking_assert (in_lto_p);
  gcc_checking_assert (!type_in_anonymous_namespace_p (t));
  gcc_checking_assert (TREE_CODE (t) == RECORD_TYPE
		       && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
  gcc_checking_assert (main_odr_variant (t) == t);

  /* BINFO_VTABLE may be a POINTER_PLUS_EXPR (address of the vtable decl
     plus an offset); mix the offset into the hash and strip the wrapper
     to reach the underlying vtable decl.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      add_expr (TREE_OPERAND (v, 1), hstate);
      v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
    }

  hstate.add_wide_int (IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (v)));
  return hstate.end ();
}
413 
/* Return the computed hashcode for ODR_TYPE, based on the assembler
   name of its virtual table.  */

inline hashval_t
odr_vtable_hasher::hash (const value_type *odr_type)
{
  return hash_odr_vtable (odr_type->type);
}
421 
/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   to merge two copies of otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.

   When STRICT is true, we compare types by their names for purposes of
   ODR violation warnings.  When strict is false, we consider variants
   equivalent, because it is all that matters for devirtualization machinery.
*/

bool
types_same_for_odr (const_tree type1, const_tree type2, bool strict)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = main_odr_variant (type1);
  type2 = main_odr_variant (type2);
  if (!strict)
    {
      type1 = TYPE_MAIN_VARIANT (type1);
      type2 = TYPE_MAIN_VARIANT (type2);
    }

  if (type1 == type2)
    return true;

  /* Outside LTO the frontend keeps equivalent types unique, so inequality
     of the variants above is definitive.  */
  if (!in_lto_p)
    return false;

  /* Check for anonymous namespaces. Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if (type_in_anonymous_namespace_p (type1)
      || type_in_anonymous_namespace_p (type2))
    return false;


  /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.

     Ideally we should never need types without ODR names here.  It can however
     happen in two cases:

       1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
          Here testing for equivalence is safe, since their MAIN_VARIANTs are
          unique.
       2) for units streamed with -fno-lto-odr-type-merging.  Here we can't
	  establish precise ODR equivalency, but for correctness we care only
	  about equivalency on complete polymorphic types.  For these we can
	  compare assembler names of their virtual tables.  */
  if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
      || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
    {
      /* See if types are obviously different (i.e. different codes
	 or polymorphic wrt non-polymorphic).  This is not strictly correct
	 for ODR violating programs, but we can't do better without streaming
	 ODR names.  */
      if (TREE_CODE (type1) != TREE_CODE (type2))
	return false;
      if (TREE_CODE (type1) == RECORD_TYPE
	  && (TYPE_BINFO (type1) == NULL_TREE)
	      != (TYPE_BINFO (type2) == NULL_TREE))
	return false;
      if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
	  && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
	     != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
	return false;

      /* At the moment we have no way to establish ODR equivalence at LTO
	 other than comparing virtual table pointers of polymorphic types.
	 Eventually we should start saving mangled names in TYPE_NAME.
	 Then this condition will become non-trivial.  */

      if (TREE_CODE (type1) == RECORD_TYPE
	  && TYPE_BINFO (type1) && TYPE_BINFO (type2)
	  && BINFO_VTABLE (TYPE_BINFO (type1))
	  && BINFO_VTABLE (TYPE_BINFO (type2)))
	{
	  tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
	  gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
		      && TREE_CODE (v2) == POINTER_PLUS_EXPR);
	  /* Equivalent types must use the same vtable decl at the same
	     offset.  */
	  return (operand_equal_p (TREE_OPERAND (v1, 1),
				   TREE_OPERAND (v2, 1), 0)
		  && DECL_ASSEMBLER_NAME
			 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
		     == DECL_ASSEMBLER_NAME
			 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
	}
      gcc_unreachable ();
    }
  /* Both types carry mangled names; compare them directly.  */
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
	  == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
}
519 
520 /* Return true if we can decide on ODR equivalency.
521 
522    In non-LTO it is always decide, in LTO however it depends in the type has
523    ODR info attached.
524 
525    When STRICT is false, compare main variants.  */
526 
527 bool
528 types_odr_comparable (tree t1, tree t2, bool strict)
529 {
530   return (!in_lto_p
531 	  || (strict ? main_odr_variant (t1) == main_odr_variant (t2)
532 	      : TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
533 	  || (odr_type_p (t1) && odr_type_p (t2))
534 	  || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
535 	      && TYPE_BINFO (t1) && TYPE_BINFO (t2)
536 	      && polymorphic_type_binfo_p (TYPE_BINFO (t1))
537 	      && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
538 }
539 
540 /* Return true if T1 and T2 are ODR equivalent.  If ODR equivalency is not
541    known, be conservative and return false.  */
542 
543 bool
544 types_must_be_same_for_odr (tree t1, tree t2)
545 {
546   if (types_odr_comparable (t1, t2))
547     return types_same_for_odr (t1, t2);
548   else
549     return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
550 }
551 
/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_name_hasher::equal (const value_type *o1, const compare_type *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (main_odr_variant (t2) == t2);
  gcc_checking_assert (main_odr_variant (t1) == t1);
  if (t1 == t2)
    return true;
  /* Outside LTO pointer identity is definitive.  */
  if (!in_lto_p)
    return false;
  /* Check for anonymous namespaces. Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if (type_in_anonymous_namespace_p (t1)
      || type_in_anonymous_namespace_p (t2))
    return false;
  /* Everything that made it into this hash must have a mangled name
     (see can_be_name_hashed_p).  */
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
	  == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
}
576 
/* Compare types T1 and T2 and return true if they are
   equivalent, judged by their virtual tables.  */

inline bool
odr_vtable_hasher::equal (const value_type *o1, const compare_type *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (main_odr_variant (t2) == t2);
  gcc_checking_assert (main_odr_variant (t1) == t1);
  gcc_checking_assert (in_lto_p);
  t1 = TYPE_MAIN_VARIANT (t1);
  t2 = TYPE_MAIN_VARIANT (t2);
  if (t1 == t2)
    return true;
  /* Both vtable references are expected in POINTER_PLUS_EXPR form
     (see the assert in types_same_for_odr): compare the offsets and
     the assembler names of the underlying vtable decls.  */
  tree v1 = BINFO_VTABLE (TYPE_BINFO (t1));
  tree v2 = BINFO_VTABLE (TYPE_BINFO (t2));
  return (operand_equal_p (TREE_OPERAND (v1, 1),
			   TREE_OPERAND (v2, 1), 0)
	  && DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
	     == DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
}
601 
602 /* Free ODR type V.  */
603 
604 inline void
605 odr_name_hasher::remove (value_type *v)
606 {
607   v->bases.release ();
608   v->derived_types.release ();
609   if (v->types_set)
610     delete v->types_set;
611   ggc_free (v);
612 }
613 
/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_name_hasher> odr_hash_type;
static odr_hash_type *odr_hash;
/* Companion hash indexed by vtable; used for polymorphic types without
   mangled names (-fno-lto-odr-type-merging).  */
typedef hash_table<odr_vtable_hasher> odr_vtable_hash_type;
static odr_vtable_hash_type *odr_vtable_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
627 
628 /* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
629 void
630 set_type_binfo (tree type, tree binfo)
631 {
632   for (; type; type = TYPE_NEXT_VARIANT (type))
633     if (COMPLETE_TYPE_P (type))
634       TYPE_BINFO (type) = binfo;
635     else
636       gcc_assert (!TYPE_BINFO (type));
637 }
638 
/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2,
			   hash_set<type_pair,pair_traits> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For ODR types be sure to compare their names.
     To support -fno-lto-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2, true))
    {
      if (!types_same_for_odr (t1, t2, true))
        return false;
      /* Limit recursion: If subtypes are ODR types and we know
         that they are same, be happy.  */
      if (!get_odr_type (t1, true)->odr_violated)
        return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;

  /* Canonicalize pair order by TYPE_UID so each unordered pair is
     recorded only once.  */
  type_pair pair={t1,t2};
  if (TYPE_UID (t1) > TYPE_UID (t2))
    {
      pair.first = t2;
      pair.second = t1;
    }
  /* If this pair is already being (or has been) compared, assume
     equivalence to break the recursion.  */
  if (visited->add (pair))
    return true;
  return odr_types_equivalent_p (t1, t2, false, NULL, visited);
}
691 
/* Compare two virtual tables, PREVAILING and VTABLE and output ODR
   violation warnings.  Walks the reference lists of both vtable
   variables in lockstep, skipping RTTI (non-virtual) entries, and
   diagnoses differences in contents and size.  */

void
compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
{
  int n1, n2;

  /* One decl being virtual and the other not is itself a violation;
     swap so that VTABLE is the one that is DECL_VIRTUAL_P.  */
  if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
    {
      odr_violation_reported = true;
      if (DECL_VIRTUAL_P (prevailing->decl))
	{
	  varpool_node *tmp = prevailing;
	  prevailing = vtable;
	  vtable = tmp;
	}
      if (warning_at (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
		      OPT_Wodr,
		      "virtual table of type %qD violates one definition rule",
		      DECL_CONTEXT (vtable->decl)))
	inform (DECL_SOURCE_LOCATION (prevailing->decl),
		"variable of same assembler name as the virtual table is "
		"defined in another translation unit");
      return;
    }
  if (!prevailing->definition || !vtable->definition)
    return;

  /* If we do not stream ODR type info, do not bother to do useful compare.  */
  if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
      || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
    return;

  odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);

  /* Avoid duplicate diagnostics for an already-reported type.  */
  if (class_type->odr_violated)
    return;

  for (n1 = 0, n2 = 0; true; n1++, n2++)
    {
      struct ipa_ref *ref1, *ref2;
      bool end1, end2;

      end1 = !prevailing->iterate_reference (n1, ref1);
      end2 = !vtable->iterate_reference (n2, ref2);

      /* !DECL_VIRTUAL_P means RTTI entry;
	 We warn when RTTI is lost because non-RTTI prevails; we silently
	 accept the other case.  */
      while (!end2
	     && (end1
	         || (DECL_ASSEMBLER_NAME (ref1->referred->decl)
		     != DECL_ASSEMBLER_NAME (ref2->referred->decl)
	             && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
	     && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
	{
	  if (!class_type->rtti_broken
	      && warning_at (DECL_SOURCE_LOCATION
			      (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			     OPT_Wodr,
			     "virtual table of type %qD contains RTTI "
			     "information",
			     DECL_CONTEXT (vtable->decl)))
	    {
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "but is prevailed by one without from other translation "
		      "unit");
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "RTTI will not work on this type");
	      class_type->rtti_broken = true;
	    }
	  n2++;
          end2 = !vtable->iterate_reference (n2, ref2);
	}
      /* Symmetrically skip extra RTTI entries in PREVAILING; this
	 direction is accepted silently.  */
      while (!end1
	     && (end2
	         || (DECL_ASSEMBLER_NAME (ref2->referred->decl)
		     != DECL_ASSEMBLER_NAME (ref1->referred->decl)
	             && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
	     && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
	{
	  n1++;
          end1 = !prevailing->iterate_reference (n1, ref1);
	}

      /* Finished?  */
      if (end1 && end2)
	{
	  /* Extra paranoia; compare the sizes.  We do not have information
	     about virtual inheritance offsets, so just be sure that these
	     match.
	     Do this as very last check so the not very informative error
	     is not output too often.  */
	  if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
	    {
	      class_type->odr_violated = true;
	      if (warning_at (DECL_SOURCE_LOCATION
				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			      OPT_Wodr,
			      "virtual table of type %qD violates "
			      "one definition rule  ",
			      DECL_CONTEXT (vtable->decl)))
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit has virtual table of different size");
		}
	    }
	  return;
	}

      if (!end1 && !end2)
	{
	  if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
	      == DECL_ASSEMBLER_NAME (ref2->referred->decl))
	    continue;

	  class_type->odr_violated = true;

	  /* If the loops above stopped on non-virtual pointer, we have
	     mismatch in RTTI information mangling.  */
	  if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
	      && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
	    {
	      if (warning_at (DECL_SOURCE_LOCATION
				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			      OPT_Wodr,
			      "virtual table of type %qD violates "
			      "one definition rule  ",
			      DECL_CONTEXT (vtable->decl)))
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit with different RTTI information");
		}
	      return;
	    }
	  /* At this point both REF1 and REF2 points either to virtual table
	     or virtual method.  If one points to virtual table and other to
	     method we can complain the same way as if one table was shorter
	     than other pointing out the extra method.  */
	  if (TREE_CODE (ref1->referred->decl)
	      != TREE_CODE (ref2->referred->decl))
	    {
	      if (TREE_CODE (ref1->referred->decl) == VAR_DECL)
		end1 = true;
	      else if (TREE_CODE (ref2->referred->decl) == VAR_DECL)
		end2 = true;
	    }
	}

      class_type->odr_violated = true;

      /* Complain about size mismatch.  Either we have too many virtual
 	 functions or too many virtual table pointers.  */
      if (end1 || end2)
	{
	  if (end1)
	    {
	      varpool_node *tmp = prevailing;
	      prevailing = vtable;
	      vtable = tmp;
	      ref1 = ref2;
	    }
	  if (warning_at (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			  OPT_Wodr,
			  "virtual table of type %qD violates "
			  "one definition rule",
			  DECL_CONTEXT (vtable->decl)))
	    {
	      if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
		{
		  inform (DECL_SOURCE_LOCATION
			   (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit");
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
			  "contains additional virtual method %qD",
			  ref1->referred->decl);
		}
	      else
		{
		  inform (DECL_SOURCE_LOCATION
			   (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit has virtual table table with more entries");
		}
	    }
	  return;
	}

      /* And in the last case we have either mismatch in between two virtual
	 methods or two virtual table pointers.  */
      if (warning_at (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
		      "virtual table of type %qD violates "
		      "one definition rule  ",
		      DECL_CONTEXT (vtable->decl)))
	{
	  if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
	    {
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "the conflicting type defined in another translation "
		      "unit");
	      gcc_assert (TREE_CODE (ref2->referred->decl)
			  == FUNCTION_DECL);
	      inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
		      "virtual method %qD", ref1->referred->decl);
	      inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
		      "ought to match virtual method %qD but does not",
		      ref2->referred->decl);
	    }
	  else
	    inform (DECL_SOURCE_LOCATION
		      (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		    "the conflicting type defined in another translation "
		    "unit has virtual table table with different contents");
	  return;
	}
    }
}
922 
923 /* Output ODR violation warning about T1 and T2 with REASON.
924    Display location of ST1 and ST2 if REASON speaks about field or
925    method of the type.
926    If WARN is false, do nothing. Set WARNED if warning was indeed
927    output.  */
928 
929 void
930 warn_odr (tree t1, tree t2, tree st1, tree st2,
931 	  bool warn, bool *warned, const char *reason)
932 {
933   tree decl2 = TYPE_NAME (t2);
934   if (warned)
935     *warned = false;
936 
937   if (!warn || !TYPE_NAME(t1))
938     return;
939 
940   /* ODR warnings are output druing LTO streaming; we must apply location
941      cache for potential warnings to be output correctly.  */
942   if (lto_location_cache::current_cache)
943     lto_location_cache::current_cache->apply_location_cache ();
944 
945   if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
946 		   "type %qT violates one definition rule",
947 		   t1))
948     return;
949   if (!st1 && !st2)
950     ;
951   /* For FIELD_DECL support also case where one of fields is
952      NULL - this is used when the structures have mismatching number of
953      elements.  */
954   else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
955     {
956       inform (DECL_SOURCE_LOCATION (decl2),
957 	      "a different type is defined in another translation unit");
958       if (!st1)
959 	{
960 	  st1 = st2;
961 	  st2 = NULL;
962 	}
963       inform (DECL_SOURCE_LOCATION (st1),
964 	      "the first difference of corresponding definitions is field %qD",
965 	      st1);
966       if (st2)
967         decl2 = st2;
968     }
969   else if (TREE_CODE (st1) == FUNCTION_DECL)
970     {
971       inform (DECL_SOURCE_LOCATION (decl2),
972 	      "a different type is defined in another translation unit");
973       inform (DECL_SOURCE_LOCATION (st1),
974 	      "the first difference of corresponding definitions is method %qD",
975 	      st1);
976       decl2 = st2;
977     }
978   else
979     return;
980   inform (DECL_SOURCE_LOCATION (decl2), reason);
981 
982   if (warned)
983     *warned = true;
984 }
985 
986 /* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
987    because they are used on same place in ODR matching types.
988    They are not; inform the user.  */
989 
990 void
991 warn_types_mismatch (tree t1, tree t2)
992 {
993   /* If types have names and they are different, it is most informative to
994      output those.  */
995   if (TYPE_NAME (t1) && TYPE_NAME (t2)
996       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t1))
997       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t2))
998       && DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
999 	 != DECL_ASSEMBLER_NAME (TYPE_NAME (t2)))
1000     {
1001       char *name1 = xstrdup (cplus_demangle
1002 	 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))),
1003 	  DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES));
1004       char *name2 = cplus_demangle
1005 	 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t2))),
1006 	  DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES);
1007       if (name1 && name2 && strcmp (name1, name2))
1008 	{
1009 	  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1010 		  "type name %<%s%> should match type name %<%s%>",
1011 		  name1, name2);
1012 	  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
1013 		  "the incompatible type is defined here");
1014 	  free (name1);
1015 	  return;
1016 	}
1017       free (name1);
1018     }
1019   /* It is a quite common bug to reference anonymous namespace type in
1020      non-anonymous namespace class.  */
1021   if (type_in_anonymous_namespace_p (t1)
1022       || type_in_anonymous_namespace_p (t2))
1023     {
1024       if (!type_in_anonymous_namespace_p (t1))
1025 	{
1026 	  tree tmp = t1;;
1027 	  t1 = t2;
1028 	  t2 = tmp;
1029 	}
1030       if (TYPE_NAME (t1) && TYPE_NAME (t2))
1031 	{
1032 	  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1033 		  "type %qT defined in anonymous namespace can not match "
1034 		  "type %qT",
1035 		  t1, t2);
1036 	  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
1037 		  "the incompatible type defined in anonymous namespace in "
1038 		  "another translation unit");
1039 	}
1040       else
1041 	inform (UNKNOWN_LOCATION,
1042 		"types in anonymous namespace does not match across "
1043 		"translation unit boundary");
1044       return;
1045     }
1046   /* A tricky case are component types.  Often they appear the same in source
1047      code and the mismatch is dragged in by type they are build from.
1048      Look for those differences in subtypes and try to be informative.  In other
1049      cases just output nothing because the source code is probably different
1050      and in this case we already output a all necessary info.  */
1051   if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1052     {
1053       if (TREE_CODE (t1) == TREE_CODE (t2))
1054 	{
1055 	  hash_set<type_pair,pair_traits> visited;
1056 	  if (TREE_CODE (t1) == ARRAY_TYPE
1057 	      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1058 	    {
1059 	      tree i1 = TYPE_DOMAIN (t1);
1060 	      tree i2 = TYPE_DOMAIN (t2);
1061 
1062 	      if (i1 && i2
1063 		  && TYPE_MAX_VALUE (i1)
1064 		  && TYPE_MAX_VALUE (i2)
1065 		  && !operand_equal_p (TYPE_MAX_VALUE (i1),
1066 				       TYPE_MAX_VALUE (i2), 0))
1067 		{
1068 		  inform (UNKNOWN_LOCATION,
1069 			  "array types have different bounds");
1070 		  return;
1071 		}
1072 	    }
1073 	  if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1074 	      && !odr_subtypes_equivalent_p (TREE_TYPE (t1),
1075 					     TREE_TYPE (t2),
1076 					     &visited))
1077 	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1078 	  else if (TREE_CODE (t1) == METHOD_TYPE
1079 		   || TREE_CODE (t1) == FUNCTION_TYPE)
1080 	    {
1081 	      tree parms1, parms2;
1082 	      int count = 1;
1083 
1084 	      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1085 					      &visited))
1086 		{
1087 		  inform (UNKNOWN_LOCATION, "return value type mismatch");
1088 		  warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1089 		  return;
1090 		}
1091 	      for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1092 		   parms1 && parms2;
1093 		   parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1094 		   count++)
1095 		{
1096 		  if (!odr_subtypes_equivalent_p
1097 		      (TREE_VALUE (parms1), TREE_VALUE (parms2), &visited))
1098 		    {
1099 		      inform (UNKNOWN_LOCATION,
1100 			      "type mismatch in parameter %i", count);
1101 		      warn_types_mismatch (TREE_VALUE (parms1),
1102 					   TREE_VALUE (parms2));
1103 		      return;
1104 		    }
1105 		}
1106 	      if (parms1 || parms2)
1107 		{
1108 		  inform (UNKNOWN_LOCATION,
1109 			  "types have different parameter counts");
1110 		  return;
1111 		}
1112 	    }
1113 	}
1114       return;
1115     }
1116   /* This should not happen but if it does, the warning would not be helpful.
1117      TODO: turn it into assert next stage1.  */
1118   if (TYPE_NAME (t1) == TYPE_NAME (t2))
1119     return;
1120   /* In Firefox it is a common bug to have same types but in
1121      different namespaces.  Be a bit more informative on
1122      this.  */
1123   if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
1124       && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
1125 	    != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
1126 	   || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
1127 	       && (DECL_NAME (TYPE_CONTEXT (t1)) !=
1128 		   DECL_NAME (TYPE_CONTEXT (t2))))))
1129     inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1130 	    "type %qT should match type %qT but is defined "
1131 	    "in different namespace  ",
1132 	    t1, t2);
1133   else if (types_odr_comparable (t1, t2, true)
1134 	   && types_same_for_odr (t1, t2, true))
1135     inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1136 	    "type %qT should match type %qT that itself violate "
1137 	    "one definition rule",
1138 	    t1, t2);
1139   else
1140     inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1141 	    "type %qT should match type %qT",
1142 	    t1, t2);
1143   if (DECL_SOURCE_LOCATION (TYPE_NAME (t2)) > BUILTINS_LOCATION)
1144     inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
1145 	    "the incompatible type is defined here");
1146 }
1147 
1148 /* Compare T1 and T2, report ODR violations if WARN is true and set
1149    WARNED to true if anything is reported.  Return true if types match.
1150    If true is returned, the types are also compatible in the sense of
1151    gimple_canonical_types_compatible_p.  */
1152 
1153 static bool
1154 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
1155 			hash_set<type_pair,pair_traits> *visited)
1156 {
1157   /* Check first for the obvious case of pointer identity.  */
1158   if (t1 == t2)
1159     return true;
1160   gcc_assert (!type_in_anonymous_namespace_p (t1));
1161   gcc_assert (!type_in_anonymous_namespace_p (t2));
1162 
1163   /* Can't be the same type if the types don't have the same code.  */
1164   if (TREE_CODE (t1) != TREE_CODE (t2))
1165     {
1166       warn_odr (t1, t2, NULL, NULL, warn, warned,
1167 	        G_("a different type is defined in another translation unit"));
1168       return false;
1169     }
1170 
1171   if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
1172     {
1173       warn_odr (t1, t2, NULL, NULL, warn, warned,
1174 	        G_("a type with different qualifiers is defined in another "
1175 		   "translation unit"));
1176       return false;
1177     }
1178 
1179   if (comp_type_attributes (t1, t2) != 1)
1180     {
1181       warn_odr (t1, t2, NULL, NULL, warn, warned,
1182 	        G_("a type with attributes "
1183 		   "is defined in another translation unit"));
1184       return false;
1185     }
1186 
1187   if (TREE_CODE (t1) == ENUMERAL_TYPE
1188       && TYPE_VALUES (t1) && TYPE_VALUES (t2))
1189     {
1190       tree v1, v2;
1191       for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
1192 	   v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
1193 	{
1194 	  if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
1195 	    {
1196 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1197 			G_("an enum with different value name"
1198 			   " is defined in another translation unit"));
1199 	      return false;
1200 	    }
1201 	  if (TREE_VALUE (v1) != TREE_VALUE (v2)
1202 	      && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
1203 				   DECL_INITIAL (TREE_VALUE (v2)), 0))
1204 	    {
1205 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1206 			G_("an enum with different values is defined"
1207 			   " in another translation unit"));
1208 	      return false;
1209 	    }
1210 	}
1211       if (v1 || v2)
1212 	{
1213 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1214 		    G_("an enum with mismatching number of values "
1215 		       "is defined in another translation unit"));
1216 	  return false;
1217 	}
1218     }
1219 
1220   /* Non-aggregate types can be handled cheaply.  */
1221   if (INTEGRAL_TYPE_P (t1)
1222       || SCALAR_FLOAT_TYPE_P (t1)
1223       || FIXED_POINT_TYPE_P (t1)
1224       || TREE_CODE (t1) == VECTOR_TYPE
1225       || TREE_CODE (t1) == COMPLEX_TYPE
1226       || TREE_CODE (t1) == OFFSET_TYPE
1227       || POINTER_TYPE_P (t1))
1228     {
1229       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
1230 	{
1231 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1232 		    G_("a type with different precision is defined "
1233 		       "in another translation unit"));
1234 	  return false;
1235 	}
1236       if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
1237 	{
1238 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1239 		    G_("a type with different signedness is defined "
1240 		       "in another translation unit"));
1241 	  return false;
1242 	}
1243 
1244       if (TREE_CODE (t1) == INTEGER_TYPE
1245 	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1246 	{
1247 	  /* char WRT uint_8?  */
1248 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1249 		    G_("a different type is defined in another "
1250 		       "translation unit"));
1251 	  return false;
1252 	}
1253 
1254       /* For canonical type comparisons we do not want to build SCCs
1255 	 so we cannot compare pointed-to types.  But we can, for now,
1256 	 require the same pointed-to type kind and match what
1257 	 useless_type_conversion_p would do.  */
1258       if (POINTER_TYPE_P (t1))
1259 	{
1260 	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1261 	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1262 	    {
1263 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1264 			G_("it is defined as a pointer in different address "
1265 			   "space in another translation unit"));
1266 	      return false;
1267 	    }
1268 
1269 	  if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1270 	    {
1271 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1272 			G_("it is defined as a pointer to different type "
1273 			   "in another translation unit"));
1274 	      if (warn && warned)
1275 	        warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1276 	      return false;
1277 	    }
1278 	}
1279 
1280       if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
1281 	  && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1282 	{
1283 	  /* Probably specific enough.  */
1284 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1285 		    G_("a different type is defined "
1286 		       "in another translation unit"));
1287 	  if (warn && warned)
1288 	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1289 	  return false;
1290 	}
1291     }
1292   /* Do type-specific comparisons.  */
1293   else switch (TREE_CODE (t1))
1294     {
1295     case ARRAY_TYPE:
1296       {
1297 	/* Array types are the same if the element types are the same and
1298 	   the number of elements are the same.  */
1299 	if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1300 	  {
1301 	    warn_odr (t1, t2, NULL, NULL, warn, warned,
1302 		      G_("a different type is defined in another "
1303 			 "translation unit"));
1304 	    if (warn && warned)
1305 	      warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1306 	  }
1307 	gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1308 	gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1309 		    == TYPE_NONALIASED_COMPONENT (t2));
1310 
1311 	tree i1 = TYPE_DOMAIN (t1);
1312 	tree i2 = TYPE_DOMAIN (t2);
1313 
1314 	/* For an incomplete external array, the type domain can be
1315 	   NULL_TREE.  Check this condition also.  */
1316 	if (i1 == NULL_TREE || i2 == NULL_TREE)
1317 	  return true;
1318 
1319 	tree min1 = TYPE_MIN_VALUE (i1);
1320 	tree min2 = TYPE_MIN_VALUE (i2);
1321 	tree max1 = TYPE_MAX_VALUE (i1);
1322 	tree max2 = TYPE_MAX_VALUE (i2);
1323 
1324 	/* In C++, minimums should be always 0.  */
1325 	gcc_assert (min1 == min2);
1326 	if (!operand_equal_p (max1, max2, 0))
1327 	  {
1328 	    warn_odr (t1, t2, NULL, NULL, warn, warned,
1329 		      G_("an array of different size is defined "
1330 			 "in another translation unit"));
1331 	    return false;
1332 	  }
1333       }
1334     break;
1335 
1336     case METHOD_TYPE:
1337     case FUNCTION_TYPE:
1338       /* Function types are the same if the return type and arguments types
1339 	 are the same.  */
1340       if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1341 	{
1342 	  warn_odr (t1, t2, NULL, NULL, warn, warned,
1343 		    G_("has different return value "
1344 		       "in another translation unit"));
1345 	  if (warn && warned)
1346 	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1347 	  return false;
1348 	}
1349 
1350       if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
1351 	return true;
1352       else
1353 	{
1354 	  tree parms1, parms2;
1355 
1356 	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1357 	       parms1 && parms2;
1358 	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1359 	    {
1360 	      if (!odr_subtypes_equivalent_p
1361 		     (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
1362 		{
1363 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1364 			    G_("has different parameters in another "
1365 			       "translation unit"));
1366 		  if (warn && warned)
1367 		    warn_types_mismatch (TREE_VALUE (parms1),
1368 					 TREE_VALUE (parms2));
1369 		  return false;
1370 		}
1371 	    }
1372 
1373 	  if (parms1 || parms2)
1374 	    {
1375 	      warn_odr (t1, t2, NULL, NULL, warn, warned,
1376 			G_("has different parameters "
1377 			   "in another translation unit"));
1378 	      return false;
1379 	    }
1380 
1381 	  return true;
1382 	}
1383 
1384     case RECORD_TYPE:
1385     case UNION_TYPE:
1386     case QUAL_UNION_TYPE:
1387       {
1388 	tree f1, f2;
1389 
1390 	/* For aggregate types, all the fields must be the same.  */
1391 	if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1392 	  {
1393 	    if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
1394 	        && polymorphic_type_binfo_p (TYPE_BINFO (t1))
1395 		   != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
1396 	      {
1397 		if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
1398 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1399 			    G_("a type defined in another translation unit "
1400 			       "is not polymorphic"));
1401 		else
1402 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1403 			    G_("a type defined in another translation unit "
1404 			       "is polymorphic"));
1405 		return false;
1406 	      }
1407 	    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1408 		 f1 || f2;
1409 		 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1410 	      {
1411 		/* Skip non-fields.  */
1412 		while (f1 && TREE_CODE (f1) != FIELD_DECL)
1413 		  f1 = TREE_CHAIN (f1);
1414 		while (f2 && TREE_CODE (f2) != FIELD_DECL)
1415 		  f2 = TREE_CHAIN (f2);
1416 		if (!f1 || !f2)
1417 		  break;
1418 		if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1419 		  {
1420 		    warn_odr (t1, t2, NULL, NULL, warn, warned,
1421 			      G_("a type with different virtual table pointers"
1422 			         " is defined in another translation unit"));
1423 		    return false;
1424 		  }
1425 		if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1426 		  {
1427 		    warn_odr (t1, t2, NULL, NULL, warn, warned,
1428 			      G_("a type with different bases is defined "
1429 				 "in another translation unit"));
1430 		    return false;
1431 		  }
1432 		if (DECL_NAME (f1) != DECL_NAME (f2)
1433 		    && !DECL_ARTIFICIAL (f1))
1434 		  {
1435 		    warn_odr (t1, t2, f1, f2, warn, warned,
1436 			      G_("a field with different name is defined "
1437 				 "in another translation unit"));
1438 		    return false;
1439 		  }
1440 		if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1441 						TREE_TYPE (f2), visited))
1442 		  {
1443 		    /* Do not warn about artificial fields and just go into
1444  		       generic field mismatch warning.  */
1445 		    if (DECL_ARTIFICIAL (f1))
1446 		      break;
1447 
1448 		    warn_odr (t1, t2, f1, f2, warn, warned,
1449 			      G_("a field of same name but different type "
1450 				 "is defined in another translation unit"));
1451 		    if (warn && warned)
1452 		      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
1453 		    return false;
1454 		  }
1455 		if (!gimple_compare_field_offset (f1, f2))
1456 		  {
1457 		    /* Do not warn about artificial fields and just go into
1458 		       generic field mismatch warning.  */
1459 		    if (DECL_ARTIFICIAL (f1))
1460 		      break;
1461 		    warn_odr (t1, t2, f1, f2, warn, warned,
1462 			      G_("fields have different layout "
1463 				 "in another translation unit"));
1464 		    return false;
1465 		  }
1466 		gcc_assert (DECL_NONADDRESSABLE_P (f1)
1467 			    == DECL_NONADDRESSABLE_P (f2));
1468 	      }
1469 
1470 	    /* If one aggregate has more fields than the other, they
1471 	       are not the same.  */
1472 	    if (f1 || f2)
1473 	      {
1474 		if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1475 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1476 			    G_("a type with different virtual table pointers"
1477 			       " is defined in another translation unit"));
1478 		else if ((f1 && DECL_ARTIFICIAL (f1))
1479 		         || (f2 && DECL_ARTIFICIAL (f2)))
1480 		  warn_odr (t1, t2, NULL, NULL, warn, warned,
1481 			    G_("a type with different bases is defined "
1482 			       "in another translation unit"));
1483 		else
1484 		  warn_odr (t1, t2, f1, f2, warn, warned,
1485 			    G_("a type with different number of fields "
1486 			       "is defined in another translation unit"));
1487 
1488 		return false;
1489 	      }
1490 	    if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1491 		&& (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1492 		    != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1493 	      {
1494 		for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1495 		     f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1496 		     f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1497 		  {
1498 		    if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1499 		      {
1500 			warn_odr (t1, t2, f1, f2, warn, warned,
1501 				  G_("a different method of same type "
1502 				     "is defined in another translation unit"));
1503 			return false;
1504 		      }
1505 		    if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1506 		      {
1507 			warn_odr (t1, t2, f1, f2, warn, warned,
1508 				  G_("s definition that differs by virtual "
1509 				     "keyword in another translation unit"));
1510 			return false;
1511 		      }
1512 		    if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1513 		      {
1514 			warn_odr (t1, t2, f1, f2, warn, warned,
1515 				  G_("virtual table layout differs in another "
1516 				     "translation unit"));
1517 			return false;
1518 		      }
1519 		    if (odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1520 		      {
1521 			warn_odr (t1, t2, f1, f2, warn, warned,
1522 				  G_("method with incompatible type is defined "
1523 				     "in another translation unit"));
1524 			return false;
1525 		      }
1526 		  }
1527 		if (f1 || f2)
1528 		  {
1529 		    warn_odr (t1, t2, NULL, NULL, warn, warned,
1530 			      G_("a type with different number of methods "
1531 				 "is defined in another translation unit"));
1532 		    return false;
1533 		  }
1534 	      }
1535 	  }
1536 	break;
1537       }
1538     case VOID_TYPE:
1539     case NULLPTR_TYPE:
1540       break;
1541 
1542     default:
1543       debug_tree (t1);
1544       gcc_unreachable ();
1545     }
1546 
1547   /* Those are better to come last as they are utterly uninformative.  */
1548   if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1549       && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1550     {
1551       warn_odr (t1, t2, NULL, NULL, warn, warned,
1552 		G_("a type with different size "
1553 		   "is defined in another translation unit"));
1554       return false;
1555     }
1556   if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1557       && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1558     {
1559       warn_odr (t1, t2, NULL, NULL, warn, warned,
1560 		G_("a type with different alignment "
1561 		   "is defined in another translation unit"));
1562       return false;
1563     }
1564   gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1565 	      || operand_equal_p (TYPE_SIZE_UNIT (t1),
1566 				  TYPE_SIZE_UNIT (t2), 0));
1567   return true;
1568 }
1569 
1570 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1571    from VAL->type.  This may happen in LTO where tree merging did not merge
1572    all variants of the same type or due to ODR violation.
1573 
1574    Analyze and report ODR violations and add type to duplicate list.
1575    If TYPE is more specified than VAL->type, prevail VAL->type.  Also if
1576    this is first time we see definition of a class return true so the
1577    base types are analyzed.  */
1578 
1579 static bool
1580 add_type_duplicate (odr_type val, tree type)
1581 {
1582   bool build_bases = false;
1583   bool prevail = false;
1584   bool odr_must_violate = false;
1585 
1586   if (!val->types_set)
1587     val->types_set = new hash_set<tree>;
1588 
1589   /* Chose polymorphic type as leader (this happens only in case of ODR
1590      violations.  */
1591   if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1592        && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1593       && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1594           || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1595     {
1596       prevail = true;
1597       build_bases = true;
1598     }
1599   /* Always prefer complete type to be the leader.  */
1600   else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1601     {
1602       prevail = true;
1603       build_bases = TYPE_BINFO (type);
1604     }
1605   else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1606     ;
1607   else if (TREE_CODE (val->type) == ENUMERAL_TYPE
1608 	   && TREE_CODE (type) == ENUMERAL_TYPE
1609 	   && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
1610     prevail = true;
1611   else if (TREE_CODE (val->type) == RECORD_TYPE
1612 	   && TREE_CODE (type) == RECORD_TYPE
1613 	   && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1614     {
1615       gcc_assert (!val->bases.length ());
1616       build_bases = true;
1617       prevail = true;
1618     }
1619 
1620   if (prevail)
1621     {
1622       tree tmp = type;
1623 
1624       type = val->type;
1625       val->type = tmp;
1626     }
1627 
1628   val->types_set->add (type);
1629 
1630   /* If we now have a mangled name, be sure to record it to val->type
1631      so ODR hash can work.  */
1632 
1633   if (can_be_name_hashed_p (type) && !can_be_name_hashed_p (val->type))
1634     SET_DECL_ASSEMBLER_NAME (TYPE_NAME (val->type),
1635 			     DECL_ASSEMBLER_NAME (TYPE_NAME (type)));
1636 
1637   bool merge = true;
1638   bool base_mismatch = false;
1639   unsigned int i;
1640   bool warned = false;
1641   hash_set<type_pair,pair_traits> visited;
1642 
1643   gcc_assert (in_lto_p);
1644   vec_safe_push (val->types, type);
1645 
1646   /* If both are class types, compare the bases.  */
1647   if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1648       && TREE_CODE (val->type) == RECORD_TYPE
1649       && TREE_CODE (type) == RECORD_TYPE
1650       && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1651     {
1652       if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1653 	  != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1654 	{
1655 	  if (!flag_ltrans && !warned && !val->odr_violated)
1656 	    {
1657 	      tree extra_base;
1658 	      warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1659 			"a type with the same name but different "
1660 			"number of polymorphic bases is "
1661 			"defined in another translation unit");
1662 	      if (warned)
1663 		{
1664 		  if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1665 		      > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1666 		    extra_base = BINFO_BASE_BINFO
1667 				 (TYPE_BINFO (type),
1668 				  BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1669 		  else
1670 		    extra_base = BINFO_BASE_BINFO
1671 				 (TYPE_BINFO (val->type),
1672 				  BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1673 		  tree extra_base_type = BINFO_TYPE (extra_base);
1674 		  inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1675 			  "the extra base is defined here");
1676 		}
1677 	    }
1678 	  base_mismatch = true;
1679 	}
1680       else
1681 	for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1682 	  {
1683 	    tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1684 	    tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1685 	    tree type1 = BINFO_TYPE (base1);
1686 	    tree type2 = BINFO_TYPE (base2);
1687 
1688 	    if (types_odr_comparable (type1, type2))
1689 	      {
1690 		if (!types_same_for_odr (type1, type2))
1691 		  base_mismatch = true;
1692 	      }
1693 	    else
1694 	      {
1695 		hash_set<type_pair,pair_traits> visited;
1696 		if (!odr_types_equivalent_p (type1, type2, false, NULL,
1697 					     &visited))
1698 		  base_mismatch = true;
1699 	      }
1700 	    if (base_mismatch)
1701 	      {
1702 		if (!warned && !val->odr_violated)
1703 		  {
1704 		    warn_odr (type, val->type, NULL, NULL,
1705 			      !warned, &warned,
1706 			      "a type with the same name but different base "
1707 			      "type is defined in another translation unit");
1708 		    if (warned)
1709 		      warn_types_mismatch (type1, type2);
1710 		  }
1711 		break;
1712 	      }
1713 	    if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1714 	      {
1715 		base_mismatch = true;
1716 		if (!warned && !val->odr_violated)
1717 		  warn_odr (type, val->type, NULL, NULL,
1718 			    !warned, &warned,
1719 			    "a type with the same name but different base "
1720 			    "layout is defined in another translation unit");
1721 		break;
1722 	      }
1723 	    /* One of bases is not of complete type.  */
1724 	    if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1725 	      {
1726 		/* If we have a polymorphic type info specified for TYPE1
1727 		   but not for TYPE2 we possibly missed a base when recording
1728 		   VAL->type earlier.
1729 		   Be sure this does not happen.  */
1730 		if (TYPE_BINFO (type1)
1731 		    && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1732 		    && !build_bases)
1733 		  odr_must_violate = true;
1734 	        break;
1735 	      }
1736 	    /* One base is polymorphic and the other not.
1737 	       This ought to be diagnosed earlier, but do not ICE in the
1738 	       checking bellow.  */
1739 	    else if (TYPE_BINFO (type1)
1740 		     && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1741 		        != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1742 	      {
1743 		if (!warned && !val->odr_violated)
1744 		  warn_odr (type, val->type, NULL, NULL,
1745 			    !warned, &warned,
1746 			    "a base of the type is polymorphic only in one "
1747 			    "translation unit");
1748 		base_mismatch = true;
1749 		break;
1750 	      }
1751 	  }
1752       if (base_mismatch)
1753 	{
1754 	  merge = false;
1755 	  odr_violation_reported = true;
1756 	  val->odr_violated = true;
1757 
1758 	  if (symtab->dump_file)
1759 	    {
1760 	      fprintf (symtab->dump_file, "ODR base violation\n");
1761 
1762 	      print_node (symtab->dump_file, "", val->type, 0);
1763 	      putc ('\n',symtab->dump_file);
1764 	      print_node (symtab->dump_file, "", type, 0);
1765 	      putc ('\n',symtab->dump_file);
1766 	    }
1767 	}
1768     }
1769 
1770   /* Next compare memory layout.  */
1771   if (!odr_types_equivalent_p (val->type, type,
1772 			       !flag_ltrans && !val->odr_violated && !warned,
1773 			       &warned, &visited))
1774     {
1775       merge = false;
1776       odr_violation_reported = true;
1777       val->odr_violated = true;
1778       if (symtab->dump_file)
1779 	{
1780 	  fprintf (symtab->dump_file, "ODR violation\n");
1781 
1782 	  print_node (symtab->dump_file, "", val->type, 0);
1783 	  putc ('\n',symtab->dump_file);
1784 	  print_node (symtab->dump_file, "", type, 0);
1785 	  putc ('\n',symtab->dump_file);
1786 	}
1787     }
1788   gcc_assert (val->odr_violated || !odr_must_violate);
1789   /* Sanity check that all bases will be build same way again.  */
1790 #ifdef ENABLE_CHECKING
1791   if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1792       && TREE_CODE (val->type) == RECORD_TYPE
1793       && TREE_CODE (type) == RECORD_TYPE
1794       && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1795       && !val->odr_violated
1796       && !base_mismatch && val->bases.length ())
1797     {
1798       unsigned int num_poly_bases = 0;
1799       unsigned int j;
1800 
1801       for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1802 	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1803 					 (TYPE_BINFO (type), i)))
1804 	  num_poly_bases++;
1805       gcc_assert (num_poly_bases == val->bases.length ());
1806       for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
1807 	   i++)
1808 	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1809 				       (TYPE_BINFO (type), i)))
1810 	  {
1811 	    odr_type base = get_odr_type
1812 			       (BINFO_TYPE
1813 				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
1814 						     i)),
1815 				true);
1816 	    gcc_assert (val->bases[j] == base);
1817 	    j++;
1818 	  }
1819     }
1820 #endif
1821 
1822 
1823   /* Regularize things a little.  During LTO same types may come with
1824      different BINFOs.  Either because their virtual table was
1825      not merged by tree merging and only later at decl merging or
1826      because one type comes with external vtable, while other
1827      with internal.  We want to merge equivalent binfos to conserve
1828      memory and streaming overhead.
1829 
1830      The external vtables are more harmful: they contain references
1831      to external declarations of methods that may be defined in the
1832      merged LTO unit.  For this reason we absolutely need to remove
1833      them and replace by internal variants. Not doing so will lead
1834      to incomplete answers from possible_polymorphic_call_targets.
1835 
1836      FIXME: disable for now; because ODR types are now build during
1837      streaming in, the variants do not need to be linked to the type,
1838      yet.  We need to do the merging in cleanup pass to be implemented
1839      soon.  */
1840   if (!flag_ltrans && merge
1841       && 0
1842       && TREE_CODE (val->type) == RECORD_TYPE
1843       && TREE_CODE (type) == RECORD_TYPE
1844       && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1845       && TYPE_MAIN_VARIANT (type) == type
1846       && TYPE_MAIN_VARIANT (val->type) == val->type
1847       && BINFO_VTABLE (TYPE_BINFO (val->type))
1848       && BINFO_VTABLE (TYPE_BINFO (type)))
1849     {
1850       tree master_binfo = TYPE_BINFO (val->type);
1851       tree v1 = BINFO_VTABLE (master_binfo);
1852       tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1853 
1854       if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1855 	{
1856 	  gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1857 		      && operand_equal_p (TREE_OPERAND (v1, 1),
1858 					  TREE_OPERAND (v2, 1), 0));
1859 	  v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1860 	  v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1861 	}
1862       gcc_assert (DECL_ASSEMBLER_NAME (v1)
1863 		  == DECL_ASSEMBLER_NAME (v2));
1864 
1865       if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1866 	{
1867 	  unsigned int i;
1868 
1869 	  set_type_binfo (val->type, TYPE_BINFO (type));
1870 	  for (i = 0; i < val->types->length (); i++)
1871 	    {
1872 	      if (TYPE_BINFO ((*val->types)[i])
1873 		  == master_binfo)
1874 		set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1875 	    }
1876 	  BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1877 	}
1878       else
1879 	set_type_binfo (type, master_binfo);
1880     }
1881   return build_bases;
1882 }
1883 
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.

   Returns NULL when TYPE is not present and INSERT is false.
   Types are hashed by mangled name when possible; in LTO a second
   hash keyed on the vtable is maintained so that types whose name
   was lost can still be unified.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type_d **vtable_slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  /* All variants of a type share one ODR entry; canonicalize first.  */
  type = main_odr_variant (type);

  gcc_checking_assert (can_be_name_hashed_p (type)
		       || can_be_vtable_hashed_p (type));

  /* Lookup entry, first try name hash, fallback to vtable hash.  */
  if (can_be_name_hashed_p (type))
    {
      hash = hash_odr_name (type);
      slot = odr_hash->find_slot_with_hash (type, hash,
					    insert ? INSERT : NO_INSERT);
    }
  if ((!slot || !*slot) && in_lto_p && can_be_vtable_hashed_p (type))
    {
      hash = hash_odr_vtable (type);
      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
					           insert ? INSERT : NO_INSERT);
    }

  if (!slot && !vtable_slot)
    return NULL;

  /* See if we already have entry for type.  */
  if ((slot && *slot) || (vtable_slot && *vtable_slot))
    {
      if (slot && *slot)
	{
	  val = *slot;
#ifdef ENABLE_CHECKING
	  /* When found by name, verify the vtable hash agrees.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type))
	    {
	      hash = hash_odr_vtable (type);
	      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
						                  NO_INSERT);
	      gcc_assert (!vtable_slot || *vtable_slot == *slot);
	      vtable_slot = NULL;
	    }
#endif
	}
      else if (*vtable_slot)
	val = *vtable_slot;

      /* TYPE is a new duplicate of an existing ODR type (not yet in
	 the entry's types_set); register it.  */
      if (val->type != type
	  && (!val->types_set || !val->types_set->add (type)))
	{
	  gcc_assert (insert);
	  /* We have type duplicate, but it may introduce vtable name or
 	     mangled name; be sure to keep hashes in sync.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type)
	      && (!vtable_slot || !*vtable_slot))
	    {
	      if (!vtable_slot)
		{
		  hash = hash_odr_vtable (type);
		  vtable_slot = odr_vtable_hash->find_slot_with_hash
			     (type, hash, INSERT);
		  gcc_checking_assert (!*vtable_slot || *vtable_slot == val);
		}
	      *vtable_slot = val;
	    }
	  if (slot && !*slot)
	    *slot = val;
	  build_bases = add_type_duplicate (val, type);
	}
    }
  else
    {
      /* First time we see this type; allocate a fresh entry.  */
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      /* Bases can only be recorded once the type is complete.  */
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      if (slot)
        *slot = val;
      if (vtable_slot)
	*vtable_slot = val;
    }

  /* Link VAL into the inheritance graph by visiting its bases.  */
  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types; others are
	   pointless for devirtualization and we can not precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    /* Track the largest base id so we can keep ids ordered.  */
	    if (base->id > base_id)
	      base_id = base->id;
	  }
      }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A base obtained a larger id than VAL (recursive get_odr_type
	 above); re-append VAL so it again follows all of its bases.  */
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
2021 
/* Add TYPE to ODR type hash.  */
2023 
2024 void
2025 register_odr_type (tree type)
2026 {
2027   if (!odr_hash)
2028     {
2029       odr_hash = new odr_hash_type (23);
2030       if (in_lto_p)
2031         odr_vtable_hash = new odr_vtable_hash_type (23);
2032     }
2033   /* Arrange things to be nicer and insert main variants first.  */
2034   if (odr_type_p (TYPE_MAIN_VARIANT (type)))
2035     get_odr_type (TYPE_MAIN_VARIANT (type), true);
2036   if (TYPE_MAIN_VARIANT (type) != type)
2037     get_odr_type (type, true);
2038 }
2039 
2040 /* Return true if type is known to have no derivations.  */
2041 
2042 bool
2043 type_known_to_have_no_deriavations_p (tree t)
2044 {
2045   return (type_all_derivations_known_p (t)
2046 	  && (TYPE_FINAL_P (t)
2047 	      || (odr_hash
2048 		  && !get_odr_type (t, true)->derived_types.length())));
2049 }
2050 
2051 /* Dump ODR type T and all its derived types.  INDENT specifies indentation for
2052    recursive printing.  */
2053 
2054 static void
2055 dump_odr_type (FILE *f, odr_type t, int indent=0)
2056 {
2057   unsigned int i;
2058   fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2059   print_generic_expr (f, t->type, TDF_SLIM);
2060   fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2061   fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2062   if (TYPE_NAME (t->type))
2063     {
2064       /*fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
2065 	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
2066 	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));*/
2067       if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2068         fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2069 		 IDENTIFIER_POINTER
2070 		   (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2071     }
2072   if (t->bases.length ())
2073     {
2074       fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2075       for (i = 0; i < t->bases.length (); i++)
2076 	fprintf (f, " %i", t->bases[i]->id);
2077       fprintf (f, "\n");
2078     }
2079   if (t->derived_types.length ())
2080     {
2081       fprintf (f, "%*s derived types:\n", indent * 2, "");
2082       for (i = 0; i < t->derived_types.length (); i++)
2083         dump_odr_type (f, t->derived_types[i], indent + 1);
2084     }
2085   fprintf (f, "\n");
2086 }
2087 
2088 /* Dump the type inheritance graph.  */
2089 
2090 static void
2091 dump_type_inheritance_graph (FILE *f)
2092 {
2093   unsigned int i;
2094   if (!odr_types_ptr)
2095     return;
2096   fprintf (f, "\n\nType inheritance graph:\n");
2097   for (i = 0; i < odr_types.length (); i++)
2098     {
2099       if (odr_types[i] && odr_types[i]->bases.length () == 0)
2100 	dump_odr_type (f, odr_types[i]);
2101     }
2102   for (i = 0; i < odr_types.length (); i++)
2103     {
2104       if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
2105 	{
2106 	  unsigned int j;
2107 	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
2108 	  print_node (f, "", odr_types[i]->type, 0);
2109 	  for (j = 0; j < odr_types[i]->types->length (); j++)
2110 	    {
2111 	      tree t;
2112 	      fprintf (f, "duplicate #%i\n", j);
2113 	      print_node (f, "", (*odr_types[i]->types)[j], 0);
2114 	      t = (*odr_types[i]->types)[j];
2115 	      while (TYPE_P (t) && TYPE_CONTEXT (t))
2116 		{
2117 		  t = TYPE_CONTEXT (t);
2118 	          print_node (f, "", t, 0);
2119 		}
2120 	      putc ('\n',f);
2121 	    }
2122 	}
2123     }
2124 }
2125 
2126 /* Given method type T, return type of class it belongs to.
2127    Look up this pointer and get its type.    */
2128 
2129 tree
2130 method_class_type (const_tree t)
2131 {
2132   tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
2133   gcc_assert (TREE_CODE (t) == METHOD_TYPE);
2134 
2135   return TREE_TYPE (first_parm_type);
2136 }
2137 
/* Initialize IPA devirt and build inheritance tree graph.

   Idempotent: returns immediately when the ODR hash already exists.
   Seeds the graph from every virtual method and every virtual table
   seen in the symbol table, then optionally dumps the result.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  /* Already built; nothing to do.  */
  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);
  if (in_lto_p)
    odr_vtable_hash = new odr_vtable_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
		    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
2196 
2197 /* Return true if N has reference from live virtual table
2198    (and thus can be a destination of polymorphic call).
2199    Be conservatively correct when callgraph is not built or
2200    if the method may be referred externally.  */
2201 
2202 static bool
2203 referenced_from_vtable_p (struct cgraph_node *node)
2204 {
2205   int i;
2206   struct ipa_ref *ref;
2207   bool found = false;
2208 
2209   if (node->externally_visible
2210       || DECL_EXTERNAL (node->decl)
2211       || node->used_from_other_partition)
2212     return true;
2213 
2214   /* Keep this test constant time.
2215      It is unlikely this can happen except for the case where speculative
2216      devirtualization introduced many speculative edges to this node.
2217      In this case the target is very likely alive anyway.  */
2218   if (node->ref_list.referring.length () > 100)
2219     return true;
2220 
2221   /* We need references built.  */
2222   if (symtab->state <= CONSTRUCTION)
2223     return true;
2224 
2225   for (i = 0; node->iterate_referring (i, ref); i++)
2226     if ((ref->use == IPA_REF_ALIAS
2227 	 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2228 	|| (ref->use == IPA_REF_ADDR
2229 	    && TREE_CODE (ref->referring->decl) == VAR_DECL
2230 	    && DECL_VIRTUAL_P (ref->referring->decl)))
2231       {
2232 	found = true;
2233 	break;
2234       }
2235   return found;
2236 }
2237 
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specify if program can refer to the target directly.
   if TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.

   INSERTED is the set of targets already pushed to NODES and prevents
   duplicates.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* hash_set::add returns true when TARGET was already present.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* Target is unusable; the target list is incomplete unless the type
     lives in an anonymous namespace (and we are not in ltrans).  */
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}
2324 
/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in. INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Push BINFO on the stack of vtable-carrying binfos; it is popped
     again before every return path below.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      /* Missing inner binfo indicates an earlier ODR violation.  */
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive. If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* When collecting into BASES_TO_CONSIDER we must not mark the
	 vtable as matched, so use contains rather than add.  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2435 
/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  /* Fresh binfo stack for each type we visit (see record_target_from_binfo).  */
  auto_vec <tree, 8> type_binfos;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  /* Recurse into every type derived from TYPE in the inheritance graph.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
2488 
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  /* Token (vtable index) of the virtual call being queried.  */
  HOST_WIDE_INT otr_token;
  /* Context of the call (outer type, offset, speculation flags);
     all of its components participate in hashing and equality.  */
  ipa_polymorphic_call_context context;
  /* ODR type the call goes through (OTR_TYPE).  */
  odr_type type;
  /* Memoized list of possible call targets; released on removal.  */
  vec <cgraph_node *> targets;
  /* NOTE(review): presumably state for emitting -Wsuggest-final style
     warnings about the single target — confirm against users of the
     cache outside this chunk.  */
  tree decl_warning;
  int type_warning;
  /* True when TARGETS is known to contain every possible target.  */
  bool complete;
  /* True when the query was for speculative devirtualization.  */
  bool speculative;
};
2506 
/* Polymorphic call target cache helpers.

   Hash traits used by the hash_table holding cached queries; entries
   are compared on the full query key (type, token, context,
   speculative flag) and freed via remove.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
2517 
/* Return the computed hashcode for ODR_QUERY.

   Mixes in every component that ::equal compares so that equal
   queries always hash identically.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  inchash::hash hstate (odr_query->otr_token);

  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  /* Speculative parts of the context only contribute when present.  */
  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  hstate.add_flag (odr_query->speculative);
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}
2541 
2542 /* Compare cache entries T1 and T2.  */
2543 
2544 inline bool
2545 polymorphic_call_target_hasher::equal (const value_type *t1,
2546 				       const compare_type *t2)
2547 {
2548   return (t1->type == t2->type && t1->otr_token == t2->otr_token
2549 	  && t1->speculative == t2->speculative
2550 	  && t1->context.offset == t2->context.offset
2551 	  && t1->context.speculative_offset == t2->context.speculative_offset
2552 	  && t1->context.outer_type == t2->context.outer_type
2553 	  && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2554 	  && t1->context.maybe_in_construction
2555 	      == t2->context.maybe_in_construction
2556 	  && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2557 	  && (t1->context.speculative_maybe_derived_type
2558 	      == t2->context.speculative_maybe_derived_type));
2559 }
2560 
2561 /* Remove entry in polymorphic call target cache hash.  */
2562 
2563 inline void
2564 polymorphic_call_target_hasher::remove (value_type *v)
2565 {
2566   v->targets.release ();
2567   free (v);
2568 }
2569 
/* Polymorphic call target query cache.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
/* Lazily allocated; flushed whenever a cached target node is removed
   (see free_polymorphic_call_targets_hash).  */
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2575 
2576 /* Destroy polymorphic call target query cache.  */
2577 
2578 static void
2579 free_polymorphic_call_targets_hash ()
2580 {
2581   if (cached_polymorphic_call_targets)
2582     {
2583       delete polymorphic_call_target_hash;
2584       polymorphic_call_target_hash = NULL;
2585       delete cached_polymorphic_call_targets;
2586       cached_polymorphic_call_targets = NULL;
2587     }
2588 }
2589 
2590 /* When virtual function is removed, we may need to flush the cache.  */
2591 
2592 static void
2593 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2594 {
2595   if (cached_polymorphic_call_targets
2596       && cached_polymorphic_call_targets->contains (n))
2597     free_polymorphic_call_targets_hash ();
2598 }
2599 
2600 /* Look up base of BINFO that has virtual table VTABLE with OFFSET.  */
2601 
2602 tree
2603 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2604 				tree vtable)
2605 {
2606   tree v = BINFO_VTABLE (binfo);
2607   int i;
2608   tree base_binfo;
2609   unsigned HOST_WIDE_INT this_offset;
2610 
2611   if (v)
2612     {
2613       if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2614 	gcc_unreachable ();
2615 
2616       if (offset == this_offset
2617 	  && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2618 	return binfo;
2619     }
2620 
2621   for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2622     if (polymorphic_type_binfo_p (base_binfo))
2623       {
2624 	base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2625 	if (base_binfo)
2626 	  return base_binfo;
2627       }
2628   return NULL;
2629 }
2630 
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  /* Pattern: ADDR_EXPR of a MEM_REF whose base is the address of a
     virtual VAR_DECL and whose second operand is a constant offset.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO. */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  /* After stripping the optional POINTER_PLUS_EXPR, a vtable pointer
     must be the address of some declaration.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
2678 
2679 /* T is known constant value of virtual table pointer.  Return BINFO of the
2680    instance type.  */
2681 
2682 tree
2683 vtable_pointer_value_to_binfo (const_tree t)
2684 {
2685   tree vtable;
2686   unsigned HOST_WIDE_INT offset;
2687 
2688   if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2689     return NULL_TREE;
2690 
2691   /* FIXME: for stores of construction vtables we return NULL,
2692      because we do not have BINFO for those. Eventually we should fix
2693      our representation to allow this case to be handled, too.
2694      In the case we see store of BINFO we however may assume
2695      that standard folding will be able to cope with it.  */
2696   return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2697 					 offset, vtable);
2698 }
2699 
2700 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2701    Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2702    and insert them in NODES.
2703 
2704    MATCHED_VTABLES and INSERTED is used to avoid duplicated work.  */
2705 
2706 static void
2707 record_targets_from_bases (tree otr_type,
2708 			   HOST_WIDE_INT otr_token,
2709 			   tree outer_type,
2710 			   HOST_WIDE_INT offset,
2711 			   vec <cgraph_node *> &nodes,
2712 			   hash_set<tree> *inserted,
2713 			   hash_set<tree> *matched_vtables,
2714 			   bool *completep)
2715 {
2716   while (true)
2717     {
2718       HOST_WIDE_INT pos, size;
2719       tree base_binfo;
2720       tree fld;
2721 
2722       if (types_same_for_odr (outer_type, otr_type))
2723 	return;
2724 
2725       for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2726 	{
2727 	  if (TREE_CODE (fld) != FIELD_DECL)
2728 	    continue;
2729 
2730 	  pos = int_bit_position (fld);
2731 	  size = tree_to_shwi (DECL_SIZE (fld));
2732 	  if (pos <= offset && (pos + size) > offset
2733 	      /* Do not get confused by zero sized bases.  */
2734 	      && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2735 	    break;
2736 	}
2737       /* Within a class type we should always find corresponding fields.  */
2738       gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2739 
2740       /* Nonbase types should have been stripped by outer_class_type.  */
2741       gcc_assert (DECL_ARTIFICIAL (fld));
2742 
2743       outer_type = TREE_TYPE (fld);
2744       offset -= pos;
2745 
2746       base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2747 					offset, otr_type);
2748       if (!base_binfo)
2749 	{
2750 	  gcc_assert (odr_violation_reported);
2751 	  return;
2752 	}
2753       gcc_assert (base_binfo);
2754       if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2755 	{
2756 	  bool can_refer;
2757 	  tree target = gimple_get_virt_method_for_binfo (otr_token,
2758 							  base_binfo,
2759 							  &can_refer);
2760 	  if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2761 	    maybe_record_node (nodes, target, inserted, can_refer, completep);
2762 	  matched_vtables->add (BINFO_VTABLE (base_binfo));
2763 	}
2764     }
2765 }
2766 
2767 /* When virtual table is removed, we may need to flush the cache.  */
2768 
2769 static void
2770 devirt_variable_node_removal_hook (varpool_node *n,
2771 				   void *d ATTRIBUTE_UNUSED)
2772 {
2773   if (cached_polymorphic_call_targets
2774       && DECL_VIRTUAL_P (n->decl)
2775       && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2776     free_polymorphic_call_targets_hash ();
2777 }
2778 
/* Record about how many calls would benefit from given type to be final.  */

struct odr_type_warn_count
{
  /* The ODR type that could be declared final.  */
  tree type;
  /* Number of polymorphic call sites that would benefit.  */
  int count;
  /* Accumulated profile (dynamic) count of those call sites.  */
  gcov_type dyn_count;
};
2787 
/* Record about how many calls would benefit from given method to be final.  */

struct decl_warn_count
{
  /* The method that could be declared final.  */
  tree decl;
  /* Number of polymorphic call sites that would benefit.  */
  int count;
  /* Accumulated profile (dynamic) count of those call sites.  */
  gcov_type dyn_count;
};
2796 
/* Information about type and decl warnings.  */

struct final_warning_record
{
  /* Profile count of the polymorphic call currently being analyzed;
     set by the caller before querying possible_polymorphic_call_targets.  */
  gcov_type dyn_count;
  /* Per-ODR-type records for -Wsuggest-final-types, indexed by odr_type id.  */
  vec<odr_type_warn_count> type_warnings;
  /* Per-method records for -Wsuggest-final-methods, keyed by decl.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
/* Non-NULL only while the final-warning bookkeeping is active.  */
struct final_warning_record *final_warning_records;
2806 
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within the outer type and offset
   described by CONTEXT.  If CONTEXT allows the instance to be possibly
   in construction or destruction, targets from base types are included,
   because the virtual table may temporarily point to one of base types.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   If SPECULATIVE is set, the list will not contain targets that
   are not speculatively taken.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
			           HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
			           bool *completep,
			           void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when the user does not
     ask for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
       	= new polymorphic_call_target_hash_type (23);
      /* Register removal hooks so the cache is flushed when symbols it
	 may reference disappear.  */
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					 NULL);
	}
    }

  /* In LTO mode canonicalize the types to their ODR representatives so
     equivalent queries share cache entries.  */
  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
        context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
        context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
   *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->complete;
      /* Replay the -Wsuggest-final-types/-Wsuggest-final-methods
	 bookkeeping recorded with this cache entry.  */
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	    += final_warning_records->dyn_count;
	}
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	     final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  /* Assume the list is complete until we find a reason it is not.  */
  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  /* INSERTED tracks methods already recorded; MATCHED_VTABLES tracks
     virtual tables already walked — both avoid duplicated work.  */
  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  /* Do the non-speculative walk when no speculation was requested, or
     when the speculative walk above produced no targets.  */
  if (!speculative || !nodes.length ())
    {
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      if (target)
	{
	  /* In the case we get complete method, we don't need
	     to walk derivations.  */
	  if (DECL_FINAL_P (target))
	    context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  /* With unknown derivations the list is incomplete; this is also
	     where -Wsuggest-final-* statistics are gathered.  */
	  if (!outer_type->all_derivations_known)
	    {
	      if (!speculative && final_warning_records)
		{
		  if (complete
		      && nodes.length () == 1
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
			final_warning_records->type_warnings.safe_grow_cleared
			  (odr_types.length ());
		      final_warning_records->type_warnings[outer_type->id].count++;
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && nodes.length () == 1
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			 final_warning_records->decl_warnings.get_or_insert
			    (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Store the result into the cache entry and return.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
3141 
/* Traversal callback for final_warning_records->decl_warnings: collect a
   pointer to each record into VEC so they can be sorted and reported.
   KEY is unused; presumably returning true continues the hash_map
   traversal — confirm against hash_map::traverse.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
3149 
3150 /* Dump target list TARGETS into FILE.  */
3151 
3152 static void
3153 dump_targets (FILE *f, vec <cgraph_node *> targets)
3154 {
3155   unsigned int i;
3156 
3157   for (i = 0; i < targets.length (); i++)
3158     {
3159       char *name = NULL;
3160       if (in_lto_p)
3161 	name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3162       fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
3163       if (in_lto_p)
3164 	free (name);
3165       if (!targets[i]->definition)
3166 	fprintf (f, " (no definition%s)",
3167 		 DECL_DECLARED_INLINE_P (targets[i]->decl)
3168 		 ? " inline" : "");
3169     }
3170   fprintf (f, "\n");
3171 }
3172 
/* Dump all possible targets of a polymorphic call.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int len;

  if (!type)
    return;
  /* First compute and dump the non-speculative target list.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, false);
  fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);

  ctx.dump (f);

  fprintf (f, "    %s%s%s%s\n      ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  len = targets.length ();
  dump_targets (f, targets);

  /* Then the speculative list; dump it only when it differs from the
     non-speculative one.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, true);
  if (targets.length () != len)
    {
      fprintf (f, "  Speculative targets:");
      dump_targets (f, targets);
    }
  /* Ugly: during callgraph construction the target cache may get populated
     before all targets are found.  While this is harmless (because all local
     types are discovered and only in those case we devirtualize fully and we
     don't do speculative devirtualization before IPA stage) it triggers
     assert here when dumping at that stage also populates the case with
     speculative targets.  Quietly ignore this.  */
  gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
  fprintf (f, "\n");
}
3223 
3224 
/* Return true if N can be possibly target of a polymorphic call of
   OTR_TYPE/OTR_TOKEN.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
				    HOST_WIDE_INT otr_token,
				    const ipa_polymorphic_call_context &ctx,
				    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  /* __builtin_unreachable and __builtin_trap are always acceptable
     "targets" — they mark calls proven to never happen.
     NOTE(review): DECL_FUNCTION_CODE is only meaningful for built-ins;
     the FUNCTION_TYPE check (methods have METHOD_TYPE) presumably keeps
     ordinary methods out of this test — confirm.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
	  == BUILT_IN_UNREACHABLE
          || fcode == BUILT_IN_TRAP))
    return true;

  /* Without ODR info we cannot rule anything out; answer conservatively.  */
  if (!odr_hash)
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (n->semantically_equivalent_p (targets[i]))
      return true;

  /* At a moment we allow middle end to dig out new external declarations
     as a targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}
3258 
3259 
3260 
3261 /* Return true if N can be possibly target of a polymorphic call of
3262    OBJ_TYPE_REF expression REF in STMT.  */
3263 
3264 bool
3265 possible_polymorphic_call_target_p (tree ref,
3266 				    gimple stmt,
3267 				    struct cgraph_node *n)
3268 {
3269   ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3270   tree call_fn = gimple_call_fn (stmt);
3271 
3272   return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3273 					     tree_to_uhwi
3274 					       (OBJ_TYPE_REF_TOKEN (call_fn)),
3275 					     context,
3276 					     n);
3277 }
3278 
3279 
/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  /* Nothing to update before the type inheritance graph was built.  */
  if (!odr_hash)
    return;
  /* New types may invalidate previously cached target lists.  */
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& n->real_symbol_p ())
      get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
				       true);
  timevar_pop (TV_IPA_INHERITANCE);
}
3302 
3303 
3304 /* Return true if N looks like likely target of a polymorphic call.
3305    Rule out cxa_pure_virtual, noreturns, function declared cold and
3306    other obvious cases.  */
3307 
3308 bool
3309 likely_target_p (struct cgraph_node *n)
3310 {
3311   int flags;
3312   /* cxa_pure_virtual and similar things are not likely.  */
3313   if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3314     return false;
3315   flags = flags_from_decl_or_type (n->decl);
3316   if (flags & ECF_NORETURN)
3317     return false;
3318   if (lookup_attribute ("cold",
3319 			DECL_ATTRIBUTES (n->decl)))
3320     return false;
3321   if (n->frequency < NODE_FREQUENCY_NORMAL)
3322     return false;
3323   /* If there are no live virtual tables referring the target,
3324      the only way the target can be called is an instance coming from other
3325      compilation unit; speculative devirtualization is built around an
3326      assumption that won't happen.  */
3327   if (!referenced_from_vtable_p (n))
3328     return false;
3329   return true;
3330 }
3331 
3332 /* Compare type warning records P1 and P2 and choose one with larger count;
3333    helper for qsort.  */
3334 
3335 int
3336 type_warning_cmp (const void *p1, const void *p2)
3337 {
3338   const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3339   const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3340 
3341   if (t1->dyn_count < t2->dyn_count)
3342    return 1;
3343   if (t1->dyn_count > t2->dyn_count)
3344    return -1;
3345   return t2->count - t1->count;
3346 }
3347 
3348 /* Compare decl warning records P1 and P2 and choose one with larger count;
3349    helper for qsort.  */
3350 
3351 int
3352 decl_warning_cmp (const void *p1, const void *p2)
3353 {
3354   const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3355   const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3356 
3357   if (t1->dyn_count < t2->dyn_count)
3358    return 1;
3359   if (t1->dyn_count > t2->dyn_count)
3360    return -1;
3361   return t2->count - t1->count;
3362 }
3363 
3364 
/* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
   context CTX.  Return the single likely target, or NULL when speculation
   is not possible.  */

struct cgraph_node *
try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
				  ipa_polymorphic_call_context ctx)
{
  vec <cgraph_node *>targets
     = possible_polymorphic_call_targets
	  (otr_type, otr_token, ctx, NULL, NULL, true);
  unsigned int i;
  struct cgraph_node *likely_target = NULL;

  /* Speculation only makes sense with exactly one likely target;
     bail out as soon as a second one is found.  */
  for (i = 0; i < targets.length (); i++)
    if (likely_target_p (targets[i]))
      {
	if (likely_target)
	  return NULL;
	likely_target = targets[i];
      }
  /* Do not speculate to targets we could not resolve to a local
     definition.  */
  if (!likely_target
      ||!likely_target->definition
      || DECL_EXTERNAL (likely_target->decl))
    return NULL;

  /* Don't use an implicitly-declared destructor (c++/58678).  */
  struct cgraph_node *non_thunk_target
    = likely_target->function_symbol ();
  if (DECL_ARTIFICIAL (non_thunk_target->decl))
    return NULL;
  /* Discardable interposable targets may be replaced or dropped at link
     time, so the speculation could not be verified.  */
  if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
      && likely_target->can_be_discarded_p ())
    return NULL;
  return likely_target;
}
3400 
3401 /* The ipa-devirt pass.
3402    When polymorphic call has only one likely target in the unit,
3403    turn it into a speculative call.  */
3404 
3405 static unsigned int
3406 ipa_devirt (void)
3407 {
3408   struct cgraph_node *n;
3409   hash_set<void *> bad_call_targets;
3410   struct cgraph_edge *e;
3411 
3412   int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3413   int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3414   int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3415   int ndropped = 0;
3416 
3417   if (!odr_types_ptr)
3418     return 0;
3419 
3420   if (dump_file)
3421     dump_type_inheritance_graph (dump_file);
3422 
3423   /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3424      This is implemented by setting up final_warning_records that are updated
3425      by get_polymorphic_call_targets.
3426      We need to clear cache in this case to trigger recomputation of all
3427      entries.  */
3428   if (warn_suggest_final_methods || warn_suggest_final_types)
3429     {
3430       final_warning_records = new (final_warning_record);
3431       final_warning_records->type_warnings = vNULL;
3432       final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
3433       free_polymorphic_call_targets_hash ();
3434     }
3435 
3436   FOR_EACH_DEFINED_FUNCTION (n)
3437     {
3438       bool update = false;
3439       if (!opt_for_fn (n->decl, flag_devirtualize))
3440 	continue;
3441       if (dump_file && n->indirect_calls)
3442 	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
3443 		 n->name (), n->order);
3444       for (e = n->indirect_calls; e; e = e->next_callee)
3445 	if (e->indirect_info->polymorphic)
3446 	  {
3447 	    struct cgraph_node *likely_target = NULL;
3448 	    void *cache_token;
3449 	    bool final;
3450 
3451 	    if (final_warning_records)
3452 	      final_warning_records->dyn_count = e->count;
3453 
3454 	    vec <cgraph_node *>targets
3455 	       = possible_polymorphic_call_targets
3456 		    (e, &final, &cache_token, true);
3457 	    unsigned int i;
3458 
3459 	    /* Trigger warnings by calculating non-speculative targets.  */
3460 	    if (warn_suggest_final_methods || warn_suggest_final_types)
3461 	      possible_polymorphic_call_targets (e);
3462 
3463 	    if (dump_file)
3464 	      dump_possible_polymorphic_call_targets
3465 		(dump_file, e);
3466 
3467 	    npolymorphic++;
3468 
3469 	    /* See if the call can be devirtualized by means of ipa-prop's
3470 	       polymorphic call context propagation.  If not, we can just
3471 	       forget about this call being polymorphic and avoid some heavy
3472 	       lifting in remove_unreachable_nodes that will otherwise try to
3473 	       keep all possible targets alive until inlining and in the inliner
3474 	       itself.
3475 
3476 	       This may need to be revisited once we add further ways to use
	       the may edges, but it is a reasonable thing to do right now.  */
3478 
3479 	    if ((e->indirect_info->param_index == -1
3480 		|| (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
3481 		    && e->indirect_info->vptr_changed))
3482 		&& !flag_ltrans_devirtualize)
3483 	      {
3484 		e->indirect_info->polymorphic = false;
3485 		ndropped++;
3486 	        if (dump_file)
3487 		  fprintf (dump_file, "Dropping polymorphic call info;"
3488 			   " it can not be used by ipa-prop\n");
3489 	      }
3490 
3491 	    if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
3492 	      continue;
3493 
3494 	    if (!e->maybe_hot_p ())
3495 	      {
3496 		if (dump_file)
3497 		  fprintf (dump_file, "Call is cold\n\n");
3498 		ncold++;
3499 		continue;
3500 	      }
3501 	    if (e->speculative)
3502 	      {
3503 		if (dump_file)
3504 		  fprintf (dump_file, "Call is already speculated\n\n");
3505 		nspeculated++;
3506 
3507 		/* When dumping see if we agree with speculation.  */
3508 		if (!dump_file)
3509 		  continue;
3510 	      }
3511 	    if (bad_call_targets.contains (cache_token))
3512 	      {
3513 		if (dump_file)
3514 		  fprintf (dump_file, "Target list is known to be useless\n\n");
3515 		nmultiple++;
3516 		continue;
3517 	      }
3518 	    for (i = 0; i < targets.length (); i++)
3519 	      if (likely_target_p (targets[i]))
3520 		{
3521 		  if (likely_target)
3522 		    {
3523 		      likely_target = NULL;
3524 		      if (dump_file)
3525 			fprintf (dump_file, "More than one likely target\n\n");
3526 		      nmultiple++;
3527 		      break;
3528 		    }
3529 		  likely_target = targets[i];
3530 		}
3531 	    if (!likely_target)
3532 	      {
3533 		bad_call_targets.add (cache_token);
3534 	        continue;
3535 	      }
3536 	    /* This is reached only when dumping; check if we agree or disagree
3537  	       with the speculation.  */
3538 	    if (e->speculative)
3539 	      {
3540 		struct cgraph_edge *e2;
3541 		struct ipa_ref *ref;
3542 		e->speculative_call_info (e2, e, ref);
3543 		if (e2->callee->ultimate_alias_target ()
3544 		    == likely_target->ultimate_alias_target ())
3545 		  {
3546 		    fprintf (dump_file, "We agree with speculation\n\n");
3547 		    nok++;
3548 		  }
3549 		else
3550 		  {
3551 		    fprintf (dump_file, "We disagree with speculation\n\n");
3552 		    nwrong++;
3553 		  }
3554 		continue;
3555 	      }
3556 	    if (!likely_target->definition)
3557 	      {
3558 		if (dump_file)
3559 		  fprintf (dump_file, "Target is not a definition\n\n");
3560 		nnotdefined++;
3561 		continue;
3562 	      }
	    /* Do not introduce new references to external symbols.  While we
	       can handle these just well, it is common for programs to be
	       built with headers that incorrectly declare the methods they
	       are actually linked with.  */
3567 	    if (DECL_EXTERNAL (likely_target->decl))
3568 	      {
3569 		if (dump_file)
3570 		  fprintf (dump_file, "Target is external\n\n");
3571 		nexternal++;
3572 		continue;
3573 	      }
3574 	    /* Don't use an implicitly-declared destructor (c++/58678).  */
3575 	    struct cgraph_node *non_thunk_target
3576 	      = likely_target->function_symbol ();
3577 	    if (DECL_ARTIFICIAL (non_thunk_target->decl))
3578 	      {
3579 		if (dump_file)
3580 		  fprintf (dump_file, "Target is artificial\n\n");
3581 		nartificial++;
3582 		continue;
3583 	      }
3584 	    if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3585 		&& likely_target->can_be_discarded_p ())
3586 	      {
3587 		if (dump_file)
3588 		  fprintf (dump_file, "Target is overwritable\n\n");
3589 		noverwritable++;
3590 		continue;
3591 	      }
3592 	    else if (dbg_cnt (devirt))
3593 	      {
3594 		if (dump_enabled_p ())
3595                   {
3596                     location_t locus = gimple_location_safe (e->call_stmt);
3597                     dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3598                                      "speculatively devirtualizing call in %s/%i to %s/%i\n",
3599                                      n->name (), n->order,
3600                                      likely_target->name (),
3601                                      likely_target->order);
3602                   }
3603 		if (!likely_target->can_be_discarded_p ())
3604 		  {
3605 		    cgraph_node *alias;
3606 		    alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3607 		    if (alias)
3608 		      likely_target = alias;
3609 		  }
3610 		nconverted++;
3611 		update = true;
3612 		e->make_speculative
3613 		  (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3614 	      }
3615 	  }
3616       if (update)
3617 	inline_update_overall_summary (n);
3618     }
3619   if (warn_suggest_final_methods || warn_suggest_final_types)
3620     {
3621       if (warn_suggest_final_types)
3622 	{
3623 	  final_warning_records->type_warnings.qsort (type_warning_cmp);
3624 	  for (unsigned int i = 0;
3625 	       i < final_warning_records->type_warnings.length (); i++)
3626 	    if (final_warning_records->type_warnings[i].count)
3627 	      {
3628 	        tree type = final_warning_records->type_warnings[i].type;
3629 	        int count = final_warning_records->type_warnings[i].count;
3630 	        long long dyn_count
3631 		  = final_warning_records->type_warnings[i].dyn_count;
3632 
3633 		if (!dyn_count)
3634 		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3635 			     OPT_Wsuggest_final_types, count,
3636 			     "Declaring type %qD final "
3637 			     "would enable devirtualization of %i call",
3638 			     "Declaring type %qD final "
3639 			     "would enable devirtualization of %i calls",
3640 			     type,
3641 			     count);
3642 		else
3643 		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3644 			     OPT_Wsuggest_final_types, count,
3645 			     "Declaring type %qD final "
3646 			     "would enable devirtualization of %i call "
3647 			     "executed %lli times",
3648 			     "Declaring type %qD final "
3649 			     "would enable devirtualization of %i calls "
3650 			     "executed %lli times",
3651 			     type,
3652 			     count,
3653 			     dyn_count);
3654 	      }
3655 	}
3656 
3657       if (warn_suggest_final_methods)
3658 	{
3659 	  vec<const decl_warn_count*> decl_warnings_vec = vNULL;
3660 
3661 	  final_warning_records->decl_warnings.traverse
3662 	    <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3663 	  decl_warnings_vec.qsort (decl_warning_cmp);
3664 	  for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3665 	    {
3666 	      tree decl = decl_warnings_vec[i]->decl;
3667 	      int count = decl_warnings_vec[i]->count;
3668 	      long long dyn_count = decl_warnings_vec[i]->dyn_count;
3669 
3670 	      if (!dyn_count)
3671 		if (DECL_CXX_DESTRUCTOR_P (decl))
3672 		  warning_n (DECL_SOURCE_LOCATION (decl),
3673 			      OPT_Wsuggest_final_methods, count,
3674 			      "Declaring virtual destructor of %qD final "
3675 			      "would enable devirtualization of %i call",
3676 			      "Declaring virtual destructor of %qD final "
3677 			      "would enable devirtualization of %i calls",
3678 			      DECL_CONTEXT (decl), count);
3679 		else
3680 		  warning_n (DECL_SOURCE_LOCATION (decl),
3681 			      OPT_Wsuggest_final_methods, count,
3682 			      "Declaring method %qD final "
3683 			      "would enable devirtualization of %i call",
3684 			      "Declaring method %qD final "
3685 			      "would enable devirtualization of %i calls",
3686 			      decl, count);
3687 	       else if (DECL_CXX_DESTRUCTOR_P (decl))
3688 		  warning_n (DECL_SOURCE_LOCATION (decl),
3689 			      OPT_Wsuggest_final_methods, count,
3690 			      "Declaring virtual destructor of %qD final "
3691 			      "would enable devirtualization of %i call "
3692 			      "executed %lli times",
3693 			      "Declaring virtual destructor of %qD final "
3694 			      "would enable devirtualization of %i calls "
3695 			      "executed %lli times",
3696 			      DECL_CONTEXT (decl), count, dyn_count);
3697 		else
3698 		  warning_n (DECL_SOURCE_LOCATION (decl),
3699 			      OPT_Wsuggest_final_methods, count,
3700 			      "Declaring method %qD final "
3701 			      "would enable devirtualization of %i call "
3702 			      "executed %lli times",
3703 			      "Declaring method %qD final "
3704 			      "would enable devirtualization of %i calls "
3705 			      "executed %lli times",
3706 			      decl, count, dyn_count);
3707 	    }
3708 	}
3709 
3710       delete (final_warning_records);
3711       final_warning_records = 0;
3712     }
3713 
3714   if (dump_file)
3715     fprintf (dump_file,
3716 	     "%i polymorphic calls, %i devirtualized,"
3717 	     " %i speculatively devirtualized, %i cold\n"
3718 	     "%i have multiple targets, %i overwritable,"
3719 	     " %i already speculated (%i agree, %i disagree),"
3720 	     " %i external, %i not defined, %i artificial, %i infos dropped\n",
3721 	     npolymorphic, ndevirtualized, nconverted, ncold,
3722 	     nmultiple, noverwritable, nspeculated, nok, nwrong,
3723 	     nexternal, nnotdefined, nartificial, ndropped);
3724   return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3725 }
3726 
3727 namespace {
3728 
/* Pass metadata for the IPA devirtualization pass.  The pass is a whole
   program (IPA) pass named "devirt"; it has no property or TODO
   requirements and only requests a symbol table dump when finished.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
3741 
3742 class pass_ipa_devirt : public ipa_opt_pass_d
3743 {
3744 public:
3745   pass_ipa_devirt (gcc::context *ctxt)
3746     : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3747 		      NULL, /* generate_summary */
3748 		      NULL, /* write_summary */
3749 		      NULL, /* read_summary */
3750 		      NULL, /* write_optimization_summary */
3751 		      NULL, /* read_optimization_summary */
3752 		      NULL, /* stmt_fixup */
3753 		      0, /* function_transform_todo_flags_start */
3754 		      NULL, /* function_transform */
3755 		      NULL) /* variable_transform */
3756   {}
3757 
3758   /* opt_pass methods: */
3759   virtual bool gate (function *)
3760     {
3761       /* In LTO, always run the IPA passes and decide on function basis if the
3762 	 pass is enabled.  */
3763       if (in_lto_p)
3764 	return true;
3765       return (flag_devirtualize
3766 	      && (flag_devirtualize_speculatively
3767 		  || (warn_suggest_final_methods
3768 		      || warn_suggest_final_types))
3769 	      && optimize);
3770     }
3771 
3772   virtual unsigned int execute (function *) { return ipa_devirt (); }
3773 
3774 }; // class pass_ipa_devirt
3775 
3776 } // anon namespace
3777 
3778 ipa_opt_pass_d *
3779 make_pass_ipa_devirt (gcc::context *ctxt)
3780 {
3781   return new pass_ipa_devirt (ctxt);
3782 }
3783 
3784 #include "gt-ipa-devirt.h"
3785