xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/ipa-polymorphic-call.c (revision 5dd36a3bc8bf2a9dec29ceb6349550414570c447)
1 /* Analysis of polymorphic call context.
2    Copyright (C) 2013-2017 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "tree-ssa-operands.h"
30 #include "streamer-hooks.h"
31 #include "cgraph.h"
32 #include "data-streamer.h"
33 #include "diagnostic.h"
34 #include "alias.h"
35 #include "fold-const.h"
36 #include "calls.h"
37 #include "ipa-utils.h"
38 #include "tree-dfa.h"
39 #include "gimple-pretty-print.h"
40 #include "tree-into-ssa.h"
41 #include "params.h"
42 
43 /* Return true when TYPE contains a polymorphic type and thus is interesting
44    for devirtualization machinery.  */
45 
46 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
47 			     bool consider_placement_new = true,
48 			     bool consider_bases = true);
49 
50 bool
51 contains_polymorphic_type_p (const_tree type)
52 {
53   type = TYPE_MAIN_VARIANT (type);
54 
55   if (RECORD_OR_UNION_TYPE_P (type))
56     {
57       if (TYPE_BINFO (type)
58           && polymorphic_type_binfo_p (TYPE_BINFO (type)))
59 	return true;
60       for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
61 	if (TREE_CODE (fld) == FIELD_DECL
62 	    && !DECL_ARTIFICIAL (fld)
63 	    && contains_polymorphic_type_p (TREE_TYPE (fld)))
64 	  return true;
65       return false;
66     }
67   if (TREE_CODE (type) == ARRAY_TYPE)
68     return contains_polymorphic_type_p (TREE_TYPE (type));
69   return false;
70 }
71 
72 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
73    at position CUR_OFFSET within TYPE.
74 
75    POD can be changed to an instance of a polymorphic type by
76    placement new.  Here we play it safe and assume that any
77    non-polymorphic type is POD.  */
78 bool
79 possible_placement_new (tree type, tree expected_type,
80 			HOST_WIDE_INT cur_offset)
81 {
82   if (cur_offset < 0)
83     return true;
84   return ((TREE_CODE (type) != RECORD_TYPE
85 	   || !TYPE_BINFO (type)
86 	   || cur_offset >= POINTER_SIZE
87 	   || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
88 	  && (!TYPE_SIZE (type)
89 	      || !tree_fits_shwi_p (TYPE_SIZE (type))
90 	      || (cur_offset
91 		  + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
92 		     : POINTER_SIZE)
93 		  <= tree_to_uhwi (TYPE_SIZE (type)))));
94 }
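
/* For illustration, a hedged sketch (hypothetical type A) of the situation
   possible_placement_new has to allow for:

     struct A { virtual void f (); };
     char buf[sizeof (A)];	  // plain POD buffer
     A *p = new (buf) A ();	  // placement new; BUF now holds a polymorphic A

   Any sufficiently large non-polymorphic region such as BUF may later hold an
   instance of EXPECTED_TYPE, so we only reject placements that would overlap
   an existing vtable pointer or that provably do not fit within TYPE.  */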
95 
96 /* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
97    is contained at THIS->OFFSET.  Walk the memory representation of
98    THIS->OUTER_TYPE and find the outermost class type that matches
99    OTR_TYPE or contains OTR_TYPE as a base.  Update THIS
100    to represent it.
101 
102    If OTR_TYPE is NULL, just find the outermost polymorphic type with
103    a virtual table present at position OFFSET.
104 
105    For example when THIS represents type
106    class A
107      {
108        int a;
109        class B b;
110      }
111    and we look for type at offset sizeof(int), we end up with B and offset 0.
112    If the same is produced by multiple inheritance, we end up with A and offset
113    sizeof(int).
114 
115    If we cannot find the corresponding class, give up by setting
116    THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
117    Return true when the lookup was successful.
118 
119    When CONSIDER_PLACEMENT_NEW is false, reject contexts that may only be
120    made valid by allocating a new polymorphic type inside by means
121    of placement new.
122 
123    When CONSIDER_BASES is false, only look for actual fields, not base types
124    of TYPE.  */
125 
126 bool
127 ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
128 						       bool consider_placement_new,
129 						       bool consider_bases)
130 {
131   tree type = outer_type;
132   HOST_WIDE_INT cur_offset = offset;
133   bool speculative = false;
134   bool size_unknown = false;
135   unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;
136 
137   /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set.  */
138   if (!outer_type)
139     {
140       clear_outer_type (otr_type);
141       type = otr_type;
142       cur_offset = 0;
143     }
144  /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
145     that the context is either invalid, or the instance type must be
146     derived from OUTER_TYPE.
147 
148     Because the instance type may contain a field whose type is OUTER_TYPE,
149     we cannot derive any effective information about it.
150 
151     TODO: In the case where we know all derived types, we can definitely do
152     better here.  */
153   else if (TYPE_SIZE (outer_type)
154 	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
155 	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
156 	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
157    {
158      bool der = maybe_derived_type; /* clear_outer_type will reset it.  */
159      bool dyn = dynamic;
160      clear_outer_type (otr_type);
161      type = otr_type;
162      cur_offset = 0;
163 
164      /* If a derived type is not allowed, we know that the context is invalid.
165 	For dynamic types, we really do not have information about the
166 	size of the memory location.  It is possible that a completely
167 	different type is stored after outer_type.  */
168      if (!der && !dyn)
169        {
170 	 clear_speculation ();
171 	 invalid = true;
172 	 return false;
173        }
174    }
175 
176   if (otr_type && TYPE_SIZE (otr_type)
177       && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
178     otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));
179 
180   if (!type || offset < 0)
181     goto no_useful_type_info;
182 
183   /* Find the sub-object the constant actually refers to and mark whether it is
184      an artificial one (as opposed to a user-defined one).
185 
186      This loop is performed twice; the first time for outer_type and the second
187      time for speculative_outer_type.  The second run has SPECULATIVE set.  */
188   while (true)
189     {
190       unsigned HOST_WIDE_INT pos, size;
191       tree fld;
192 
193       /* If we do not know the size of TYPE, we need to be more conservative
194          about accepting cases where we cannot find EXPECTED_TYPE.
195 	 Generally the types that do matter here are of constant size.
196 	 The size_unknown case should be very rare.  */
197       if (TYPE_SIZE (type)
198 	  && tree_fits_shwi_p (TYPE_SIZE (type))
199 	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
200 	size_unknown = false;
201       else
202 	size_unknown = true;
203 
204       /* On a match, just return what we found.  */
205       if ((otr_type
206 	   && types_odr_comparable (type, otr_type)
207 	   && types_same_for_odr (type, otr_type))
208 	  || (!otr_type
209 	      && TREE_CODE (type) == RECORD_TYPE
210 	      && TYPE_BINFO (type)
211 	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
212 	{
213 	  if (speculative)
214 	    {
215 	      /* If we did not match the offset, just give up on speculation.  */
216 	      if (cur_offset != 0
217 		  /* Also check if speculation did not end up being the same as
218 		     non-speculation.  */
219 		  || (types_must_be_same_for_odr (speculative_outer_type,
220 						  outer_type)
221 		      && (maybe_derived_type
222 			  == speculative_maybe_derived_type)))
223 		clear_speculation ();
224 	      return true;
225 	    }
226 	  else
227 	    {
228 	      /* If type is known to be final, do not worry about derived
229 		 types.  Testing it here may help us to avoid speculation.  */
230 	      if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
231 		  && (!in_lto_p || odr_type_p (outer_type))
232 		  && type_with_linkage_p (outer_type)
233 		  && type_known_to_have_no_derivations_p (outer_type))
234 		maybe_derived_type = false;
235 
236 	      /* Type cannot contain itself at a non-zero offset.  In that case
237 		 just give up.  Still accept the case where size is now known.
238 		 Either the second copy may appear past the end of type or within
239 		 the non-POD buffer located inside the variably sized type
240 		 itself.  */
241 	      if (cur_offset != 0)
242 		goto no_useful_type_info;
243 	      /* If we determined the type precisely or we have no clue on
244 		 speculation, we are done.  */
245 	      if (!maybe_derived_type || !speculative_outer_type
246 		  || !speculation_consistent_p (speculative_outer_type,
247 					        speculative_offset,
248 					        speculative_maybe_derived_type,
249 						otr_type))
250 		{
251 		  clear_speculation ();
252 	          return true;
253 		}
254 	      /* Otherwise look into speculation now.  */
255 	      else
256 		{
257 		  speculative = true;
258 		  type = speculative_outer_type;
259 		  cur_offset = speculative_offset;
260 		  continue;
261 		}
262 	    }
263 	}
264 
265       /* Walk fields and find the corresponding one at OFFSET.  */
266       if (TREE_CODE (type) == RECORD_TYPE)
267 	{
268 	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
269 	    {
270 	      if (TREE_CODE (fld) != FIELD_DECL
271 		  || TREE_TYPE (fld) == error_mark_node)
272 		continue;
273 
274 	      pos = int_bit_position (fld);
275 	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
276 		continue;
277 
278 	      /* Do not consider vptr itself.  Not even for placement new.  */
279 	      if (!pos && DECL_ARTIFICIAL (fld)
280 		  && POINTER_TYPE_P (TREE_TYPE (fld))
281 		  && TYPE_BINFO (type)
282 		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
283 		continue;
284 
285 	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
286 		goto no_useful_type_info;
287 	      size = tree_to_uhwi (DECL_SIZE (fld));
288 
289 	      /* We can always skip types smaller than pointer size:
290 		 those cannot contain a virtual table pointer.
291 
292 		 Disqualifying fields that are too small to fit OTR_TYPE
293 		 saves work needed to walk them for no benefit.
294 		 Because of the way the bases are packed into a class, the
295 		 field's size may be smaller than the type size, so this needs
296 		 to be done with care.  */
297 
298 	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
299 		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
300 				     + POINTER_SIZE
301 		  && (!otr_type
302 		      || !TYPE_SIZE (TREE_TYPE (fld))
303 		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
304 		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
305 			  >= cur_offset + otr_type_size))
306 		break;
307 	    }
308 
309 	  if (!fld)
310 	    goto no_useful_type_info;
311 
312 	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
313 	  cur_offset -= pos;
314 	  /* DECL_ARTIFICIAL represents a basetype.  */
315 	  if (!DECL_ARTIFICIAL (fld))
316 	    {
317 	      if (!speculative)
318 		{
319 		  outer_type = type;
320 		  offset = cur_offset;
321 		  /* As soon as we see a field containing the type,
322 		     we know we are not looking for derivations.  */
323 		  maybe_derived_type = false;
324 		}
325 	      else
326 		{
327 		  speculative_outer_type = type;
328 		  speculative_offset = cur_offset;
329 		  speculative_maybe_derived_type = false;
330 		}
331 	    }
332 	  else if (!consider_bases)
333 	    goto no_useful_type_info;
334 	}
335       else if (TREE_CODE (type) == ARRAY_TYPE)
336 	{
337 	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
338 
339 	  /* Give up if we don't know the array element size.
340 	     Also give up on non-polymorphic types as they are used
341 	     as buffers for placement new.  */
342 	  if (!TYPE_SIZE (subtype)
343 	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
344 	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
345 	      || !contains_polymorphic_type_p (subtype))
346 	    goto no_useful_type_info;
347 
348 	  HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
349 
350 	  /* We may see a buffer for placement new.  In this case the expected type
351 	     can be bigger than the subtype.  */
352 	  if (TYPE_SIZE (subtype)
353 	      && (cur_offset + otr_type_size
354 		  > tree_to_uhwi (TYPE_SIZE (subtype))))
355 	    goto no_useful_type_info;
356 
357 	  cur_offset = new_offset;
358 	  type = TYPE_MAIN_VARIANT (subtype);
359 	  if (!speculative)
360 	    {
361 	      outer_type = type;
362 	      offset = cur_offset;
363 	      maybe_derived_type = false;
364 	    }
365 	  else
366 	    {
367 	      speculative_outer_type = type;
368 	      speculative_offset = cur_offset;
369 	      speculative_maybe_derived_type = false;
370 	    }
371 	}
372       /* Give up on anything else.  */
373       else
374 	{
375 no_useful_type_info:
376 	  if (maybe_derived_type && !speculative
377 	      && TREE_CODE (outer_type) == RECORD_TYPE
378 	      && TREE_CODE (otr_type) == RECORD_TYPE
379 	      && TYPE_BINFO (otr_type)
380 	      && !offset
381 	      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
382 	    {
383 	      clear_outer_type (otr_type);
384 	      if (!speculative_outer_type
385 		  || !speculation_consistent_p (speculative_outer_type,
386 						speculative_offset,
387 					        speculative_maybe_derived_type,
388 						otr_type))
389 		clear_speculation ();
390 	      if (speculative_outer_type)
391 		{
392 		  speculative = true;
393 		  type = speculative_outer_type;
394 		  cur_offset = speculative_offset;
395 		}
396 	      else
397 		return true;
398 	    }
399 	  /* We found no way to embed EXPECTED_TYPE in TYPE.
400 	     We still permit two special cases - placement new and
401 	     the case of variadic types containing themselves.  */
402 	  if (!speculative
403 	      && consider_placement_new
404 	      && (size_unknown || !type || maybe_derived_type
405 		  || possible_placement_new (type, otr_type, cur_offset)))
406 	    {
407 	      /* In these weird cases we want to accept the context.
408 		 In the non-speculative run we have no useful outer_type info
409 		 (TODO: we may eventually want to record an upper bound on the
410 		  type size that can be used to prune the walk),
411 		 but we still want to consider speculation that may
412 		 give useful info.  */
413 	      if (!speculative)
414 		{
415 		  clear_outer_type (otr_type);
416 		  if (!speculative_outer_type
417 		      || !speculation_consistent_p (speculative_outer_type,
418 						    speculative_offset,
419 						    speculative_maybe_derived_type,
420 						    otr_type))
421 		    clear_speculation ();
422 		  if (speculative_outer_type)
423 		    {
424 		      speculative = true;
425 		      type = speculative_outer_type;
426 		      cur_offset = speculative_offset;
427 		    }
428 		  else
429 		    return true;
430 		}
431 	      else
432 		{
433 		  clear_speculation ();
434 	          return true;
435 		}
436 	    }
437 	  else
438 	    {
439 	      clear_speculation ();
440 	      if (speculative)
441 		return true;
442 	      clear_outer_type (otr_type);
443 	      invalid = true;
444 	      return false;
445 	    }
446 	}
447     }
448 }
449 
450 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
451    CONSIDER_PLACEMENT_NEW makes the function accept cases where OTR_TYPE can
452    be built within OUTER_TYPE by means of placement new.  CONSIDER_BASES makes
453    the function accept cases where OTR_TYPE appears as a base of OUTER_TYPE or
454    as a base of one of the fields of OUTER_TYPE.  */
455 
456 static bool
457 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
458 		 tree otr_type,
459 		 bool consider_placement_new,
460 		 bool consider_bases)
461 {
462   ipa_polymorphic_call_context context;
463 
464   /* Check that type is within range.  */
465   if (offset < 0)
466     return false;
467 
468   /* PR ipa/71207
469      As OUTER_TYPE can be a type with diamond virtual inheritance,
470      it is not necessary that INNER_TYPE will fit within OUTER_TYPE at
471      a given offset.  It can happen that INNER_TYPE also contains a base object;
472      however, it would point to the same instance in the OUTER_TYPE.  */
473 
474   context.offset = offset;
475   context.outer_type = TYPE_MAIN_VARIANT (outer_type);
476   context.maybe_derived_type = false;
477   context.dynamic = false;
478   return context.restrict_to_inner_class (otr_type, consider_placement_new,
479 					  consider_bases);
480 }
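
/* Informally, as a hedged example of the check above (hypothetical types):

     struct Inner { virtual void f (); };
     struct Outer { int pad; Inner in; };

   asking whether Outer contains Inner at bit offset
   offsetof (Outer, in) * BITS_PER_UNIT should succeed, because an Inner
   object lives there, while an offset at or past TYPE_SIZE (Outer) makes
   restrict_to_inner_class mark the context invalid.  */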
481 
482 
483 /* Return a FUNCTION_DECL if FN represents a constructor or destructor.
484    If CHECK_CLONES is true, also check for clones of ctor/dtors.  */
485 
486 tree
487 polymorphic_ctor_dtor_p (tree fn, bool check_clones)
488 {
489   if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
490       || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
491     {
492       if (!check_clones)
493 	return NULL_TREE;
494 
495       /* Watch for clones where we constant propagated the first
496 	 argument (pointer to the instance).  */
497       fn = DECL_ABSTRACT_ORIGIN (fn);
498       if (!fn
499 	  || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
500 	  || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
501 	return NULL_TREE;
502     }
503 
504   if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
505     return NULL_TREE;
506 
507   return fn;
508 }
509 
510 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
511    If CHECK_CLONES is true, also check for clones of ctor/dtors.  */
512 
513 tree
514 inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
515 {
516   tree fn = block_ultimate_origin (block);
517   if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
518     return NULL_TREE;
519 
520   return polymorphic_ctor_dtor_p (fn, check_clones);
521 }
522 
523 
524 /* We know that the instance is stored in a variable or parameter
525    (not dynamically allocated) and we want to disprove the fact
526    that it may be in construction at the invocation of CALL.
527 
528    BASE represents the memory location where the instance is stored.
529    If BASE is NULL, it is assumed to be global memory.
530    OUTER_TYPE is the known type of the instance or NULL if not
531    known.
532 
533    For the variable to be in construction we actually need to
534    be in the constructor of the corresponding global variable, or
535    the inline stack of CALL must contain the constructor.
536    Check this condition.  This check works safely only before
537    IPA passes, because inline stacks may become out of date
538    later.  */
539 
540 bool
541 decl_maybe_in_construction_p (tree base, tree outer_type,
542 			      gimple *call, tree function)
543 {
544   if (outer_type)
545     outer_type = TYPE_MAIN_VARIANT (outer_type);
546   gcc_assert (!base || DECL_P (base));
547 
548   /* After inlining the code unification optimizations may invalidate
549   /* After inlining, code unification optimizations may invalidate
550      IPA, because addresses of these may have been propagated to their
551      constructors.  */
552   if (DECL_STRUCT_FUNCTION (function)->after_inlining)
553     return true;
554 
555   /* Pure functions cannot make any changes to the dynamic type;
556      that requires writing to memory.  */
557   if ((!base || !auto_var_in_fn_p (base, function))
558       && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
559     return false;
560 
561   bool check_clones = !base || is_global_var (base);
562   for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
563        block = BLOCK_SUPERCONTEXT (block))
564     if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
565       {
566 	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
567 
568 	if (!outer_type || !types_odr_comparable (type, outer_type))
569 	  {
570 	    if (TREE_CODE (type) == RECORD_TYPE
571 		&& TYPE_BINFO (type)
572 		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
573 	      return true;
574 	  }
575  	else if (types_same_for_odr (type, outer_type))
576 	  return true;
577       }
578 
579   if (!base || (VAR_P (base) && is_global_var (base)))
580     {
581       if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
582 	  || (!DECL_CXX_CONSTRUCTOR_P (function)
583 	      && !DECL_CXX_DESTRUCTOR_P (function)))
584 	{
585 	  if (!DECL_ABSTRACT_ORIGIN (function))
586 	    return false;
587 	  /* Watch for clones where we constant propagated the first
588 	     argument (pointer to the instance).  */
589 	  function = DECL_ABSTRACT_ORIGIN (function);
590 	  if (!function
591 	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
592 	      || (!DECL_CXX_CONSTRUCTOR_P (function)
593 		  && !DECL_CXX_DESTRUCTOR_P (function)))
594 	    return false;
595 	}
596       tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (function));
597       if (!outer_type || !types_odr_comparable (type, outer_type))
598 	{
599 	  if (TREE_CODE (type) == RECORD_TYPE
600 	      && TYPE_BINFO (type)
601 	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
602 	    return true;
603 	}
604       else if (types_same_for_odr (type, outer_type))
605 	return true;
606     }
607   return false;
608 }
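
/* A hedged example of what the check above tries to disprove
   (hypothetical code):

     struct A { virtual void f (); A (); };
     A global_a;		  // built by a static constructor

   While the constructor of GLOBAL_A runs, its dynamic type is still
   settling, so a call reached from that constructor (directly or through
   the inline stack) must keep the in-construction flag; elsewhere the flag
   can be dropped.  */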
609 
610 /* Dump a human-readable context to F.  If NEWLINE is true, it will be terminated
611    by a newline.  */
612 
613 void
614 ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
615 {
616   fprintf (f, "    ");
617   if (invalid)
618     fprintf (f, "Call is known to be undefined");
619   else
620     {
621       if (useless_p ())
622 	fprintf (f, "nothing known");
623       if (outer_type || offset)
624 	{
625 	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
626 	  print_generic_expr (f, outer_type, TDF_SLIM);
627 	  if (maybe_derived_type)
628 	    fprintf (f, " (or a derived type)");
629 	  if (maybe_in_construction)
630 	    fprintf (f, " (maybe in construction)");
631 	  fprintf (f, " offset " HOST_WIDE_INT_PRINT_DEC,
632 		   offset);
633 	}
634       if (speculative_outer_type)
635 	{
636 	  if (outer_type || offset)
637 	    fprintf (f, " ");
638 	  fprintf (f, "Speculative outer type:");
639 	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
640 	  if (speculative_maybe_derived_type)
641 	    fprintf (f, " (or a derived type)");
642 	  fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC,
643 		   speculative_offset);
644 	}
645     }
646   if (newline)
647     fprintf(f, "\n");
648 }
649 
650 /* Print context to stderr.  */
651 
652 void
653 ipa_polymorphic_call_context::debug () const
654 {
655   dump (stderr);
656 }
657 
658 /* Stream out the context to OB.  */
659 
660 void
661 ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
662 {
663   struct bitpack_d bp = bitpack_create (ob->main_stream);
664 
665   bp_pack_value (&bp, invalid, 1);
666   bp_pack_value (&bp, maybe_in_construction, 1);
667   bp_pack_value (&bp, maybe_derived_type, 1);
668   bp_pack_value (&bp, speculative_maybe_derived_type, 1);
669   bp_pack_value (&bp, dynamic, 1);
670   bp_pack_value (&bp, outer_type != NULL, 1);
671   bp_pack_value (&bp, offset != 0, 1);
672   bp_pack_value (&bp, speculative_outer_type != NULL, 1);
673   streamer_write_bitpack (&bp);
674 
675   if (outer_type != NULL)
676     stream_write_tree (ob, outer_type, true);
677   if (offset)
678     streamer_write_hwi (ob, offset);
679   if (speculative_outer_type != NULL)
680     {
681       stream_write_tree (ob, speculative_outer_type, true);
682       streamer_write_hwi (ob, speculative_offset);
683     }
684   else
685     gcc_assert (!speculative_offset);
686 }
687 
688 /* Stream in the context from IB and DATA_IN.  */
689 
690 void
691 ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
692 					 struct data_in *data_in)
693 {
694   struct bitpack_d bp = streamer_read_bitpack (ib);
695 
696   invalid = bp_unpack_value (&bp, 1);
697   maybe_in_construction = bp_unpack_value (&bp, 1);
698   maybe_derived_type = bp_unpack_value (&bp, 1);
699   speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
700   dynamic = bp_unpack_value (&bp, 1);
701   bool outer_type_p = bp_unpack_value (&bp, 1);
702   bool offset_p = bp_unpack_value (&bp, 1);
703   bool speculative_outer_type_p = bp_unpack_value (&bp, 1);
704 
705   if (outer_type_p)
706     outer_type = stream_read_tree (ib, data_in);
707   else
708     outer_type = NULL;
709   if (offset_p)
710     offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
711   else
712     offset = 0;
713   if (speculative_outer_type_p)
714     {
715       speculative_outer_type = stream_read_tree (ib, data_in);
716       speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
717     }
718   else
719     {
720       speculative_outer_type = NULL;
721       speculative_offset = 0;
722     }
723 }
724 
725 /* Produce a polymorphic call context for a call to a method of an instance
726    that is located within BASE (which is assumed to be a decl) at offset OFF. */
727 
728 void
729 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
730 {
731   gcc_assert (DECL_P (base));
732   clear_speculation ();
733 
734   if (!contains_polymorphic_type_p (TREE_TYPE (base)))
735     {
736       clear_outer_type ();
737       offset = off;
738       return;
739     }
740   outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
741   offset = off;
742   /* Make very conservative assumption that all objects
743      may be in construction.
744 
745      It is up to caller to revisit this via
746      get_dynamic_type or decl_maybe_in_construction_p.  */
747   maybe_in_construction = true;
748   maybe_derived_type = false;
749   dynamic = false;
750 }
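
/* A hedged usage sketch (hypothetical VAR_DECL A_VAR of a polymorphic
   type A):

     ipa_polymorphic_call_context ctx;
     ctx.set_by_decl (a_var, 0);  // outer type A at offset 0,
				  // maybe_in_construction conservatively set

   It is then up to the caller to refine the in-construction flag via
   decl_maybe_in_construction_p or get_dynamic_type.  */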
751 
752 /* CST is an invariant (address of decl); try to get a meaningful
753    polymorphic call context for a polymorphic call of a method
754    of an instance of OTR_TYPE that is located at offset OFF of this invariant.
755    Return FALSE if nothing meaningful can be found.  */
756 
757 bool
758 ipa_polymorphic_call_context::set_by_invariant (tree cst,
759 						tree otr_type,
760 						HOST_WIDE_INT off)
761 {
762   HOST_WIDE_INT offset2, size, max_size;
763   bool reverse;
764   tree base;
765 
766   invalid = false;
767   off = 0;
768   clear_outer_type (otr_type);
769 
770   if (TREE_CODE (cst) != ADDR_EXPR)
771     return false;
772 
773   cst = TREE_OPERAND (cst, 0);
774   base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
775   if (!DECL_P (base) || max_size == -1 || max_size != size)
776     return false;
777 
778   /* Only type inconsistent programs can have otr_type that is
779      not part of outer type.  */
780   if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
781     return false;
782 
783   set_by_decl (base, off);
784   return true;
785 }
786 
787 /* See if OP is an SSA name initialized as a copy or by a single assignment.
788    If so, walk the SSA graph up.  Because a simple PHI conditional is considered
789    a copy, GLOBAL_VISITED may be used to avoid an infinite loop when walking the
790    SSA graph.  */
791 
792 static tree
793 walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
794 {
795   hash_set <tree> *visited = NULL;
796   STRIP_NOPS (op);
797   while (TREE_CODE (op) == SSA_NAME
798 	 && !SSA_NAME_IS_DEFAULT_DEF (op)
799 	 /* We might be called via fold_stmt during cfgcleanup where
800 	    SSA form need not be up-to-date.  */
801 	 && !name_registered_for_update_p (op)
802 	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
803 	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
804     {
805       if (global_visited)
806 	{
807 	  if (!*global_visited)
808 	    *global_visited = new hash_set<tree>;
809 	  if ((*global_visited)->add (op))
810 	    goto done;
811 	}
812       else
813 	{
814 	  if (!visited)
815 	    visited = new hash_set<tree>;
816 	  if (visited->add (op))
817 	    goto done;
818 	}
819       /* Special case
820 	 if (ptr == 0)
821 	   ptr = 0;
822 	 else
823 	   ptr = ptr.foo;
824 	 This pattern is implicitly produced for casts to non-primary
825 	 bases.  When doing context analysis, we do not really care
826 	 about the case where the pointer is NULL, because the call will be
827 	 undefined anyway.  */
828       if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
829 	{
830 	  gimple *phi = SSA_NAME_DEF_STMT (op);
831 
832 	  if (gimple_phi_num_args (phi) > 2)
833 	    goto done;
834 	  if (gimple_phi_num_args (phi) == 1)
835 	    op = gimple_phi_arg_def (phi, 0);
836 	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
837 	    op = gimple_phi_arg_def (phi, 1);
838 	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
839 	    op = gimple_phi_arg_def (phi, 0);
840 	  else
841 	    goto done;
842 	}
843       else
844 	{
845 	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
846 	    goto done;
847 	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
848 	}
849       STRIP_NOPS (op);
850     }
851 done:
852   if (visited)
853     delete (visited);
854   return op;
855 }
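
/* A hedged sketch of the kind of GIMPLE chain the walk above follows
   (hypothetical SSA names):

     ptr_3 = &obj;
     ptr_5 = ptr_3;		  // single-assignment copy
     ptr_7 = PHI <ptr_5, 0B>;	  // NULL-test pattern described above

   Starting from ptr_7, walk_ssa_copies skips the NULL arm of the PHI and the
   plain copy and returns &obj.  */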
856 
857 /* Create polymorphic call context from IP invariant CST.
858    This is typically &global_var.
859    OTR_TYPE specifies the type of the polymorphic call or NULL if unknown; OFF
860    is the offset of the call.  */
861 
862 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
863 							    tree otr_type,
864 							    HOST_WIDE_INT off)
865 {
866   clear_speculation ();
867   set_by_invariant (cst, otr_type, off);
868 }
869 
870 /* Build context for pointer REF contained in FNDECL at statement STMT.
871    If INSTANCE is non-NULL, return a pointer to the object described by
872    the context or the DECL the context is contained in.  */
873 
874 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
875 							    tree ref,
876 							    gimple *stmt,
877 							    tree *instance)
878 {
879   tree otr_type = NULL;
880   tree base_pointer;
881   hash_set <tree> *visited = NULL;
882 
883   if (TREE_CODE (ref) == OBJ_TYPE_REF)
884     {
885       otr_type = obj_type_ref_class (ref);
886       base_pointer = OBJ_TYPE_REF_OBJECT (ref);
887     }
888   else
889     base_pointer = ref;
890 
891   /* Set up basic info in case we find nothing interesting in the analysis.  */
892   clear_speculation ();
893   clear_outer_type (otr_type);
894   invalid = false;
895 
896   /* Walk SSA for outer object.  */
897   while (true)
898     {
899       base_pointer = walk_ssa_copies (base_pointer, &visited);
900       if (TREE_CODE (base_pointer) == ADDR_EXPR)
901 	{
902 	  HOST_WIDE_INT size, max_size;
903 	  HOST_WIDE_INT offset2;
904 	  bool reverse;
905 	  tree base
906 	    = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
907 				       &offset2, &size, &max_size, &reverse);
908 
909 	  if (max_size != -1 && max_size == size)
910 	    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
911 				      offset + offset2,
912 				      true,
913 				      NULL /* Do not change outer type.  */);
914 
915 	  /* If this is a varying address, punt.  */
916 	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
917 	      && max_size != -1
918 	      && max_size == size)
919 	    {
920 	      /* We found a dereference of a pointer.  The type of the pointer
921 		 and MEM_REF is meaningless, but we can look further.  */
922 	      if (TREE_CODE (base) == MEM_REF)
923 		{
924 		  base_pointer = TREE_OPERAND (base, 0);
925 		  offset
926 		    += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
927 		  outer_type = NULL;
928 		}
929 	      /* We found base object.  In this case the outer_type
930 		 is known.  */
931 	      else if (DECL_P (base))
932 		{
933 		  if (visited)
934 		    delete (visited);
935 		  /* Only type inconsistent programs can have otr_type that is
936 		     not part of outer type.  */
937 		  if (otr_type
938 		      && !contains_type_p (TREE_TYPE (base),
939 					   offset + offset2, otr_type))
940 		    {
941 		      invalid = true;
942 		      if (instance)
943 			*instance = base_pointer;
944 		      return;
945 		    }
946 		  set_by_decl (base, offset + offset2);
947 		  if (outer_type && maybe_in_construction && stmt)
948 		    maybe_in_construction
949 		     = decl_maybe_in_construction_p (base,
950 						     outer_type,
951 						     stmt,
952 						     fndecl);
953 		  if (instance)
954 		    *instance = base;
955 		  return;
956 		}
957 	      else
958 		break;
959 	    }
960 	  else
961 	    break;
962 	}
963       else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
964 	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
965 	{
966 	  offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
967 		    * BITS_PER_UNIT;
968 	  base_pointer = TREE_OPERAND (base_pointer, 0);
969 	}
970       else
971 	break;
972     }
973 
974   if (visited)
975     delete (visited);
976 
977   /* Try to determine type of the outer object.  */
978   if (TREE_CODE (base_pointer) == SSA_NAME
979       && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
980       && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
981     {
982       /* See if the parameter is the THIS pointer of a method.  */
983       if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
984 	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
985 	{
986 	  outer_type
987 	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
988 	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
989 		      || TREE_CODE (outer_type) == UNION_TYPE);
990 
991 	  /* Dynamic casting has possibly upcasted the type
992 	     in the hierarchy.  In this case the outer type is less
993 	     informative than the inner type and we should forget
994 	     about it.  */
995 	  if ((otr_type
996 	       && !contains_type_p (outer_type, offset,
997 				    otr_type))
998 	      || !contains_polymorphic_type_p (outer_type))
999 	    {
1000 	      outer_type = NULL;
1001 	      if (instance)
1002 		*instance = base_pointer;
1003 	      return;
1004 	    }
1005 
1006 	  dynamic = true;
1007 
1008 	  /* If the function is constructor or destructor, then
1009 	     the type is possibly in construction, but we know
1010 	     it is not derived type.  */
1011 	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1012 	      || DECL_CXX_DESTRUCTOR_P (fndecl))
1013 	    {
1014 	      maybe_in_construction = true;
1015 	      maybe_derived_type = false;
1016 	    }
1017 	  else
1018 	    {
1019 	      maybe_derived_type = true;
1020 	      maybe_in_construction = false;
1021 	    }
1022 	  if (instance)
1023 	    *instance = base_pointer;
1024 	  return;
1025 	}
1026       /* Non-PODs passed by value are really passed by invisible
1027 	 reference.  In this case we also know the type of the
1028 	 object.  */
1029       if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1030 	{
1031 	  outer_type
1032 	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1033 	  /* Only type inconsistent programs can have otr_type that is
1034 	     not part of outer type.  */
1035 	  if (otr_type && !contains_type_p (outer_type, offset,
1036 					    otr_type))
1037 	    {
1038 	      invalid = true;
1039 	      if (instance)
1040 		*instance = base_pointer;
1041 	      return;
1042 	    }
1043 	  /* Non-polymorphic types have no interest for us.  */
1044 	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
1045 	    {
1046 	      outer_type = NULL;
1047 	      if (instance)
1048 		*instance = base_pointer;
1049 	      return;
1050 	    }
1051 	  maybe_derived_type = false;
1052 	  maybe_in_construction = false;
1053 	  if (instance)
1054 	    *instance = base_pointer;
1055 	  return;
1056 	}
1057     }
1058 
1059   tree base_type = TREE_TYPE (base_pointer);
1060 
1061   if (TREE_CODE (base_pointer) == SSA_NAME
1062       && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1063       && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
1064 	   || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
1065     {
1066       invalid = true;
1067       if (instance)
1068 	*instance = base_pointer;
1069       return;
1070     }
1071   if (TREE_CODE (base_pointer) == SSA_NAME
1072       && SSA_NAME_DEF_STMT (base_pointer)
1073       && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1074     base_type = TREE_TYPE (gimple_assign_rhs1
1075 			    (SSA_NAME_DEF_STMT (base_pointer)));
1076 
1077   if (base_type && POINTER_TYPE_P (base_type))
1078     combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
1079 			      offset,
1080 			      true, NULL /* Do not change type here */);
1081   /* TODO: There are multiple ways to derive a type.  For instance,
1082      BASE_POINTER may be passed to a constructor call prior to our reference.
1083      We do not do this kind of flow-sensitive analysis yet.  */
1084   if (instance)
1085     *instance = base_pointer;
1086   return;
1087 }
1088 
1089 /* Structure to be passed in between detect_type_change and
1090    check_stmt_for_type_change.  */
1091 
1092 struct type_change_info
1093 {
1094   /* Offset into the object where there is the virtual method pointer we are
1095      looking for.  */
1096   HOST_WIDE_INT offset;
1097   /* The declaration or SSA_NAME pointer of the base that we are checking for
1098      type change.  */
1099   tree instance;
1100   /* The reference to virtual table pointer used.  */
1101   tree vtbl_ptr_ref;
1102   tree otr_type;
1103   /* If we actually can tell the type that the object has changed to, it is
1104      stored in this field.  Otherwise it remains NULL_TREE.  */
1105   tree known_current_type;
1106   HOST_WIDE_INT known_current_offset;
1107 
1108   /* Set to nonzero if we possibly missed some dynamic type changes and we
1109      should consider the set to be speculative.  */
1110   unsigned speculative;
1111 
1112   /* Set to true if dynamic type change has been detected.  */
1113   bool type_maybe_changed;
1114   /* Set to true if multiple types have been encountered.  known_current_type
1115      must be disregarded in that case.  */
1116   bool multiple_types_encountered;
1117   bool seen_unanalyzed_store;
1118 };
1119 
1120 /* Return true if STMT is not a call and can modify a virtual method table
1121    pointer.  We take advantage of the fact that vtable stores must appear
1122    within constructor and destructor functions.  */
1123 
1124 static bool
1125 noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt)
1126 {
1127   if (is_gimple_assign (stmt))
1128     {
1129       tree lhs = gimple_assign_lhs (stmt);
1130 
1131       if (gimple_clobber_p (stmt))
1132 	return false;
1133       if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
1134 	{
1135 	  if (flag_strict_aliasing
1136 	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
1137 	    return false;
1138 
1139 	  if (TREE_CODE (lhs) == COMPONENT_REF
1140 	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1141 	    return false;
1142 	  /* In the future we might want to use get_base_ref_and_offset to find
1143 	     if there is a field corresponding to the offset and if so, proceed
1144 	     almost like if it was a component ref.  */
1145 	}
1146     }
1147 
1148   /* Code unification may mess with inline stacks.  */
1149   if (cfun->after_inlining)
1150     return true;
1151 
1152   /* Walk the inline stack and watch out for ctors/dtors.
1153      TODO: Maybe we can require the store to appear in toplevel
1154      block of CTOR/DTOR.  */
1155   for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
1156        block = BLOCK_SUPERCONTEXT (block))
1157     if (BLOCK_ABSTRACT_ORIGIN (block)
1158 	&& TREE_CODE (block_ultimate_origin (block)) == FUNCTION_DECL)
1159       return inlined_polymorphic_ctor_dtor_block_p (block, false);
1160   return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
1161 	  && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
1162 	      || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
1163 }
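
/* The kind of statement the predicate above must treat as a possible vtable
   pointer store is the inlined store emitted by a constructor or destructor,
   e.g. (a hedged sketch, hypothetical type A):

     MEM[(struct A *)this_2(D)]._vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   Non-aggregate stores whose LHS does not have pointer type can be filtered
   out under strict aliasing, because a vtable pointer store always stores a
   pointer.  */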
1164 
1165 /* If STMT can be proved to be an assignment to the virtual method table
1166    pointer of ANALYZED_OBJ and the type associated with the new table can be
1167    identified, return the type.  Otherwise return NULL_TREE if the type changes
1168    in an unknown way, or ERROR_MARK_NODE if the type is unchanged.  */
1169 
1170 static tree
1171 extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
1172 			       HOST_WIDE_INT *type_offset)
1173 {
1174   HOST_WIDE_INT offset, size, max_size;
1175   tree lhs, rhs, base;
1176   bool reverse;
1177 
1178   if (!gimple_assign_single_p (stmt))
1179     return NULL_TREE;
1180 
1181   lhs = gimple_assign_lhs (stmt);
1182   rhs = gimple_assign_rhs1 (stmt);
1183   if (TREE_CODE (lhs) != COMPONENT_REF
1184       || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1185      {
1186 	if (dump_file)
1187 	  fprintf (dump_file, "  LHS is not virtual table.\n");
1188 	return NULL_TREE;
1189      }
1190 
1191   if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
1192     ;
1193   else
1194     {
1195       base = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
1196       if (DECL_P (tci->instance))
1197 	{
1198 	  if (base != tci->instance)
1199 	    {
1200 	      if (dump_file)
1201 		{
1202 		  fprintf (dump_file, "    base:");
1203 		  print_generic_expr (dump_file, base, TDF_SLIM);
1204 		  fprintf (dump_file, " does not match instance:");
1205 		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1206 		  fprintf (dump_file, "\n");
1207 		}
1208 	      return NULL_TREE;
1209 	    }
1210 	}
1211       else if (TREE_CODE (base) == MEM_REF)
1212 	{
1213 	  if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
1214 	    {
1215 	      if (dump_file)
1216 		{
1217 		  fprintf (dump_file, "    base mem ref:");
1218 		  print_generic_expr (dump_file, base, TDF_SLIM);
1219 		  fprintf (dump_file, " does not match instance:");
1220 		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1221 		  fprintf (dump_file, "\n");
1222 		}
1223 	      return NULL_TREE;
1224 	    }
1225 	  if (!integer_zerop (TREE_OPERAND (base, 1)))
1226 	    {
1227 	      if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
1228 		{
1229 		  if (dump_file)
1230 		    {
1231 		      fprintf (dump_file, "    base mem ref:");
1232 		      print_generic_expr (dump_file, base, TDF_SLIM);
1233 		      fprintf (dump_file, " has non-representable offset:");
1234 		      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1235 		      fprintf (dump_file, "\n");
1236 		    }
1237 		  return NULL_TREE;
1238 		}
1239 	      else
1240 	        offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
1241 	    }
1242 	}
1243       else if (!operand_equal_p (tci->instance, base, 0)
1244 	       || tci->offset)
1245 	{
1246 	  if (dump_file)
1247 	    {
1248 	      fprintf (dump_file, "    base:");
1249 	      print_generic_expr (dump_file, base, TDF_SLIM);
1250 	      fprintf (dump_file, " does not match instance:");
1251 	      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1252 	      fprintf (dump_file, " with offset %i\n", (int)tci->offset);
1253 	    }
1254 	  return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
1255 	}
1256       if (offset != tci->offset
1257 	  || size != POINTER_SIZE
1258 	  || max_size != POINTER_SIZE)
1259 	{
1260 	  if (dump_file)
1261 	    fprintf (dump_file, "    wrong offset %i!=%i or size %i\n",
1262 		     (int)offset, (int)tci->offset, (int)size);
1263 	  return offset + POINTER_SIZE <= tci->offset
1264 	         || (max_size != -1
1265 		     && tci->offset + POINTER_SIZE > offset + max_size)
1266 		 ? error_mark_node : NULL;
1267 	}
1268     }
1269 
1270   tree vtable;
1271   unsigned HOST_WIDE_INT offset2;
1272 
1273   if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
1274     {
1275       if (dump_file)
1276 	fprintf (dump_file, "    Failed to lookup binfo\n");
1277       return NULL;
1278     }
1279 
1280   tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1281 					       offset2, vtable);
1282   if (!binfo)
1283     {
1284       if (dump_file)
1285 	fprintf (dump_file, "    Construction vtable used\n");
1286       /* FIXME: We should support construction contexts.  */
1287       return NULL;
1288     }
1289 
1290   *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
1291   return DECL_CONTEXT (vtable);
1292 }
1293 
1294 /* Record dynamic type change of TCI to TYPE.  */
1295 
1296 static void
1297 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
1298 {
1299   if (dump_file)
1300     {
1301       if (type)
1302 	{
1303           fprintf (dump_file, "  Recording type: ");
1304 	  print_generic_expr (dump_file, type, TDF_SLIM);
1305           fprintf (dump_file, " at offset %i\n", (int)offset);
1306 	}
1307      else
1308        fprintf (dump_file, "  Recording unknown type\n");
1309     }
1310 
1311   /* If we found a constructor of a type that is not polymorphic or
1312      that may contain the type in question as a field (not as a base),
1313      restrict to the inner class first to make the type matching below
1314      happier.  */
1315   if (type
1316       && (offset
1317           || (TREE_CODE (type) != RECORD_TYPE
1318 	      || !TYPE_BINFO (type)
1319 	      || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
1320     {
1321       ipa_polymorphic_call_context context;
1322 
1323       context.offset = offset;
1324       context.outer_type = type;
1325       context.maybe_in_construction = false;
1326       context.maybe_derived_type = false;
1327       context.dynamic = true;
1328       /* If we failed to find the inner type, we know that the call
1329 	 would be undefined for type produced here.  */
1330       if (!context.restrict_to_inner_class (tci->otr_type))
1331 	{
1332 	  if (dump_file)
1333 	    fprintf (dump_file, "  Ignoring; does not contain otr_type\n");
1334 	  return;
1335 	}
1336       /* Watch for the case where we reached a POD type and anticipate
1337 	 placement new.  */
1338       if (!context.maybe_derived_type)
1339 	{
1340           type = context.outer_type;
1341           offset = context.offset;
1342 	}
1343     }
1344   if (tci->type_maybe_changed
1345       && (!types_same_for_odr (type, tci->known_current_type)
1346 	  || offset != tci->known_current_offset))
1347     tci->multiple_types_encountered = true;
1348   tci->known_current_type = TYPE_MAIN_VARIANT (type);
1349   tci->known_current_offset = offset;
1350   tci->type_maybe_changed = true;
1351 }
1352 
1353 
1354 /* The maximum number of may-defs we visit when looking for a must-def
1355    that changes the dynamic type in check_stmt_for_type_change.  Tuned
1356    after the PR12392 testcase which, when unlimited, spends 40% of its time
1357    within these alias walks and 8% with the following limit.  */
1358 
1359 static inline bool
1360 csftc_abort_walking_p (unsigned speculative)
1361 {
1362   unsigned max = PARAM_VALUE (PARAM_MAX_SPECULATIVE_DEVIRT_MAYDEFS);
1363   return speculative > max ? true : false;
1364 }
1365 
1366 /* Callback of walk_aliased_vdefs and a helper function for
1367    detect_type_change to check whether a particular statement may modify
1368    the virtual table pointer, and if possible also determine the new type of
1369    the (sub-)object.  It stores its result into DATA, which points to a
1370    type_change_info structure.  */
1371 
1372 static bool
1373 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
1374 {
1375   gimple *stmt = SSA_NAME_DEF_STMT (vdef);
1376   struct type_change_info *tci = (struct type_change_info *) data;
1377   tree fn;
1378 
1379   /* If we already gave up, just terminate the rest of walk.  */
1380   if (tci->multiple_types_encountered)
1381     return true;
1382 
1383   if (is_gimple_call (stmt))
1384     {
1385       if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
1386 	return false;
1387 
1388       /* Check for a constructor call.  */
1389       if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
1390 	  && DECL_CXX_CONSTRUCTOR_P (fn)
1391 	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
1392 	  && gimple_call_num_args (stmt))
1393       {
1394 	tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
1395 	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
1396 	HOST_WIDE_INT offset = 0, size, max_size;
1397 	bool reverse;
1398 
1399 	if (dump_file)
1400 	  {
1401 	    fprintf (dump_file, "  Checking constructor call: ");
1402 	    print_gimple_stmt (dump_file, stmt, 0, 0);
1403 	  }
1404 
1405 	/* See if the THIS parameter seems like the instance pointer.  */
1406 	if (TREE_CODE (op) == ADDR_EXPR)
1407 	  {
1408 	    op = get_ref_base_and_extent (TREE_OPERAND (op, 0), &offset,
1409 					  &size, &max_size, &reverse);
1410 	    if (size != max_size || max_size == -1)
1411 	      {
1412                 tci->speculative++;
1413 	        return csftc_abort_walking_p (tci->speculative);
1414 	      }
1415 	    if (op && TREE_CODE (op) == MEM_REF)
1416 	      {
1417 		if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
1418 		  {
1419                     tci->speculative++;
1420 		    return csftc_abort_walking_p (tci->speculative);
1421 		  }
1422 		offset += tree_to_shwi (TREE_OPERAND (op, 1))
1423 			  * BITS_PER_UNIT;
1424 		op = TREE_OPERAND (op, 0);
1425 	      }
1426 	    else if (DECL_P (op))
1427 	      ;
1428 	    else
1429 	      {
1430                 tci->speculative++;
1431 	        return csftc_abort_walking_p (tci->speculative);
1432 	      }
1433 	    op = walk_ssa_copies (op);
1434 	  }
1435 	if (operand_equal_p (op, tci->instance, 0)
1436 	    && TYPE_SIZE (type)
1437 	    && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
1438 	    && tree_fits_shwi_p (TYPE_SIZE (type))
1439 	    && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset
1440 	    /* Some inlined constructors may look as follows:
1441 		  _3 = operator new (16);
1442 		  MEM[(struct  &)_3] ={v} {CLOBBER};
1443 		  MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
1444 		    = &MEM[(void *)&_ZTV14CompositeClass + 16B];
1445 		  _7 = &MEM[(struct CompositeClass *)_3].object;
1446 		  EmptyClass::EmptyClass (_7);
1447 
1448 	       When determining dynamic type of _3 and because we stop at first
1449 	       dynamic type found, we would stop on EmptyClass::EmptyClass (_7).
1450 	       In this case EmptyClass is not even polymorphic and we miss that
1451 	       it is contained in an outer type that is polymorphic.  */
1452 
1453 	    && (tci->offset == offset || contains_polymorphic_type_p (type)))
1454 	  {
1455 	    record_known_type (tci, type, tci->offset - offset);
1456 	    return true;
1457 	  }
1458       }
1459      /* Calls may possibly change the dynamic type by placement new.  Assume
1460         it will not happen, but make the result speculative only.  */
1461      if (dump_file)
1462 	{
1463           fprintf (dump_file, "  Function call may change dynamic type:");
1464 	  print_gimple_stmt (dump_file, stmt, 0, 0);
1465 	}
1466      tci->speculative++;
1467      return csftc_abort_walking_p (tci->speculative);
1468    }
1469   /* Check for inlined virtual table store.  */
1470   else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
1471     {
1472       tree type;
1473       HOST_WIDE_INT offset = 0;
1474       if (dump_file)
1475 	{
1476 	  fprintf (dump_file, "  Checking vtbl store: ");
1477 	  print_gimple_stmt (dump_file, stmt, 0, 0);
1478 	}
1479 
1480       type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
1481       if (type == error_mark_node)
1482 	return false;
1483       gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
1484       if (!type)
1485 	{
1486 	  if (dump_file)
1487 	    fprintf (dump_file, "  Unanalyzed store may change type.\n");
1488 	  tci->seen_unanalyzed_store = true;
1489 	  tci->speculative++;
1490 	}
1491       else
1492         record_known_type (tci, type, offset);
1493       return true;
1494     }
1495   else
1496     return false;
1497 }
1498 
1499 /* THIS is a polymorphic call context obtained from get_polymorphic_context.
1500    OTR_OBJECT is a pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1501    INSTANCE is a pointer to the outer instance as returned by
1502    get_polymorphic_context.  To avoid creation of temporary expressions,
1503    INSTANCE may also be a declaration, if get_polymorphic_context found the
1504    value to be in static storage.
1505 
1506    If the type of instance is not fully determined
1507    (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1508    is set), try to walk memory writes and find the actual construction of the
1509    instance.
1510 
1511    Return true if memory is unchanged from function entry.
1512 
1513    We do not include this analysis in the context analysis itself, because
1514    it needs memory SSA to be fully built and the walk may be expensive.
1515    So it is not suitable for use within fold_stmt and similar uses.  */
1516 
1517 bool
1518 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1519 						tree otr_object,
1520 						tree otr_type,
1521 						gimple *call)
1522 {
1523   struct type_change_info tci;
1524   ao_ref ao;
1525   bool function_entry_reached = false;
1526   tree instance_ref = NULL;
1527   gimple *stmt = call;
1528   /* Remember OFFSET before it is modified by restrict_to_inner_class.
1529      This is because we do not update INSTANCE when walking inwards.  */
1530   HOST_WIDE_INT instance_offset = offset;
1531   tree instance_outer_type = outer_type;
1532 
1533   if (otr_type)
1534     otr_type = TYPE_MAIN_VARIANT (otr_type);
1535 
1536   /* Walk into the inner type.  This may clear maybe_derived_type and save us
1537      from useless work.  It also makes later comparisons with the static type
1538      easier.  */
1539   if (outer_type && otr_type)
1540     {
1541       if (!restrict_to_inner_class (otr_type))
1542         return false;
1543     }
1544 
1545   if (!maybe_in_construction && !maybe_derived_type)
1546     return false;
1547 
1548   /* If we are in fact not looking at any object, or the instance is
1549      some placement new into a random load, give up straight away.  */
1550   if (TREE_CODE (instance) == MEM_REF)
1551     return false;
1552 
1553   /* We need to obtain a reference to the virtual table pointer.  It is better
1554      to look it up in the code rather than build our own.  This requires a bit
1555      of pattern matching, but we end up verifying that what we found is
1556      correct.
1557 
1558      What we pattern match is:
1559 
1560        tmp = instance->_vptr.A;   // vtbl ptr load
1561        tmp2 = tmp[otr_token];	  // vtable lookup
1562        OBJ_TYPE_REF(tmp2;instance->0) (instance);
1563 
1564      We want to start alias oracle walk from vtbl pointer load,
1565      but we may not be able to identify it, for example, when PRE moved the
1566      load around.  */
1567 
1568   if (gimple_code (call) == GIMPLE_CALL)
1569     {
1570       tree ref = gimple_call_fn (call);
1571       HOST_WIDE_INT offset2, size, max_size;
1572       bool reverse;
1573 
1574       if (TREE_CODE (ref) == OBJ_TYPE_REF)
1575 	{
1576 	  ref = OBJ_TYPE_REF_EXPR (ref);
1577 	  ref = walk_ssa_copies (ref);
1578 
1579 	  /* If the call target is already known, there is no need to do the
1580 	     expensive memory walk.  */
1581 	  if (is_gimple_min_invariant (ref))
1582 	    return false;
1583 
1584 	  /* Check if definition looks like vtable lookup.  */
1585 	  if (TREE_CODE (ref) == SSA_NAME
1586 	      && !SSA_NAME_IS_DEFAULT_DEF (ref)
1587 	      && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1588 	      && TREE_CODE (gimple_assign_rhs1
1589 			     (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1590 	    {
1591 	      ref = get_base_address
1592 		     (TREE_OPERAND (gimple_assign_rhs1
1593 				     (SSA_NAME_DEF_STMT (ref)), 0));
1594 	      ref = walk_ssa_copies (ref);
1595 	      /* Find the base address of the lookup and see if it looks like
1596 		 a vptr load.  */
1597 	      if (TREE_CODE (ref) == SSA_NAME
1598 		  && !SSA_NAME_IS_DEFAULT_DEF (ref)
1599 		  && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1600 		{
1601 		  tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1602 		  tree base_ref
1603 		    = get_ref_base_and_extent (ref_exp, &offset2, &size,
1604 					       &max_size, &reverse);
1605 
1606 		  /* Finally verify that what we found looks like read from
1607 		     OTR_OBJECT or from INSTANCE with offset OFFSET.  */
1608 		  if (base_ref
1609 		      && ((TREE_CODE (base_ref) == MEM_REF
1610 		           && ((offset2 == instance_offset
1611 		                && TREE_OPERAND (base_ref, 0) == instance)
1612 			       || (!offset2
1613 				   && TREE_OPERAND (base_ref, 0)
1614 				      == otr_object)))
1615 			  || (DECL_P (instance) && base_ref == instance
1616 			      && offset2 == instance_offset)))
1617 		    {
1618 		      stmt = SSA_NAME_DEF_STMT (ref);
1619 		      instance_ref = ref_exp;
1620 		    }
1621 		}
1622 	    }
1623 	}
1624     }
1625 
1626   /* If we failed to look up the reference in code, build our own.  */
1627   if (!instance_ref)
1628     {
1629       /* If the statement in question does not use memory, we can't tell
1630 	 anything.  */
1631       if (!gimple_vuse (stmt))
1632 	return false;
1633       ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1634     }
1635   else
1636   /* Otherwise use the real reference.  */
1637     ao_ref_init (&ao, instance_ref);
1638 
1639   /* We look for vtbl pointer read.  */
1640   ao.size = POINTER_SIZE;
1641   ao.max_size = ao.size;
1642   /* We are looking for stores to vptr pointer within the instance of
1643      outer type.
1644      TODO: The vptr pointer type is globally known, we probably should
1645      keep it and do that even when otr_type is unknown.  */
1646   if (otr_type)
1647     {
1648       ao.base_alias_set
1649 	= get_alias_set (outer_type ? outer_type : otr_type);
1650       ao.ref_alias_set
1651         = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1652     }
1653 
1654   if (dump_file)
1655     {
1656       fprintf (dump_file, "Determining dynamic type for call: ");
1657       print_gimple_stmt (dump_file, call, 0, 0);
1658       fprintf (dump_file, "  Starting walk at: ");
1659       print_gimple_stmt (dump_file, stmt, 0, 0);
1660       fprintf (dump_file, "  instance pointer: ");
1661       print_generic_expr (dump_file, otr_object, TDF_SLIM);
1662       fprintf (dump_file, "  Outer instance pointer: ");
1663       print_generic_expr (dump_file, instance, TDF_SLIM);
1664       fprintf (dump_file, " offset: %i (bits)", (int)instance_offset);
1665       fprintf (dump_file, " vtbl reference: ");
1666       print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1667       fprintf (dump_file, "\n");
1668     }
1669 
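  /* Set up the state consulted by check_stmt_for_type_change during the
     alias-oracle walk below: which object and offset we track, and what the
     walk has discovered so far.  */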
1670   tci.offset = instance_offset;
1671   tci.instance = instance;
1672   tci.vtbl_ptr_ref = instance_ref;
1673   tci.known_current_type = NULL_TREE;
1674   tci.known_current_offset = 0;
1675   tci.otr_type = otr_type;
1676   tci.type_maybe_changed = false;
1677   tci.multiple_types_encountered = false;
1678   tci.speculative = 0;
1679   tci.seen_unanalyzed_store = false;
1680 
1681   walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1682 		      &tci, NULL, &function_entry_reached);
1683 
1684   /* If we did not find any type-changing statements, we may still drop the
1685      maybe_in_construction flag if the context already has an outer type.
1686 
1687      Here we make special assumptions about both constructors and
1688      destructors which are all the functions that are allowed to alter the
1689      VMT pointers.  We assume that destructors begin with assignments into
1690      all VMT pointers and that constructors essentially proceed in the
1691      following way:
1692 
1693      1) The very first thing they do is that they call constructors of
1694      ancestor sub-objects that have them.
1695 
1696      2) Then the VMT pointers of this and all its ancestors are set to new
1697      values corresponding to the type of the constructor.
1698 
1699      3) Only afterwards, other stuff such as constructors of member
1700      sub-objects and the code written by the user is run.  Only this may
1701      include calling virtual functions, directly or indirectly.
1702 
1703      4) Placement new cannot be used to change the type of non-POD statically
1704      allocated variables.
1705 
1706      There is no way to call a constructor of an ancestor sub-object in any
1707      other way.
1708 
1709      This means that we do not have to care whether constructors get the
1710      correct type information because they will always change it (in fact,
1711      if we define the type to be given by the VMT pointer, it is undefined).
1712 
1713      The most important fact to derive from the above is that if, for some
1714      statement in section 3, we try to detect whether the dynamic type
1715      has changed, we can safely ignore all calls as we examine the function
1716      body backwards until we reach statements in section 2 because these
1717      calls cannot be ancestor constructors or destructors (if the input is
1718      not bogus) and so do not change the dynamic type (this holds true only
1719      for automatically allocated objects but at the moment we devirtualize
1720      only these).  We then must detect that statements in section 2 change
1721      the dynamic type and can try to derive the new type.  That is enough
1722      and we can stop; we will never see the calls into constructors of
1723      sub-objects in this code.
1724 
1725      Therefore if the static outer type was found (outer_type),
1726      we can safely ignore tci.speculative that is set on calls and give up
1727      only if there was a dynamic type store that may affect the given variable
1728      (seen_unanalyzed_store).  */
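  /* As an illustration of the ordering assumed above (hypothetical C++, not
     code inspected by this function):

       struct B { virtual void f (); };
       struct D : B { D (); virtual void f (); };

     the compiled D::D () first calls B::B () (section 1), then stores the
     address of D's vtable into this->_vptr (section 2), and only then runs
     the user-written constructor body (section 3), so any virtual call made
     from that body already sees D's vtable pointer.  */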
1729 
1730   if (!tci.type_maybe_changed
1731       || (outer_type
1732 	  && !dynamic
1733 	  && !tci.seen_unanalyzed_store
1734 	  && !tci.multiple_types_encountered
1735 	  && ((offset == tci.offset
1736 	       && types_same_for_odr (tci.known_current_type,
1737 				      outer_type))
1738 	       || (instance_offset == offset
1739 		   && types_same_for_odr (tci.known_current_type,
1740 					  instance_outer_type)))))
1741     {
1742       if (!outer_type || tci.seen_unanalyzed_store)
1743 	return false;
1744       if (maybe_in_construction)
1745         maybe_in_construction = false;
1746       if (dump_file)
1747 	fprintf (dump_file, "  No dynamic type change found.\n");
1748       return true;
1749     }
1750 
1751   if (tci.known_current_type
1752       && !function_entry_reached
1753       && !tci.multiple_types_encountered)
1754     {
1755       if (!tci.speculative)
1756 	{
1757 	  outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1758 	  offset = tci.known_current_offset;
1759 	  dynamic = true;
1760 	  maybe_in_construction = false;
1761 	  maybe_derived_type = false;
1762 	  if (dump_file)
1763 	    fprintf (dump_file, "  Determined dynamic type.\n");
1764 	}
1765       else if (!speculative_outer_type
1766 	       || speculative_maybe_derived_type)
1767 	{
1768 	  speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1769 	  speculative_offset = tci.known_current_offset;
1770 	  speculative_maybe_derived_type = false;
1771 	  if (dump_file)
1772 	    fprintf (dump_file, "  Determined speculative dynamic type.\n");
1773 	}
1774     }
1775   else if (dump_file)
1776     {
1777       fprintf (dump_file, "  Found multiple types%s%s\n",
1778 	       function_entry_reached ? " (function entry reached)" : "",
1779 	       tci.multiple_types_encountered ? " (multiple types encountered)" : "");
1780     }
1781 
1782   return false;
1783 }
1784 
1785 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1786    seems consistent (and useful) with what we already have in the non-speculative context.  */
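/* For example (hypothetical types, not taken from this unit): given

     struct B { virtual void f (); };
     struct D : B { virtual void f (); };

   a non-speculative context of "B, possibly a derived type" makes a
   speculation of "D at the same offset" consistent and useful, while a
   speculation repeating "B, possibly a derived type" adds nothing and is
   rejected.  */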
1787 
1788 bool
1789 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
1790 							HOST_WIDE_INT spec_offset,
1791 							bool spec_maybe_derived_type,
1792 							tree otr_type) const
1793 {
1794   if (!flag_devirtualize_speculatively)
1795     return false;
1796 
1797   /* Non-polymorphic types are useless for deriving likely polymorphic
1798      call targets.  */
1799   if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
1800     return false;
1801 
1802   /* If we know nothing, speculation is always good.  */
1803   if (!outer_type)
1804     return true;
1805 
1806   /* Speculation is only useful to avoid derived types.
1807      This is not 100% true for placement new, where the outer context may
1808      turn out to be useless, but ignore these for now.  */
1809   if (!maybe_derived_type)
1810     return false;
1811 
1812   /* If the types agree, speculation is consistent, but it makes sense only
1813      when it says something new.  */
1814   if (types_must_be_same_for_odr (spec_outer_type, outer_type))
1815     return maybe_derived_type && !spec_maybe_derived_type;
1816 
1817   /* If speculation does not contain the type in question, ignore it.  */
1818   if (otr_type
1819       && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
1820     return false;
1821 
1822   /* If the outer type already contains the speculation as a field,
1823      it is useless.  We already know SPEC_OUTER_TYPE from OUTER_TYPE
1824      and that it is not in construction.  */
1825   if (contains_type_p (outer_type, offset - spec_offset,
1826 		       spec_outer_type, false, false))
1827     return false;
1828 
1829   /* If the speculative outer type is not more specific than the outer
1830      type, just give up.
1831      We can only decide this safely if we can compare types with OUTER_TYPE.
1832    */
1833   if ((!in_lto_p || odr_type_p (outer_type))
1834       && !contains_type_p (spec_outer_type,
1835 			   spec_offset - offset,
1836 			   outer_type, false))
1837     return false;
1838   return true;
1839 }
1840 
1841 /* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
1842    NEW_MAYBE_DERIVED_TYPE.
1843    If OTR_TYPE is set, assume the context is used with OTR_TYPE.  */
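/* For example (an illustrative scenario): if the current speculation is
   "B, possibly a derived type" and the new speculation for the same offset
   is "exactly B", the new one wins because it rules out derivations; the
   reverse combination changes nothing.  */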
1844 
1845 bool
1846 ipa_polymorphic_call_context::combine_speculation_with
1847    (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1848     tree otr_type)
1849 {
1850   if (!new_outer_type)
1851     return false;
1852 
1853   /* restrict_to_inner_class may eliminate wrong speculation, making our job
1854      easier.  */
1855   if (otr_type)
1856     restrict_to_inner_class (otr_type);
1857 
1858   if (!speculation_consistent_p (new_outer_type, new_offset,
1859 				 new_maybe_derived_type, otr_type))
1860     return false;
1861 
1862   /* New speculation is a win in case we have no speculation or new
1863      speculation does not consider derivations.  */
1864   if (!speculative_outer_type
1865       || (speculative_maybe_derived_type
1866 	  && !new_maybe_derived_type))
1867     {
1868       speculative_outer_type = new_outer_type;
1869       speculative_offset = new_offset;
1870       speculative_maybe_derived_type = new_maybe_derived_type;
1871       return true;
1872     }
1873   else if (types_must_be_same_for_odr (speculative_outer_type,
1874 				       new_outer_type))
1875     {
1876       if (speculative_offset != new_offset)
1877 	{
1878 	  /* OK, we have two contexts that seem valid but they disagree;
1879 	     just give up.
1880 
1881 	     This is not a lattice operation, so we may want to drop it later.  */
1882 	  if (dump_file && (dump_flags & TDF_DETAILS))
1883 	    fprintf (dump_file,
1884 		     "Speculative outer types match, "
1885 		     "offset mismatch -> invalid speculation\n");
1886 	  clear_speculation ();
1887 	  return true;
1888 	}
1889       else
1890 	{
1891 	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
1892 	    {
1893 	      speculative_maybe_derived_type = false;
1894 	      return true;
1895 	    }
1896 	  else
1897 	    return false;
1898 	}
1899     }
1900   /* Choose the type that contains the other.  This one either contains the
1901      other as a field (thus giving exactly one target) or is deeper in the type
1902      hierarchy.  */
1903   else if (speculative_outer_type
1904 	   && speculative_maybe_derived_type
1905 	   && (new_offset > speculative_offset
1906 	       || (new_offset == speculative_offset
1907 		   && contains_type_p (new_outer_type,
1908 				       0, speculative_outer_type, false))))
1909     {
1910       tree old_outer_type = speculative_outer_type;
1911       HOST_WIDE_INT old_offset = speculative_offset;
1912       bool old_maybe_derived_type = speculative_maybe_derived_type;
1913 
1914       speculative_outer_type = new_outer_type;
1915       speculative_offset = new_offset;
1916       speculative_maybe_derived_type = new_maybe_derived_type;
1917 
1918       if (otr_type)
1919 	restrict_to_inner_class (otr_type);
1920 
1921       /* If the speculation turned out to make no sense, revert to sensible
1922 	 one.  */
1923       if (!speculative_outer_type)
1924 	{
1925 	  speculative_outer_type = old_outer_type;
1926 	  speculative_offset = old_offset;
1927 	  speculative_maybe_derived_type = old_maybe_derived_type;
1928 	  return false;
1929 	}
1930       return (old_offset != speculative_offset
1931 	      || old_maybe_derived_type != speculative_maybe_derived_type
1932 	      || types_must_be_same_for_odr (speculative_outer_type,
1933 					     new_outer_type));
1934     }
1935   return false;
1936 }
1937 
1938 /* Make the speculation less specific so that the context described by
1939    NEW_OUTER_TYPE, NEW_OFFSET and NEW_MAYBE_DERIVED_TYPE is also included.
1940    If OTR_TYPE is set, assume the context is used with OTR_TYPE.  */
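/* For example (an illustrative scenario): meeting a current speculation of
   "exactly B" with a new one of "B, possibly a derived type" at the same
   offset relaxes the current speculation to "B, possibly a derived type";
   meeting it with an inconsistent speculation drops speculation entirely.  */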
1941 
1942 bool
1943 ipa_polymorphic_call_context::meet_speculation_with
1944    (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1945     tree otr_type)
1946 {
1947   if (!new_outer_type && speculative_outer_type)
1948     {
1949       clear_speculation ();
1950       return true;
1951     }
1952 
1953   /* restrict_to_inner_class may eliminate wrong speculation, making our job
1954      easier.  */
1955   if (otr_type)
1956     restrict_to_inner_class (otr_type);
1957 
1958   if (!speculative_outer_type
1959       || !speculation_consistent_p (speculative_outer_type,
1960 				    speculative_offset,
1961 				    speculative_maybe_derived_type,
1962 				    otr_type))
1963     return false;
1964 
1965   if (!speculation_consistent_p (new_outer_type, new_offset,
1966 				 new_maybe_derived_type, otr_type))
1967     {
1968       clear_speculation ();
1969       return true;
1970     }
1971 
1972   else if (types_must_be_same_for_odr (speculative_outer_type,
1973 				       new_outer_type))
1974     {
1975       if (speculative_offset != new_offset)
1976 	{
1977 	  clear_speculation ();
1978 	  return true;
1979 	}
1980       else
1981 	{
1982 	  if (!speculative_maybe_derived_type && new_maybe_derived_type)
1983 	    {
1984 	      speculative_maybe_derived_type = true;
1985 	      return true;
1986 	    }
1987 	  else
1988 	    return false;
1989 	}
1990     }
1991   /* See if one type contains the other as a field (not base).  */
1992   else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
1993 			    speculative_outer_type, false, false))
1994     return false;
1995   else if (contains_type_p (speculative_outer_type,
1996 			    speculative_offset - new_offset,
1997 			    new_outer_type, false, false))
1998     {
1999       speculative_outer_type = new_outer_type;
2000       speculative_offset = new_offset;
2001       speculative_maybe_derived_type = new_maybe_derived_type;
2002       return true;
2003     }
2004   /* See if SPECULATIVE_OUTER_TYPE is a base of NEW_OUTER_TYPE.  */
2005   else if (contains_type_p (new_outer_type,
2006 			    new_offset - speculative_offset,
2007 			    speculative_outer_type, false, true))
2008     {
2009       if (!speculative_maybe_derived_type)
2010 	{
2011 	  speculative_maybe_derived_type = true;
2012 	  return true;
2013 	}
2014       return false;
2015     }
2016   /* See if NEW_OUTER_TYPE is a base of SPECULATIVE_OUTER_TYPE.  */
2017   else if (contains_type_p (speculative_outer_type,
2018 			    speculative_offset - new_offset, new_outer_type, false, true))
2019     {
2020       speculative_outer_type = new_outer_type;
2021       speculative_offset = new_offset;
2022       speculative_maybe_derived_type = true;
2023       return true;
2024     }
2025   else
2026     {
2027       if (dump_file && (dump_flags & TDF_DETAILS))
2028         fprintf (dump_file, "Giving up on speculative meet\n");
2029       clear_speculation ();
2030       return true;
2031     }
2032 }
2033 
2034 /* Assume that both THIS and the given context are valid and strengthen THIS
2035    if possible.  Return true if any strengthening was made.
2036    If the actual type the context is being used with is known, OTR_TYPE should
2037    be set accordingly.  This improves the quality of the combined result.  */
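/* For example (an illustrative scenario with D derived from B at offset 0):
   combining a context that knows only "B, possibly a derived type" with one
   that knows "exactly D" keeps D, since it is deeper in the type hierarchy
   and therefore carries more information.  */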
2038 
2039 bool
2040 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
2041 					    tree otr_type)
2042 {
2043   bool updated = false;
2044 
2045   if (ctx.useless_p () || invalid)
2046     return false;
2047 
2048   /* Restricting context to inner type makes merging easier; however, do not
2049      do that unless we know how the context is used (OTR_TYPE is non-NULL).  */
2050   if (otr_type && !invalid && !ctx.invalid)
2051     {
2052       restrict_to_inner_class (otr_type);
2053       ctx.restrict_to_inner_class (otr_type);
2054       if (invalid)
2055         return false;
2056     }
2057 
2058   if (dump_file && (dump_flags & TDF_DETAILS))
2059     {
2060       fprintf (dump_file, "Polymorphic call context combine:");
2061       dump (dump_file);
2062       fprintf (dump_file, "With context:                    ");
2063       ctx.dump (dump_file);
2064       if (otr_type)
2065 	{
2066           fprintf (dump_file, "To be used with type:            ");
2067 	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
2068           fprintf (dump_file, "\n");
2069 	}
2070     }
2071 
2072   /* If call is known to be invalid, we are done.  */
2073   if (ctx.invalid)
2074     {
2075       if (dump_file && (dump_flags & TDF_DETAILS))
2076         fprintf (dump_file, "-> Invalid context\n");
2077       goto invalidate;
2078     }
2079 
2080   if (!ctx.outer_type)
2081     ;
2082   else if (!outer_type)
2083     {
2084       outer_type = ctx.outer_type;
2085       offset = ctx.offset;
2086       dynamic = ctx.dynamic;
2087       maybe_in_construction = ctx.maybe_in_construction;
2088       maybe_derived_type = ctx.maybe_derived_type;
2089       updated = true;
2090     }
2091   /* If types are known to be same, merging is quite easy.  */
2092   else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2093     {
2094       if (offset != ctx.offset
2095 	  && TYPE_SIZE (outer_type)
2096 	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2097 	{
2098 	  if (dump_file && (dump_flags & TDF_DETAILS))
2099 	    fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
2100 	  clear_speculation ();
2101 	  clear_outer_type ();
2102 	  invalid = true;
2103 	  return true;
2104 	}
2105       if (dump_file && (dump_flags & TDF_DETAILS))
2106         fprintf (dump_file, "Outer types match, merging flags\n");
2107       if (maybe_in_construction && !ctx.maybe_in_construction)
2108 	{
2109 	  updated = true;
2110 	  maybe_in_construction = false;
2111 	}
2112       if (maybe_derived_type && !ctx.maybe_derived_type)
2113 	{
2114 	  updated = true;
2115 	  maybe_derived_type = false;
2116 	}
2117       if (dynamic && !ctx.dynamic)
2118 	{
2119 	  updated = true;
2120 	  dynamic = false;
2121 	}
2122     }
2123   /* If we know the type precisely, there is not much to improve.  */
2124   else if (!maybe_derived_type && !maybe_in_construction
2125 	   && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
2126     {
2127       /* It would be tempting to check whether the second context permits the
2128 	 first and set INVALID otherwise, but this is not easy to do in general;
2129 	 contains_type_p may return false negatives for non-comparable
2130 	 types.
2131 
2132 	 If OTR_TYPE is known, we however can expect that
2133 	 restrict_to_inner_class should have discovered the same base
2134 	 type.  */
2135       if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
2136 	{
2137 	  if (dump_file && (dump_flags & TDF_DETAILS))
2138 	    fprintf (dump_file, "Contexts disagree -> invalid\n");
2139 	  goto invalidate;
2140 	}
2141     }
2142   /* See if one type contains the other as a field (not base).
2143      In this case we want to choose the wider type, because it contains
2144      more information.  */
2145   else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2146 			    outer_type, false, false))
2147     {
2148       if (dump_file && (dump_flags & TDF_DETAILS))
2149 	fprintf (dump_file, "Second type contains the first as a field\n");
2150 
2151       if (maybe_derived_type)
2152 	{
2153 	  outer_type = ctx.outer_type;
2154 	  maybe_derived_type = ctx.maybe_derived_type;
2155 	  offset = ctx.offset;
2156 	  dynamic = ctx.dynamic;
2157 	  updated = true;
2158 	}
2159 
2160       /* If we do not know how the context is being used, we cannot
2161 	 clear MAYBE_IN_CONSTRUCTION because it may be offset
2162 	 to another component of OUTER_TYPE later and we know nothing
2163 	 about it.  */
2164       if (otr_type && maybe_in_construction
2165 	  && !ctx.maybe_in_construction)
2166 	{
2167           maybe_in_construction = false;
2168 	  updated = true;
2169 	}
2170     }
2171   else if (contains_type_p (outer_type, offset - ctx.offset,
2172 			    ctx.outer_type, false, false))
2173     {
2174       if (dump_file && (dump_flags & TDF_DETAILS))
2175 	fprintf (dump_file, "First type contains the second as a field\n");
2176 
2177       if (otr_type && maybe_in_construction
2178 	  && !ctx.maybe_in_construction)
2179 	{
2180           maybe_in_construction = false;
2181 	  updated = true;
2182 	}
2183     }
2184   /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
2185   else if (contains_type_p (ctx.outer_type,
2186 			    ctx.offset - offset, outer_type, false, true))
2187     {
2188       if (dump_file && (dump_flags & TDF_DETAILS))
2189 	fprintf (dump_file, "First type is base of second\n");
2190       if (!maybe_derived_type)
2191 	{
2192 	  if (!ctx.maybe_in_construction
2193 	      && types_odr_comparable (outer_type, ctx.outer_type))
2194 	    {
2195 	      if (dump_file && (dump_flags & TDF_DETAILS))
2196 		fprintf (dump_file, "Second context does not permit base -> invalid\n");
2197 	      goto invalidate;
2198 	    }
2199 	}
2200       /* Pick the variant deeper in the hierarchy.  */
2201       else
2202 	{
2203 	  outer_type = ctx.outer_type;
2204 	  maybe_in_construction = ctx.maybe_in_construction;
2205 	  maybe_derived_type = ctx.maybe_derived_type;
2206 	  offset = ctx.offset;
2207 	  dynamic = ctx.dynamic;
2208           updated = true;
2209 	}
2210     }
2211   /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
2212   else if (contains_type_p (outer_type,
2213 			    offset - ctx.offset, ctx.outer_type, false, true))
2214     {
2215       if (dump_file && (dump_flags & TDF_DETAILS))
2216 	fprintf (dump_file, "Second type is base of first\n");
2217       if (!ctx.maybe_derived_type)
2218 	{
2219 	  if (!maybe_in_construction
2220 	      && types_odr_comparable (outer_type, ctx.outer_type))
2221 	    {
2222 	      if (dump_file && (dump_flags & TDF_DETAILS))
2223 		fprintf (dump_file, "First context does not permit base -> invalid\n");
2224 	      goto invalidate;
2225 	    }
2226 	  /* Pick the base type.  */
2227 	  else if (maybe_in_construction)
2228 	    {
2229 	      outer_type = ctx.outer_type;
2230 	      maybe_in_construction = ctx.maybe_in_construction;
2231 	      maybe_derived_type = ctx.maybe_derived_type;
2232 	      offset = ctx.offset;
2233 	      dynamic = ctx.dynamic;
2234 	      updated = true;
2235 	    }
2236 	}
2237     }
2238   /* TODO: handle merging using the hierarchy.  */
2239   else if (dump_file && (dump_flags & TDF_DETAILS))
2240     fprintf (dump_file, "Giving up on merge\n");
2241 
2242   updated |= combine_speculation_with (ctx.speculative_outer_type,
2243 				       ctx.speculative_offset,
2244 				       ctx.speculative_maybe_derived_type,
2245 				       otr_type);
2246 
2247   if (updated && dump_file && (dump_flags & TDF_DETAILS))
2248     {
2249       fprintf (dump_file, "Updated as:                      ");
2250       dump (dump_file);
2251       fprintf (dump_file, "\n");
2252     }
2253   return updated;
2254 
2255 invalidate:
2256   invalid = true;
2257   clear_speculation ();
2258   clear_outer_type ();
2259   return true;
2260 }
2261 
2262 /* Take non-speculative info, merge it with speculative and clear speculation.
2263    Used when we no longer manage to keep track of the actual outer type, but we
2264    think it is still there.
2265 
2266    If OTR_TYPE is set, the transformation can be done more effectively assuming
2267    that the context is going to be used only that way.  */
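/* For example (an illustrative scenario): a context of "outer type B at
   offset 0, possibly a derived type" becomes a context with no outer type
   but with a speculative outer type of B at offset 0, still possibly a
   derived type (subject to the consistency checks above).  */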
2268 
2269 void
2270 ipa_polymorphic_call_context::make_speculative (tree otr_type)
2271 {
2272   tree spec_outer_type = outer_type;
2273   HOST_WIDE_INT spec_offset = offset;
2274   bool spec_maybe_derived_type = maybe_derived_type;
2275 
2276   if (invalid)
2277     {
2278       invalid = false;
2279       clear_outer_type ();
2280       clear_speculation ();
2281       return;
2282     }
2283   if (!outer_type)
2284     return;
2285   clear_outer_type ();
2286   combine_speculation_with (spec_outer_type, spec_offset,
2287 			    spec_maybe_derived_type,
2288 			    otr_type);
2289 }
2290 
2291 /* Used when we cannot track dynamic type change.  This speculatively assumes
2292    the type change is not happening.  */
2293 
2294 void
2295 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2296 							    tree otr_type)
2297 {
2298   if (dynamic)
2299     make_speculative (otr_type);
2300   else if (in_poly_cdtor)
2301     maybe_in_construction = true;
2302 }
2303 
2304 /* Return TRUE if this context conveys the same information as X.  */
2305 
2306 bool
2307 ipa_polymorphic_call_context::equal_to
2308     (const ipa_polymorphic_call_context &x) const
2309 {
2310   if (useless_p ())
2311     return x.useless_p ();
2312   if (invalid)
2313     return x.invalid;
2314   if (x.useless_p () || x.invalid)
2315     return false;
2316 
2317   if (outer_type)
2318     {
2319       if (!x.outer_type
2320 	  || !types_odr_comparable (outer_type, x.outer_type)
2321 	  || !types_same_for_odr (outer_type, x.outer_type)
2322 	  || offset != x.offset
2323 	  || maybe_in_construction != x.maybe_in_construction
2324 	  || maybe_derived_type != x.maybe_derived_type
2325 	  || dynamic != x.dynamic)
2326 	return false;
2327     }
2328   else if (x.outer_type)
2329     return false;
2330 
2331 
2332   if (speculative_outer_type
2333       && speculation_consistent_p (speculative_outer_type, speculative_offset,
2334 				   speculative_maybe_derived_type, NULL_TREE))
2335     {
2336       if (!x.speculative_outer_type)
2337 	return false;
2338 
2339       if (!types_odr_comparable (speculative_outer_type,
2340 				 x.speculative_outer_type)
2341 	  || !types_same_for_odr  (speculative_outer_type,
2342 				   x.speculative_outer_type)
2343 	  || speculative_offset != x.speculative_offset
2344 	  || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
2345 	return false;
2346     }
2347   else if (x.speculative_outer_type
2348 	   && x.speculation_consistent_p (x.speculative_outer_type,
2349 					  x.speculative_offset,
2350 				  	  x.speculative_maybe_derived_type,
2351 					  NULL))
2352     return false;
2353 
2354   return true;
2355 }
2356 
2357 /* Modify context to be no more restrictive than CTX; return true if changed.  */
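/* For example (an illustrative scenario with B a base of D at offset 0):
   the meet of a context that knows "exactly D" with one that knows
   "B, possibly a derived type" is "B, possibly a derived type", the less
   restrictive of the two.  */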
2358 
2359 bool
2360 ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
2361 					 tree otr_type)
2362 {
2363   bool updated = false;
2364 
2365   if (useless_p () || ctx.invalid)
2366     return false;
2367 
2368   /* Restricting context to inner type makes merging easier; however, do not
2369      do that unless we know how the context is used (OTR_TYPE is non-NULL).  */
2370   if (otr_type && !useless_p () && !ctx.useless_p ())
2371     {
2372       restrict_to_inner_class (otr_type);
2373       ctx.restrict_to_inner_class (otr_type);
2374       if (invalid)
2375         return false;
2376     }
2377 
2378   if (equal_to (ctx))
2379     return false;
2380 
2381   if (ctx.useless_p () || invalid)
2382     {
2383       *this = ctx;
2384       return true;
2385     }
2386 
2387   if (dump_file && (dump_flags & TDF_DETAILS))
2388     {
2389       fprintf (dump_file, "Polymorphic call context meet:");
2390       dump (dump_file);
2391       fprintf (dump_file, "With context:                    ");
2392       ctx.dump (dump_file);
2393       if (otr_type)
2394 	{
2395           fprintf (dump_file, "To be used with type:            ");
2396 	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
2397           fprintf (dump_file, "\n");
2398 	}
2399     }
2400 
2401   if (!dynamic && ctx.dynamic)
2402     {
2403       dynamic = true;
2404       updated = true;
2405     }
2406 
2407   /* If either context has no outer type, the result has no outer type.  */
2408   if (!outer_type)
2409     ;
2410   else if (!ctx.outer_type)
2411     {
2412       clear_outer_type ();
2413       updated = true;
2414     }
2415   /* If types are known to be same, merging is quite easy.  */
2416   else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2417     {
2418       if (offset != ctx.offset
2419 	  && TYPE_SIZE (outer_type)
2420 	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2421 	{
2422 	  if (dump_file && (dump_flags & TDF_DETAILS))
2423 	    fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
2424 	  clear_outer_type ();
2425 	  return true;
2426 	}
2427       if (dump_file && (dump_flags & TDF_DETAILS))
2428         fprintf (dump_file, "Outer types match, merging flags\n");
2429       if (!maybe_in_construction && ctx.maybe_in_construction)
2430 	{
2431 	  updated = true;
2432 	  maybe_in_construction = true;
2433 	}
2434       if (!maybe_derived_type && ctx.maybe_derived_type)
2435 	{
2436 	  updated = true;
2437 	  maybe_derived_type = true;
2438 	}
2439       if (!dynamic && ctx.dynamic)
2440 	{
2441 	  updated = true;
2442 	  dynamic = true;
2443 	}
2444     }
2445   /* See if one type contains the other as a field (not base).  */
2446   else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2447 			    outer_type, false, false))
2448     {
2449       if (dump_file && (dump_flags & TDF_DETAILS))
2450 	fprintf (dump_file, "Second type contains the first as a field\n");
2451 
2452       /* The second type is more specific, so we keep the first.
2453          We need to set the DYNAMIC flag to avoid declaring the context INVALID
2454 	 if OFFSET ends up being out of range.  */
2455       if (!dynamic
2456 	  && (ctx.dynamic
2457 	      || (!otr_type
2458 		  && (!TYPE_SIZE (ctx.outer_type)
2459 		      || !TYPE_SIZE (outer_type)
2460 		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2461 					   TYPE_SIZE (outer_type), 0)))))
2462 	{
2463 	  dynamic = true;
2464 	  updated = true;
2465 	}
2466     }
2467   else if (contains_type_p (outer_type, offset - ctx.offset,
2468 			    ctx.outer_type, false, false))
2469     {
2470       if (dump_file && (dump_flags & TDF_DETAILS))
2471 	fprintf (dump_file, "First type contains the second as a field\n");
2472 
2473       if (!dynamic
2474 	  && (ctx.dynamic
2475 	      || (!otr_type
2476 		  && (!TYPE_SIZE (ctx.outer_type)
2477 		      || !TYPE_SIZE (outer_type)
2478 		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2479 					   TYPE_SIZE (outer_type), 0)))))
2480 	dynamic = true;
2481       outer_type = ctx.outer_type;
2482       offset = ctx.offset;
2483       dynamic = ctx.dynamic;
2484       maybe_in_construction = ctx.maybe_in_construction;
2485       maybe_derived_type = ctx.maybe_derived_type;
2486       updated = true;
2487     }
2488   /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
2489   else if (contains_type_p (ctx.outer_type,
2490 			    ctx.offset - offset, outer_type, false, true))
2491     {
2492       if (dump_file && (dump_flags & TDF_DETAILS))
2493 	fprintf (dump_file, "First type is base of second\n");
2494       if (!maybe_derived_type)
2495 	{
2496 	  maybe_derived_type = true;
2497 	  updated = true;
2498 	}
2499       if (!maybe_in_construction && ctx.maybe_in_construction)
2500 	{
2501 	  maybe_in_construction = true;
2502 	  updated = true;
2503 	}
2504       if (!dynamic && ctx.dynamic)
2505 	{
2506 	  dynamic = true;
2507 	  updated = true;
2508 	}
2509     }
2510   /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
2511   else if (contains_type_p (outer_type,
2512 			    offset - ctx.offset, ctx.outer_type, false, true))
2513     {
2514       if (dump_file && (dump_flags & TDF_DETAILS))
2515 	fprintf (dump_file, "Second type is base of first\n");
2516       outer_type = ctx.outer_type;
2517       offset = ctx.offset;
2518       updated = true;
2519       if (!maybe_derived_type)
2520 	maybe_derived_type = true;
2521       if (!maybe_in_construction && ctx.maybe_in_construction)
2522 	maybe_in_construction = true;
2523       if (!dynamic && ctx.dynamic)
2524 	dynamic = true;
2525     }
2526   /* TODO: handle merging using the hierarchy.  */
2527   else
2528     {
2529       if (dump_file && (dump_flags & TDF_DETAILS))
2530         fprintf (dump_file, "Giving up on meet\n");
2531       clear_outer_type ();
2532       updated = true;
2533     }
2534 
2535   updated |= meet_speculation_with (ctx.speculative_outer_type,
2536 				    ctx.speculative_offset,
2537 				    ctx.speculative_maybe_derived_type,
2538 				    otr_type);
2539 
2540   if (updated && dump_file && (dump_flags & TDF_DETAILS))
2541     {
2542       fprintf (dump_file, "Updated as:                      ");
2543       dump (dump_file);
2544       fprintf (dump_file, "\n");
2545     }
2546   return updated;
2547 }
2548