/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the edge argument infos (jump functions) are actually
   stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in the function whose
   parameters are described by DESCRIPTORS, or -1 if there is none.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO into
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}
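
/* For illustration, the output the function above produces for one call
   site might look like the following (made-up values):

       param 0: CONST: 4
       param 1: PASS THROUGH: 0, op nop_expr, agg_preserved
         Aggregate passed by reference:
           offset: 0, cst: 7
         Unknown alignment
   */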


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be an unknown jump function, i.e. one representing no
   knowledge about the passed value whatsoever.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set DST to be a copy of another constant jump function SRC (to be used by
   the jump function combination code).  The two functions will share their
   rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
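
/* For instance, given a made-up call

       bar (&foo);

   where foo is a function, the jump function built above records the
   constant &foo and attaches a reference description with refcount 1 that
   remembers the edge whose call statement takes the reference, so that
   duplicates created when call graph edges are cloned can be chained to
   it via next_duplicate.  */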

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
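
/* As an illustration of the last setter above, in a made-up fragment

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   the argument would get an ancestor jump function whose FORMAL_ID refers
   to this and whose OFFSET is the position of the D.1748 sub-object
   within *this, in bits.  */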

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
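
/* A typical statement the predicate above is meant to catch is the store a
   constructor makes to the virtual method table pointer, e.g. the made-up
   fragment

       this_1->_vptr.A = &_ZTV1A + 16;

   Calls and clobbers are deemed harmless, per the reasoning in the big
   comment above.  */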

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
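
/* A classic made-up example of the kind of type change the predicate above
   guards against is a method that reuses the storage of *this:

       void A::foo () { this->~A (); new (this) B; }

   Once such a cdtor has been inlined into the current function, the
   dynamic type of the instance at CALL may differ from the type it had on
   entry.  */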

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
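
/* In other words, for a pointer argument p_1, the wrapper above
   synthesizes the reference *p_1 (a MEM_REF with a zero offset) and lets
   detect_type_change_from_memory_writes walk the virtual table pointer
   stores that may reach the call.  */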

/* Callback of walk_aliased_vdefs.  Records the fact that it has been invoked
   in the boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
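
/* As an example, in a made-up fragment

       a.0_2 = a;
       bar (a.0_2);

   where a is a PARM_DECL that is not a gimple register, calling the
   function above on the load statement yields the index of a, provided the
   walk over aliased virtual definitions proves that a is not modified
   before the load.  */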

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (but FBI
   can be NULL), STMT is the load statement.  If the function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed
   by reference respectively.  */
bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters that is used across
   different stages of IPA optimizations.  FBI contains the information that
   is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
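
/* For reference, under the Itanium C++ ABI a pointer to member function is
   passed as a record shaped roughly like the following sketch, where __pfn
   holds the method pointer (or a vtable offset) and __delta the adjustment
   to apply to the THIS pointer (field names taken from that ABI, the exact
   types vary):

       struct
       {
	 void (*__pfn) ();
	 long __delta;
       };

   This is the two-field layout the predicate above checks for.  */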

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
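
/* For example, with a list already describing bit ranges [0, 32) and
   [64, 96) (made-up offsets), asking for offset 32 and size 32 returns the
   link between the two entries, asking for offset 64 and size 32 sets
   *ALREADY_THERE, and asking for offset 48 and size 32 overlaps the second
   entry only partially and therefore yields NULL.  */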

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of
     ipa_known_agg_contents_list structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
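
/* As an example, given a made-up caller fragment

       s.a = 1;
       s.b = 2;
       foo (&s);

   the function above records the constants 1 and 2 together with their
   offsets within s into the aggregate jump function of the argument,
   because those stores are the closest writes to *&s that reach the call
   and nothing written later overlaps them.  */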

/* Return the type of the Ith formal parameter of the function called by
   edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
1641 
1642 /* Compute jump function for all arguments of callsite CS and insert the
1643    information in the jump_functions array in the ipa_edge_args corresponding
1644    to this callsite.  */
1645 
1646 static void
1647 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1648 				     struct cgraph_edge *cs)
1649 {
1650   struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1651   struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1652   gcall *call = cs->call_stmt;
1653   int n, arg_num = gimple_call_num_args (call);
1654   bool useful_context = false;
1655 
1656   if (arg_num == 0 || args->jump_functions)
1657     return;
1658   vec_safe_grow_cleared (args->jump_functions, arg_num);
1659   if (flag_devirtualize)
1660     vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1661 
1662   if (gimple_call_internal_p (call))
1663     return;
1664   if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1665     return;
1666 
1667   for (n = 0; n < arg_num; n++)
1668     {
1669       struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1670       tree arg = gimple_call_arg (call, n);
1671       tree param_type = ipa_get_callee_param_type (cs, n);
1672       if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1673 	{
1674 	  tree instance;
1675 	  struct ipa_polymorphic_call_context context (cs->caller->decl,
1676 						       arg, cs->call_stmt,
1677 						       &instance);
1678 	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1679 	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
1680 	  if (!context.useless_p ())
1681 	    useful_context = true;
1682 	}
1683 
1684       if (POINTER_TYPE_P (TREE_TYPE (arg)))
1685 	{
1686 	  unsigned HOST_WIDE_INT hwi_bitpos;
1687 	  unsigned align;
1688 
1689 	  get_pointer_alignment_1 (arg, &align, &hwi_bitpos);
1690 	  if (align > BITS_PER_UNIT
1691 	      && align % BITS_PER_UNIT == 0
1692 	      && hwi_bitpos % BITS_PER_UNIT == 0)
1693 	    {
1694 	      jfunc->alignment.known = true;
1695 	      jfunc->alignment.align = align / BITS_PER_UNIT;
1696 	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
1697 	    }
1698 	  else
1699 	    gcc_assert (!jfunc->alignment.known);
1700 	}
1701       else
1702 	gcc_assert (!jfunc->alignment.known);
1703 
1704       if (is_gimple_ip_invariant (arg))
1705 	ipa_set_jf_constant (jfunc, arg, cs);
1706       else if (!is_gimple_reg_type (TREE_TYPE (arg))
1707 	       && TREE_CODE (arg) == PARM_DECL)
1708 	{
1709 	  int index = ipa_get_param_decl_index (info, arg);
1710 
1711 	  gcc_assert (index >= 0);
1712 	  /* Aggregate passed by value; check for a pass-through, otherwise we
1713 	     will attempt to fill in the aggregate contents later in this
1714 	     loop.  */
1715 	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1716 	    {
1717 	      ipa_set_jf_simple_pass_through (jfunc, index, false);
1718 	      continue;
1719 	    }
1720 	}
1721       else if (TREE_CODE (arg) == SSA_NAME)
1722 	{
1723 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
1724 	    {
1725 	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1726 	      if (index >= 0)
1727 		{
1728 		  bool agg_p;
1729 		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1730 		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1731 		}
1732 	    }
1733 	  else
1734 	    {
1735 	      gimple stmt = SSA_NAME_DEF_STMT (arg);
1736 	      if (is_gimple_assign (stmt))
1737 		compute_complex_assign_jump_func (fbi, info, jfunc,
1738 						  call, stmt, arg, param_type);
1739 	      else if (gimple_code (stmt) == GIMPLE_PHI)
1740 		compute_complex_ancestor_jump_func (fbi, info, jfunc,
1741 						    call,
1742 						    as_a <gphi *> (stmt));
1743 	    }
1744 	}
1745 
1746       /* If ARG is a pointer, we cannot use its type to determine the type of the
1747 	 aggregate passed (type conversions are ignored in gimple).  Usually we
1748 	 can safely get the type from the function declaration, but for K&R
1749 	 prototypes or variadic functions we have to try our luck with the type of
1750 	 the pointer passed.  TODO: Since we look for the actual initialization of
1751 	 the memory object, we may do better to work out the type from the stores we find.  */
1752       if (!param_type)
1753 	param_type = TREE_TYPE (arg);
1754 
1755       if ((jfunc->type != IPA_JF_PASS_THROUGH
1756 	      || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1757 	  && (jfunc->type != IPA_JF_ANCESTOR
1758 	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1759 	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1760 	      || POINTER_TYPE_P (param_type)))
1761 	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1762     }
1763   if (!useful_context)
1764     vec_free (args->polymorphic_call_contexts);
1765 }
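/* A worked example of the classification above (hypothetical caller, not
   from a testcase):

     void caller (int x, struct S s, struct S *p)
     {
       callee (7, x, s);
     }

   Here the first argument is a gimple IP invariant and gets an IPA_JF_CONST
   jump function, the second is the default-definition SSA name of parameter
   X and gets an IPA_JF_PASS_THROUGH, and the by-value aggregate S also gets
   a simple pass-through provided parm_preserved_before_stmt_p confirms it
   has not been modified before the call.  An argument loaded from memory,
   say p->next, would instead go through compute_complex_assign_jump_func
   and may well end up as IPA_JF_UNKNOWN.  */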
1766 
1767 /* Compute jump functions for all edges - both direct and indirect - outgoing
1768    from BB.  */
1769 
1770 static void
1771 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1772 {
1773   struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1774   int i;
1775   struct cgraph_edge *cs;
1776 
1777   FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1778     {
1779       struct cgraph_node *callee = cs->callee;
1780 
1781       if (callee)
1782 	{
1783 	  callee->ultimate_alias_target ();
1784 	  /* We do not need to bother analyzing calls to unknown functions
1785 	     unless they may become known during lto/whopr.  */
1786 	  if (!callee->definition && !flag_lto)
1787 	    continue;
1788 	}
1789       ipa_compute_jump_functions_for_edge (fbi, cs);
1790     }
1791 }
1792 
1793 /* If STMT looks like a statement loading a value from a member pointer formal
1794    parameter, return that parameter and store the offset of the field to
1795    *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
1796    might be clobbered).  If USE_DELTA, then we look for a use of the delta
1797    field rather than the pfn.  */
1798 
1799 static tree
1800 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1801 				    HOST_WIDE_INT *offset_p)
1802 {
1803   tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1804 
1805   if (!gimple_assign_single_p (stmt))
1806     return NULL_TREE;
1807 
1808   rhs = gimple_assign_rhs1 (stmt);
1809   if (TREE_CODE (rhs) == COMPONENT_REF)
1810     {
1811       ref_field = TREE_OPERAND (rhs, 1);
1812       rhs = TREE_OPERAND (rhs, 0);
1813     }
1814   else
1815     ref_field = NULL_TREE;
1816   if (TREE_CODE (rhs) != MEM_REF)
1817     return NULL_TREE;
1818   rec = TREE_OPERAND (rhs, 0);
1819   if (TREE_CODE (rec) != ADDR_EXPR)
1820     return NULL_TREE;
1821   rec = TREE_OPERAND (rec, 0);
1822   if (TREE_CODE (rec) != PARM_DECL
1823       || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1824     return NULL_TREE;
1825   ref_offset = TREE_OPERAND (rhs, 1);
1826 
1827   if (use_delta)
1828     fld = delta_field;
1829   else
1830     fld = ptr_field;
1831   if (offset_p)
1832     *offset_p = int_bit_position (fld);
1833 
1834   if (ref_field)
1835     {
1836       if (integer_nonzerop (ref_offset))
1837 	return NULL_TREE;
1838       return ref_field == fld ? rec : NULL_TREE;
1839     }
1840   else
1841     return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1842       : NULL_TREE;
1843 }
1844 
1845 /* Returns true iff T is an SSA_NAME defined by a statement.  */
1846 
1847 static bool
1848 ipa_is_ssa_with_stmt_def (tree t)
1849 {
1850   if (TREE_CODE (t) == SSA_NAME
1851       && !SSA_NAME_IS_DEFAULT_DEF (t))
1852     return true;
1853   else
1854     return false;
1855 }
1856 
1857 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1858    call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
1859    indirect call graph edge.  */
1860 
1861 static struct cgraph_edge *
1862 ipa_note_param_call (struct cgraph_node *node, int param_index,
1863 		     gcall *stmt)
1864 {
1865   struct cgraph_edge *cs;
1866 
1867   cs = node->get_edge (stmt);
1868   cs->indirect_info->param_index = param_index;
1869   cs->indirect_info->agg_contents = 0;
1870   cs->indirect_info->member_ptr = 0;
1871   return cs;
1872 }
1873 
1874 /* Analyze indirect call CALL whose called pointer is TARGET and examine uses
1875    of formal parameters of the caller FBI->node (described by FBI->info).
1876    Currently it checks whether the call calls a pointer that is a formal
1877    parameter and if so, the
1878    parameter is marked with the called flag and an indirect call graph edge
1879    describing the call is created.  This is very simple for ordinary pointers
1880    represented in SSA but not-so-nice when it comes to member pointers.  The
1881    ugly part of this function does nothing more than trying to match the
1882    pattern of such a call.  An example of such a pattern is the gimple dump
1883    below, the call is on the last line:
1884 
1885      <bb 2>:
1886        f$__delta_5 = f.__delta;
1887        f$__pfn_24 = f.__pfn;
1888 
1889    or
1890      <bb 2>:
1891        f$__delta_5 = MEM[(struct  *)&f];
1892        f$__pfn_24 = MEM[(struct  *)&f + 4B];
1893 
1894    and a few lines below:
1895 
1896      <bb 5>
1897        D.2496_3 = (int) f$__pfn_24;
1898        D.2497_4 = D.2496_3 & 1;
1899        if (D.2497_4 != 0)
1900          goto <bb 3>;
1901        else
1902          goto <bb 4>;
1903 
1904      <bb 6>:
1905        D.2500_7 = (unsigned int) f$__delta_5;
1906        D.2501_8 = &S + D.2500_7;
1907        D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1908        D.2503_10 = *D.2502_9;
1909        D.2504_12 = f$__pfn_24 + -1;
1910        D.2505_13 = (unsigned int) D.2504_12;
1911        D.2506_14 = D.2503_10 + D.2505_13;
1912        D.2507_15 = *D.2506_14;
1913        iftmp.11_16 = (String:: *) D.2507_15;
1914 
1915      <bb 7>:
1916        # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1917        D.2500_19 = (unsigned int) f$__delta_5;
1918        D.2508_20 = &S + D.2500_19;
1919        D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1920 
1921    Such patterns are results of simple calls to a member pointer:
1922 
1923      int doprinting (int (MyString::* f)(int) const)
1924      {
1925        MyString S ("somestring");
1926 
1927        return (S.*f)(4);
1928      }
1929 
1930    Moreover, the function also looks for called pointers loaded from aggregates
1931    passed by value or reference.  */
1932 
1933 static void
1934 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1935 				tree target)
1936 {
1937   struct ipa_node_params *info = fbi->info;
1938   HOST_WIDE_INT offset;
1939   bool by_ref;
1940 
1941   if (SSA_NAME_IS_DEFAULT_DEF (target))
1942     {
1943       tree var = SSA_NAME_VAR (target);
1944       int index = ipa_get_param_decl_index (info, var);
1945       if (index >= 0)
1946 	ipa_note_param_call (fbi->node, index, call);
1947       return;
1948     }
1949 
1950   int index;
1951   gimple def = SSA_NAME_DEF_STMT (target);
1952   if (gimple_assign_single_p (def)
1953       && ipa_load_from_parm_agg (fbi, info->descriptors, def,
1954 				 gimple_assign_rhs1 (def), &index, &offset,
1955 				 NULL, &by_ref))
1956     {
1957       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1958       cs->indirect_info->offset = offset;
1959       cs->indirect_info->agg_contents = 1;
1960       cs->indirect_info->by_ref = by_ref;
1961       return;
1962     }
1963 
1964   /* Now we need to try to match the complex pattern of calling a member
1965      pointer. */
1966   if (gimple_code (def) != GIMPLE_PHI
1967       || gimple_phi_num_args (def) != 2
1968       || !POINTER_TYPE_P (TREE_TYPE (target))
1969       || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1970     return;
1971 
1972   /* First, we need to check whether one of these is a load from a member
1973      pointer that is a parameter to this function. */
1974   tree n1 = PHI_ARG_DEF (def, 0);
1975   tree n2 = PHI_ARG_DEF (def, 1);
1976   if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1977     return;
1978   gimple d1 = SSA_NAME_DEF_STMT (n1);
1979   gimple d2 = SSA_NAME_DEF_STMT (n2);
1980 
1981   tree rec;
1982   basic_block bb, virt_bb;
1983   basic_block join = gimple_bb (def);
1984   if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1985     {
1986       if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1987 	return;
1988 
1989       bb = EDGE_PRED (join, 0)->src;
1990       virt_bb = gimple_bb (d2);
1991     }
1992   else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1993     {
1994       bb = EDGE_PRED (join, 1)->src;
1995       virt_bb = gimple_bb (d1);
1996     }
1997   else
1998     return;
1999 
2000   /* Second, we need to check that the basic blocks are laid out in the way
2001      corresponding to the pattern. */
2002 
2003   if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2004       || single_pred (virt_bb) != bb
2005       || single_succ (virt_bb) != join)
2006     return;
2007 
2008   /* Third, let's see that the branching is done depending on the least
2009      significant bit of the pfn. */
2010 
2011   gimple branch = last_stmt (bb);
2012   if (!branch || gimple_code (branch) != GIMPLE_COND)
2013     return;
2014 
2015   if ((gimple_cond_code (branch) != NE_EXPR
2016        && gimple_cond_code (branch) != EQ_EXPR)
2017       || !integer_zerop (gimple_cond_rhs (branch)))
2018     return;
2019 
2020   tree cond = gimple_cond_lhs (branch);
2021   if (!ipa_is_ssa_with_stmt_def (cond))
2022     return;
2023 
2024   def = SSA_NAME_DEF_STMT (cond);
2025   if (!is_gimple_assign (def)
2026       || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2027       || !integer_onep (gimple_assign_rhs2 (def)))
2028     return;
2029 
2030   cond = gimple_assign_rhs1 (def);
2031   if (!ipa_is_ssa_with_stmt_def (cond))
2032     return;
2033 
2034   def = SSA_NAME_DEF_STMT (cond);
2035 
2036   if (is_gimple_assign (def)
2037       && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2038     {
2039       cond = gimple_assign_rhs1 (def);
2040       if (!ipa_is_ssa_with_stmt_def (cond))
2041 	return;
2042       def = SSA_NAME_DEF_STMT (cond);
2043     }
2044 
2045   tree rec2;
2046   rec2 = ipa_get_stmt_member_ptr_load_param (def,
2047 					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
2048 					      == ptrmemfunc_vbit_in_delta),
2049 					     NULL);
2050   if (rec != rec2)
2051     return;
2052 
2053   index = ipa_get_param_decl_index (info, rec);
2054   if (index >= 0
2055       && parm_preserved_before_stmt_p (fbi, index, call, rec))
2056     {
2057       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2058       cs->indirect_info->offset = offset;
2059       cs->indirect_info->agg_contents = 1;
2060       cs->indirect_info->member_ptr = 1;
2061     }
2062 
2063   return;
2064 }
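/* The simple case handled at the top of this function corresponds to source
   like the following (hypothetical):

     void caller (void (*cb) (int))
     {
       cb (4);
     }

   The call target is the default-definition SSA name of parameter CB, so
   the call is simply noted against CB's parameter index.  The rest of the
   function deals with loads from aggregates and with the member-pointer
   gimple pattern shown in the comment above.  */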
2065 
2066 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2067    object referenced in the expression is a formal parameter of the caller
2068    FBI->node (described by FBI->info), create a call note for the
2069    statement.  */
2070 
2071 static void
2072 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2073 			       gcall *call, tree target)
2074 {
2075   tree obj = OBJ_TYPE_REF_OBJECT (target);
2076   int index;
2077   HOST_WIDE_INT anc_offset;
2078 
2079   if (!flag_devirtualize)
2080     return;
2081 
2082   if (TREE_CODE (obj) != SSA_NAME)
2083     return;
2084 
2085   struct ipa_node_params *info = fbi->info;
2086   if (SSA_NAME_IS_DEFAULT_DEF (obj))
2087     {
2088       struct ipa_jump_func jfunc;
2089       if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2090 	return;
2091 
2092       anc_offset = 0;
2093       index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2094       gcc_assert (index >= 0);
2095       if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2096 				  call, &jfunc))
2097 	return;
2098     }
2099   else
2100     {
2101       struct ipa_jump_func jfunc;
2102       gimple stmt = SSA_NAME_DEF_STMT (obj);
2103       tree expr;
2104 
2105       expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2106       if (!expr)
2107 	return;
2108       index = ipa_get_param_decl_index (info,
2109 					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2110       gcc_assert (index >= 0);
2111       if (detect_type_change (obj, expr, obj_type_ref_class (target),
2112 			      call, &jfunc, anc_offset))
2113 	return;
2114     }
2115 
2116   struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2117   struct cgraph_indirect_call_info *ii = cs->indirect_info;
2118   ii->offset = anc_offset;
2119   ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2120   ii->otr_type = obj_type_ref_class (target);
2121   ii->polymorphic = 1;
2122 }
2123 
2124 /* Analyze call statement CALL to determine whether and how it utilizes
2125    formal parameters of the caller, described by FBI->info and the other
2126    fields of FBI.  */
2127 
2128 static void
2129 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2130 {
2131   tree target = gimple_call_fn (call);
2132 
2133   if (!target
2134       || (TREE_CODE (target) != SSA_NAME
2135           && !virtual_method_call_p (target)))
2136     return;
2137 
2138   struct cgraph_edge *cs = fbi->node->get_edge (call);
2139   /* If we previously turned the call into a direct call, there is
2140      no need to analyze.  */
2141   if (cs && !cs->indirect_unknown_callee)
2142     return;
2143 
2144   if (cs->indirect_info->polymorphic && flag_devirtualize)
2145     {
2146       tree instance;
2147       tree target = gimple_call_fn (call);
2148       ipa_polymorphic_call_context context (current_function_decl,
2149 					    target, call, &instance);
2150 
2151       gcc_checking_assert (cs->indirect_info->otr_type
2152 			   == obj_type_ref_class (target));
2153       gcc_checking_assert (cs->indirect_info->otr_token
2154 			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2155 
2156       cs->indirect_info->vptr_changed
2157 	= !context.get_dynamic_type (instance,
2158 				     OBJ_TYPE_REF_OBJECT (target),
2159 				     obj_type_ref_class (target), call);
2160       cs->indirect_info->context = context;
2161     }
2162 
2163   if (TREE_CODE (target) == SSA_NAME)
2164     ipa_analyze_indirect_call_uses (fbi, call, target);
2165   else if (virtual_method_call_p (target))
2166     ipa_analyze_virtual_call_uses (fbi, call, target);
2167 }
2168 
2169 
2170 /* Analyze statement STMT with respect to formal parameters (described in
2171    FBI->info) of the caller given by FBI->node.  Currently it only checks
2172    whether formal parameters are called.  */
2173 
2174 static void
2175 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple stmt)
2176 {
2177   if (is_gimple_call (stmt))
2178     ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2179 }
2180 
2181 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2182    address operands alike.  If OP is a parameter declaration, mark it as
2183    used in the info structure passed in DATA.  */
2184 
2185 static bool
2186 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2187 {
2188   struct ipa_node_params *info = (struct ipa_node_params *) data;
2189 
2190   op = get_base_address (op);
2191   if (op
2192       && TREE_CODE (op) == PARM_DECL)
2193     {
2194       int index = ipa_get_param_decl_index (info, op);
2195       gcc_assert (index >= 0);
2196       ipa_set_param_used (info, index, true);
2197     }
2198 
2199   return false;
2200 }
2201 
2202 /* Scan the statements in BB and inspect the uses of formal parameters.  Store
2203    the findings in various structures of the associated ipa_node_params
2204    structure, such as parameter flags, notes, etc.  FBI holds various data about
2205    the function being analyzed.  */
2206 
2207 static void
2208 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2209 {
2210   gimple_stmt_iterator gsi;
2211   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2212     {
2213       gimple stmt = gsi_stmt (gsi);
2214 
2215       if (is_gimple_debug (stmt))
2216 	continue;
2217 
2218       ipa_analyze_stmt_uses (fbi, stmt);
2219       walk_stmt_load_store_addr_ops (stmt, fbi->info,
2220 				     visit_ref_for_mod_analysis,
2221 				     visit_ref_for_mod_analysis,
2222 				     visit_ref_for_mod_analysis);
2223     }
2224   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2225     walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2226 				   visit_ref_for_mod_analysis,
2227 				   visit_ref_for_mod_analysis,
2228 				   visit_ref_for_mod_analysis);
2229 }
2230 
2231 /* Calculate controlled uses of parameters of NODE.  */
2232 
2233 static void
2234 ipa_analyze_controlled_uses (struct cgraph_node *node)
2235 {
2236   struct ipa_node_params *info = IPA_NODE_REF (node);
2237 
2238   for (int i = 0; i < ipa_get_param_count (info); i++)
2239     {
2240       tree parm = ipa_get_param (info, i);
2241       int controlled_uses = 0;
2242 
2243       /* For SSA regs see if parameter is used.  For non-SSA we compute
2244 	 the flag during modification analysis.  */
2245       if (is_gimple_reg (parm))
2246 	{
2247 	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2248 				       parm);
2249 	  if (ddef && !has_zero_uses (ddef))
2250 	    {
2251 	      imm_use_iterator imm_iter;
2252 	      use_operand_p use_p;
2253 
2254 	      ipa_set_param_used (info, i, true);
2255 	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2256 		if (!is_gimple_call (USE_STMT (use_p)))
2257 		  {
2258 		    if (!is_gimple_debug (USE_STMT (use_p)))
2259 		      {
2260 			controlled_uses = IPA_UNDESCRIBED_USE;
2261 			break;
2262 		      }
2263 		  }
2264 		else
2265 		  controlled_uses++;
2266 	    }
2267 	  else
2268 	    controlled_uses = 0;
2269 	}
2270       else
2271 	controlled_uses = IPA_UNDESCRIBED_USE;
2272       ipa_set_controlled_uses (info, i, controlled_uses);
2273     }
2274 }
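/* As an example of the counting above (hypothetical):

     void f (void (*g) (void))
     {
       h (g);
       h (g);
     }

   The default definition of G has exactly two uses, both of them arguments
   of calls, so G ends up with a controlled-uses count of 2.  A single
   non-call, non-debug use anywhere in the body would instead force
   IPA_UNDESCRIBED_USE.  */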
2275 
2276 /* Free stuff in BI.  */
2277 
2278 static void
2279 free_ipa_bb_info (struct ipa_bb_info *bi)
2280 {
2281   bi->cg_edges.release ();
2282   bi->param_aa_statuses.release ();
2283 }
2284 
2285 /* Dominator walker driving the analysis.  */
2286 
2287 class analysis_dom_walker : public dom_walker
2288 {
2289 public:
2290   analysis_dom_walker (struct ipa_func_body_info *fbi)
2291     : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2292 
2293   virtual void before_dom_children (basic_block);
2294 
2295 private:
2296   struct ipa_func_body_info *m_fbi;
2297 };
2298 
2299 void
2300 analysis_dom_walker::before_dom_children (basic_block bb)
2301 {
2302   ipa_analyze_params_uses_in_bb (m_fbi, bb);
2303   ipa_compute_jump_functions_for_bb (m_fbi, bb);
2304 }
2305 
2306 /* Initialize the array describing properties of formal parameters
2307    of NODE, analyze their uses and compute jump functions associated
2308    with actual arguments of calls from within NODE.  */
2309 
2310 void
2311 ipa_analyze_node (struct cgraph_node *node)
2312 {
2313   struct ipa_func_body_info fbi;
2314   struct ipa_node_params *info;
2315 
2316   ipa_check_create_node_params ();
2317   ipa_check_create_edge_args ();
2318   info = IPA_NODE_REF (node);
2319 
2320   if (info->analysis_done)
2321     return;
2322   info->analysis_done = 1;
2323 
2324   if (ipa_func_spec_opts_forbid_analysis_p (node))
2325     {
2326       for (int i = 0; i < ipa_get_param_count (info); i++)
2327 	{
2328 	  ipa_set_param_used (info, i, true);
2329 	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2330 	}
2331       return;
2332     }
2333 
2334   struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2335   push_cfun (func);
2336   calculate_dominance_info (CDI_DOMINATORS);
2337   ipa_initialize_node_params (node);
2338   ipa_analyze_controlled_uses (node);
2339 
2340   fbi.node = node;
2341   fbi.info = IPA_NODE_REF (node);
2342   fbi.bb_infos = vNULL;
2343   fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2344   fbi.param_count = ipa_get_param_count (info);
2345   fbi.aa_walked = 0;
2346 
2347   for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2348     {
2349       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2350       bi->cg_edges.safe_push (cs);
2351     }
2352 
2353   for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2354     {
2355       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2356       bi->cg_edges.safe_push (cs);
2357     }
2358 
2359   analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2360 
2361   int i;
2362   struct ipa_bb_info *bi;
2363   FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2364     free_ipa_bb_info (bi);
2365   fbi.bb_infos.release ();
2366   free_dominance_info (CDI_DOMINATORS);
2367   pop_cfun ();
2368 }
2369 
2370 /* Update the jump functions associated with call graph edge E when the call
2371    graph edge CS is being inlined, assuming that E->caller is already (possibly
2372    indirectly) inlined into CS->callee and that E has not been inlined.  */
2373 
2374 static void
2375 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2376 				      struct cgraph_edge *e)
2377 {
2378   struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2379   struct ipa_edge_args *args = IPA_EDGE_REF (e);
2380   int count = ipa_get_cs_argument_count (args);
2381   int i;
2382 
2383   for (i = 0; i < count; i++)
2384     {
2385       struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2386       struct ipa_polymorphic_call_context *dst_ctx
2387 	= ipa_get_ith_polymorhic_call_context (args, i);
2388 
2389       if (dst->type == IPA_JF_ANCESTOR)
2390 	{
2391 	  struct ipa_jump_func *src;
2392 	  int dst_fid = dst->value.ancestor.formal_id;
2393 	  struct ipa_polymorphic_call_context *src_ctx
2394 	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2395 
2396 	  /* A variable number of arguments can cause havoc if we try to access
2397 	     an argument that does not exist on the inlined edge.  So make sure
2398 	     we don't.  */
2399 	  if (dst_fid >= ipa_get_cs_argument_count (top))
2400 	    {
2401 	      ipa_set_jf_unknown (dst);
2402 	      continue;
2403 	    }
2404 
2405 	  src = ipa_get_ith_jump_func (top, dst_fid);
2406 
2407 	  if (src_ctx && !src_ctx->useless_p ())
2408 	    {
2409 	      struct ipa_polymorphic_call_context ctx = *src_ctx;
2410 
2411 	      /* TODO: Make type preserved safe WRT contexts.  */
2412 	      if (!ipa_get_jf_ancestor_type_preserved (dst))
2413 		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2414 	      ctx.offset_by (dst->value.ancestor.offset);
2415 	      if (!ctx.useless_p ())
2416 		{
2417 		  if (!dst_ctx)
2418 		    {
2419 		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
2420 					     count);
2421 		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2422 		    }
2423 
2424 		  dst_ctx->combine_with (ctx);
2425 		}
2426 	    }
2427 
2428 	  if (src->agg.items
2429 	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2430 	    {
2431 	      struct ipa_agg_jf_item *item;
2432 	      int j;
2433 
2434 	      /* Currently we do not produce clobber aggregate jump functions,
2435 		 replace with merging when we do.  */
2436 	      gcc_assert (!dst->agg.items);
2437 
2438 	      dst->agg.items = vec_safe_copy (src->agg.items);
2439 	      dst->agg.by_ref = src->agg.by_ref;
2440 	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2441 		item->offset -= dst->value.ancestor.offset;
2442 	    }
2443 
2444 	  if (src->type == IPA_JF_PASS_THROUGH
2445 	      && src->value.pass_through.operation == NOP_EXPR)
2446 	    {
2447 	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2448 	      dst->value.ancestor.agg_preserved &=
2449 		src->value.pass_through.agg_preserved;
2450 	    }
2451 	  else if (src->type == IPA_JF_ANCESTOR)
2452 	    {
2453 	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2454 	      dst->value.ancestor.offset += src->value.ancestor.offset;
2455 	      dst->value.ancestor.agg_preserved &=
2456 		src->value.ancestor.agg_preserved;
2457 	    }
2458 	  else
2459 	    ipa_set_jf_unknown (dst);
2460 	}
2461       else if (dst->type == IPA_JF_PASS_THROUGH)
2462 	{
2463 	  struct ipa_jump_func *src;
2464 	  /* We must check the range due to calls with a variable number of
2465 	     arguments, and we cannot combine jump functions with operations.  */
2466 	  if (dst->value.pass_through.operation == NOP_EXPR
2467 	      && (dst->value.pass_through.formal_id
2468 		  < ipa_get_cs_argument_count (top)))
2469 	    {
2470 	      int dst_fid = dst->value.pass_through.formal_id;
2471 	      src = ipa_get_ith_jump_func (top, dst_fid);
2472 	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2473 	      struct ipa_polymorphic_call_context *src_ctx
2474 		= ipa_get_ith_polymorhic_call_context (top, dst_fid);
2475 
2476 	      if (src_ctx && !src_ctx->useless_p ())
2477 		{
2478 		  struct ipa_polymorphic_call_context ctx = *src_ctx;
2479 
2480 		  /* TODO: Make type preserved safe WRT contexts.  */
2481 		  if (!ipa_get_jf_pass_through_type_preserved (dst))
2482 		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2483 		  if (!ctx.useless_p ())
2484 		    {
2485 		      if (!dst_ctx)
2486 			{
2487 			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
2488 					         count);
2489 			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2490 			}
2491 		      dst_ctx->combine_with (ctx);
2492 		    }
2493 		}
2494 	      switch (src->type)
2495 		{
2496 		case IPA_JF_UNKNOWN:
2497 		  ipa_set_jf_unknown (dst);
2498 		  break;
2499 		case IPA_JF_CONST:
2500 		  ipa_set_jf_cst_copy (dst, src);
2501 		  break;
2502 
2503 		case IPA_JF_PASS_THROUGH:
2504 		  {
2505 		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
2506 		    enum tree_code operation;
2507 		    operation = ipa_get_jf_pass_through_operation (src);
2508 
2509 		    if (operation == NOP_EXPR)
2510 		      {
2511 			bool agg_p;
2512 			agg_p = dst_agg_p
2513 			  && ipa_get_jf_pass_through_agg_preserved (src);
2514 			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2515 		      }
2516 		    else
2517 		      {
2518 			tree operand = ipa_get_jf_pass_through_operand (src);
2519 			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2520 						       operation);
2521 		      }
2522 		    break;
2523 		  }
2524 		case IPA_JF_ANCESTOR:
2525 		  {
2526 		    bool agg_p;
2527 		    agg_p = dst_agg_p
2528 		      && ipa_get_jf_ancestor_agg_preserved (src);
2529 		    ipa_set_ancestor_jf (dst,
2530 					 ipa_get_jf_ancestor_offset (src),
2531 					 ipa_get_jf_ancestor_formal_id (src),
2532 					 agg_p);
2533 		    break;
2534 		  }
2535 		default:
2536 		  gcc_unreachable ();
2537 		}
2538 
2539 	      if (src->agg.items
2540 		  && (dst_agg_p || !src->agg.by_ref))
2541 		{
2542 		  /* Currently we do not produce clobber aggregate jump
2543 		     functions, replace with merging when we do.  */
2544 		  gcc_assert (!dst->agg.items);
2545 
2546 		  dst->agg.by_ref = src->agg.by_ref;
2547 		  dst->agg.items = vec_safe_copy (src->agg.items);
2548 		}
2549 	    }
2550 	  else
2551 	    ipa_set_jf_unknown (dst);
2552 	}
2553     }
2554 }
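/* A composition example for the pass-through case above (hypothetical
   indices): suppose the inlined edge CS passed its caller's parameter 0
   unchanged as the callee's parameter 2, and an edge E inside the callee
   carries a simple pass-through jump function referring to formal 2.  The
   code above rewrites E's jump function to refer to formal 0 of the new
   root, so later propagation can keep tracking the value through the
   combined body.  */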
2555 
2556 /* If TARGET is an addr_expr of a function declaration, make it the
2557    (SPECULATIVE) destination of an indirect edge IE and return the edge.
2558    Otherwise, return NULL.  */
2559 
2560 struct cgraph_edge *
2561 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2562 				bool speculative)
2563 {
2564   struct cgraph_node *callee;
2565   struct inline_edge_summary *es = inline_edge_summary (ie);
2566   bool unreachable = false;
2567 
2568   if (TREE_CODE (target) == ADDR_EXPR)
2569     target = TREE_OPERAND (target, 0);
2570   if (TREE_CODE (target) != FUNCTION_DECL)
2571     {
2572       target = canonicalize_constructor_val (target, NULL);
2573       if (!target || TREE_CODE (target) != FUNCTION_DECL)
2574 	{
2575 	  /* Member pointer call that goes through a VMT lookup.  */
2576 	  if (ie->indirect_info->member_ptr
2577 	      /* Or if target is not an invariant expression and we do not
2578 		 know if it will evaluate to a function at runtime.
2579 		 This can happen when folding through &VAR, where &VAR
2580 		 is IP invariant, but VAR itself is not.
2581 
2582 		 TODO: Revisit this when GCC 5 is branched.  It seems that
2583 		 member_ptr check is not needed and that we may try to fold
2584 		 the expression and see if VAR is readonly.  */
2585 	      || !is_gimple_ip_invariant (target))
2586 	    {
2587 	      if (dump_enabled_p ())
2588 		{
2589 		  location_t loc = gimple_location_safe (ie->call_stmt);
2590 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2591 				   "discovered direct call non-invariant "
2592 				   "%s/%i\n",
2593 				   ie->caller->name (), ie->caller->order);
2594 		}
2595 	      return NULL;
2596 	    }
2597 
2598 
2599           if (dump_enabled_p ())
2600 	    {
2601 	      location_t loc = gimple_location_safe (ie->call_stmt);
2602 	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2603 			       "discovered direct call to non-function in %s/%i, "
2604 			       "making it __builtin_unreachable\n",
2605 			       ie->caller->name (), ie->caller->order);
2606 	    }
2607 
2608 	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2609 	  callee = cgraph_node::get_create (target);
2610 	  unreachable = true;
2611 	}
2612       else
2613 	callee = cgraph_node::get (target);
2614     }
2615   else
2616     callee = cgraph_node::get (target);
2617 
2618   /* Because may-edges are not explicitly represented and the vtable may be
2619      external, we may create the first reference to the object in the unit.  */
2620   if (!callee || callee->global.inlined_to)
2621     {
2622 
2623       /* We had better ensure we can refer to it.
2624 	 In the case of static functions we are out of luck, since we have
2625 	 already removed their bodies.  In the case of public functions we may
2626 	 or may not introduce the reference.  */
2627       if (!canonicalize_constructor_val (target, NULL)
2628 	  || !TREE_PUBLIC (target))
2629 	{
2630 	  if (dump_file)
2631 	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2632 		     "(%s/%i -> %s/%i) but cannot refer to it. Giving up.\n",
2633 		     xstrdup_for_dump (ie->caller->name ()),
2634 		     ie->caller->order,
2635 		     xstrdup_for_dump (ie->callee->name ()),
2636 		     ie->callee->order);
2637 	  return NULL;
2638 	}
2639       callee = cgraph_node::get_create (target);
2640     }
2641 
2642   /* If the edge is already speculated, check it against the new target.  */
2643   if (speculative && ie->speculative)
2644     {
2645       struct cgraph_edge *e2;
2646       struct ipa_ref *ref;
2647       ie->speculative_call_info (e2, ie, ref);
2648       if (e2->callee->ultimate_alias_target ()
2649 	  != callee->ultimate_alias_target ())
2650 	{
2651 	  if (dump_file)
2652 	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2653 		     "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2654 		     xstrdup_for_dump (ie->caller->name ()),
2655 		     ie->caller->order,
2656 		     xstrdup_for_dump (callee->name ()),
2657 		     callee->order,
2658 		     xstrdup_for_dump (e2->callee->name ()),
2659 		     e2->callee->order);
2660 	}
2661       else
2662 	{
2663 	  if (dump_file)
2664 	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2665 		     "(%s/%i -> %s/%i); this agrees with previous speculation.\n",
2666 		     xstrdup_for_dump (ie->caller->name ()),
2667 		     ie->caller->order,
2668 		     xstrdup_for_dump (callee->name ()),
2669 		     callee->order);
2670 	}
2671       return NULL;
2672     }
2673 
2674   if (!dbg_cnt (devirt))
2675     return NULL;
2676 
2677   ipa_check_create_node_params ();
2678 
2679   /* We cannot make edges to inline clones.  It is a bug that someone removed
2680      the cgraph node too early.  */
2681   gcc_assert (!callee->global.inlined_to);
2682 
2683   if (dump_file && !unreachable)
2684     {
2685       fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2686 	       "(%s/%i -> %s/%i), for stmt ",
2687 	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2688 	       speculative ? "speculative" : "known",
2689 	       xstrdup_for_dump (ie->caller->name ()),
2690 	       ie->caller->order,
2691 	       xstrdup_for_dump (callee->name ()),
2692 	       callee->order);
2693       if (ie->call_stmt)
2694 	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2695       else
2696 	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2697      }
2698   if (dump_enabled_p ())
2699     {
2700       location_t loc = gimple_location_safe (ie->call_stmt);
2701 
2702       dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2703 		       "converting indirect call in %s to direct call to %s\n",
2704 		       ie->caller->name (), callee->name ());
2705     }
2706   if (!speculative)
2707     {
2708       struct cgraph_edge *orig = ie;
2709       ie = ie->make_direct (callee);
2710       /* If we resolved a speculative edge, the cost is already up to date
2711 	 for the direct call (adjusted by inline_edge_duplication_hook).  */
2712       if (ie == orig)
2713 	{
2714 	  es = inline_edge_summary (ie);
2715 	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2716 				 - eni_size_weights.call_cost);
2717 	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2718 				 - eni_time_weights.call_cost);
2719 	}
2720     }
2721   else
2722     {
2723       if (!callee->can_be_discarded_p ())
2724 	{
2725 	  cgraph_node *alias;
2726 	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2727 	  if (alias)
2728 	    callee = alias;
2729 	}
2730       /* make_speculative will update ie's cost to direct call cost. */
2731       ie = ie->make_speculative
2732 	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2733     }
2734 
2735   return ie;
2736 }
2737 
2738 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2739    return NULL if there is none.  BY_REF specifies whether the value has to
2740    be passed by reference or by value.  */
2741 
2742 tree
2743 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2744 			    HOST_WIDE_INT offset, bool by_ref)
2745 {
2746   struct ipa_agg_jf_item *item;
2747   int i;
2748 
2749   if (by_ref != agg->by_ref)
2750     return NULL;
2751 
2752   FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2753     if (item->offset == offset)
2754       {
2755 	/* Currently we do not have clobber values, return NULL for them once
2756 	   we do.  */
2757 	gcc_checking_assert (is_gimple_ip_invariant (item->value));
2758 	return item->value;
2759       }
2760   return NULL;
2761 }
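/* A minimal usage sketch (hypothetical jump function contents): if
   JFUNC->agg describes an aggregate passed by reference whose item at
   offset 0 is the constant 42, then

     tree val = ipa_find_agg_cst_for_param (&jfunc->agg, 0, true);

   returns the INTEGER_CST for 42, while passing false for BY_REF returns
   NULL because the by_ref kinds do not match.  */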
2762 
2763 /* Remove a reference to SYMBOL from the list of references of a node given by
2764    reference description RDESC.  Return true if the reference has been
2765    successfully found and removed.  */
2766 
2767 static bool
2768 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2769 {
2770   struct ipa_ref *to_del;
2771   struct cgraph_edge *origin;
2772 
2773   origin = rdesc->cs;
2774   if (!origin)
2775     return false;
2776   to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2777 					   origin->lto_stmt_uid);
2778   if (!to_del)
2779     return false;
2780 
2781   to_del->remove_reference ();
2782   if (dump_file)
2783     fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2784 	     xstrdup_for_dump (origin->caller->name ()),
2785 	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
2786   return true;
2787 }
2788 
2789 /* If JFUNC has a reference description with refcount different from
2790    IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2791    NULL.  JFUNC must be a constant jump function.  */
2792 
2793 static struct ipa_cst_ref_desc *
2794 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2795 {
2796   struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2797   if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2798     return rdesc;
2799   else
2800     return NULL;
2801 }
2802 
2803 /* If the value of constant jump function JFUNC is an address of a function
2804    declaration, return the associated call graph node.  Otherwise return
2805    NULL.  */
2806 
2807 static cgraph_node *
2808 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2809 {
2810   gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2811   tree cst = ipa_get_jf_constant (jfunc);
2812   if (TREE_CODE (cst) != ADDR_EXPR
2813       || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2814     return NULL;
2815 
2816   return cgraph_node::get (TREE_OPERAND (cst, 0));
2817 }
2818 
2819 
2820 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2821    refcount and if it hits zero, remove reference to SYMBOL from the caller of
2822    the edge specified in the rdesc.  Return false if either the symbol or the
2823    reference could not be found, otherwise return true.  */
2824 
2825 static bool
2826 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2827 {
2828   struct ipa_cst_ref_desc *rdesc;
2829   if (jfunc->type == IPA_JF_CONST
2830       && (rdesc = jfunc_rdesc_usable (jfunc))
2831       && --rdesc->refcount == 0)
2832     {
2833       symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2834       if (!symbol)
2835 	return false;
2836 
2837       return remove_described_reference (symbol, rdesc);
2838     }
2839   return true;
2840 }
2841 
2842 /* Try to find a destination for indirect edge IE that corresponds to a simple
2843    call or a call of a member function pointer and where the destination is a
2844    pointer formal parameter described by jump function JFUNC.  If it can be
2845    determined, return the newly direct edge, otherwise return NULL.
2846    NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */
2847 
2848 static struct cgraph_edge *
2849 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2850 				  struct ipa_jump_func *jfunc,
2851 				  struct ipa_node_params *new_root_info)
2852 {
2853   struct cgraph_edge *cs;
2854   tree target;
2855   bool agg_contents = ie->indirect_info->agg_contents;
2856 
2857   if (ie->indirect_info->agg_contents)
2858     target = ipa_find_agg_cst_for_param (&jfunc->agg,
2859 					 ie->indirect_info->offset,
2860 					 ie->indirect_info->by_ref);
2861   else
2862     target = ipa_value_from_jfunc (new_root_info, jfunc);
2863   if (!target)
2864     return NULL;
2865   cs = ipa_make_edge_direct_to_target (ie, target);
2866 
2867   if (cs && !agg_contents)
2868     {
2869       bool ok;
2870       gcc_checking_assert (cs->callee
2871 			   && (cs != ie
2872 			       || jfunc->type != IPA_JF_CONST
2873 			       || !cgraph_node_for_jfunc (jfunc)
2874 			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
2875       ok = try_decrement_rdesc_refcount (jfunc);
2876       gcc_checking_assert (ok);
2877     }
2878 
2879   return cs;
2880 }
2881 
2882 /* Return the target to be used in cases of impossible devirtualization.  IE
2883    and target (the latter can be NULL) are dumped when dumping is enabled.  */
2884 
2885 tree
2886 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2887 {
2888   if (dump_file)
2889     {
2890       if (target)
2891 	fprintf (dump_file,
2892 		 "Type inconsistent devirtualization: %s/%i->%s\n",
2893 		 ie->caller->name (), ie->caller->order,
2894 		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2895       else
2896 	fprintf (dump_file,
2897 		 "No devirtualization target in %s/%i\n",
2898 		 ie->caller->name (), ie->caller->order);
2899     }
2900   tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2901   cgraph_node::get_create (new_target);
2902   return new_target;
2903 }
2904 
2905 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2906    call based on a formal parameter which is described by jump function JFUNC
2907    and if it can be determined, make it direct and return the direct edge.
2908    Otherwise, return NULL.  CTX describes the polymorphic context brought
2909    along by the parameter on which the call is based.  */
2910 
2911 static struct cgraph_edge *
2912 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2913 				   struct ipa_jump_func *jfunc,
2914 				   struct ipa_polymorphic_call_context ctx)
2915 {
2916   tree target = NULL;
2917   bool speculative = false;
2918 
2919   if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2920     return NULL;
2921 
2922   gcc_assert (!ie->indirect_info->by_ref);
2923 
2924   /* Try to do lookup via known virtual table pointer value.  */
2925   if (!ie->indirect_info->vptr_changed
2926       || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2927     {
2928       tree vtable;
2929       unsigned HOST_WIDE_INT offset;
2930       tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2931 					   ie->indirect_info->offset,
2932 					   true);
2933       if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2934 	{
2935 	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2936 						      vtable, offset);
2937 	  if (t)
2938 	    {
2939 	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2940 		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2941 		  || !possible_polymorphic_call_target_p
2942 		       (ie, cgraph_node::get (t)))
2943 		{
2944 		  /* Do not speculate builtin_unreachable, it is stupid!  */
2945 		  if (!ie->indirect_info->vptr_changed)
2946 		    target = ipa_impossible_devirt_target (ie, target);
2947 		}
2948 	      else
2949 		{
2950 		  target = t;
2951 		  speculative = ie->indirect_info->vptr_changed;
2952 		}
2953 	    }
2954 	}
2955     }
2956 
2957   ipa_polymorphic_call_context ie_context (ie);
2958   vec<cgraph_node *> targets;
2959   bool final;
2960 
2961   ctx.offset_by (ie->indirect_info->offset);
2962   if (ie->indirect_info->vptr_changed)
2963     ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2964 				      ie->indirect_info->otr_type);
2965   ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2966   targets = possible_polymorphic_call_targets
2967     (ie->indirect_info->otr_type,
2968      ie->indirect_info->otr_token,
2969      ctx, &final);
2970   if (final && targets.length () <= 1)
2971     {
2972       speculative = false;
2973       if (targets.length () == 1)
2974 	target = targets[0]->decl;
2975       else
2976 	target = ipa_impossible_devirt_target (ie, NULL_TREE);
2977     }
2978   else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2979 	   && !ie->speculative && ie->maybe_hot_p ())
2980     {
2981       cgraph_node *n;
2982       n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2983 					    ie->indirect_info->otr_token,
2984 					    ie->indirect_info->context);
2985       if (n)
2986 	{
2987 	  target = n->decl;
2988 	  speculative = true;
2989 	}
2990     }
2991 
2992   if (target)
2993     {
2994       if (!possible_polymorphic_call_target_p
2995 	  (ie, cgraph_node::get_create (target)))
2996 	{
2997 	  if (speculative)
2998 	    return NULL;
2999 	  target = ipa_impossible_devirt_target (ie, target);
3000 	}
3001       return ipa_make_edge_direct_to_target (ie, target, speculative);
3002     }
3003   else
3004     return NULL;
3005 }
3006 
3007 /* Update the param called notes associated with NODE when CS is being inlined,
3008    assuming NODE is (potentially indirectly) inlined into CS->callee.
3009    Moreover, if the callee is discovered to be constant, create a new cgraph
3010    edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
3011    unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */
3012 
3013 static bool
3014 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3015 				      struct cgraph_node *node,
3016 				      vec<cgraph_edge *> *new_edges)
3017 {
3018   struct ipa_edge_args *top;
3019   struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3020   struct ipa_node_params *new_root_info;
3021   bool res = false;
3022 
3023   ipa_check_create_edge_args ();
3024   top = IPA_EDGE_REF (cs);
3025   new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3026 				? cs->caller->global.inlined_to
3027 				: cs->caller);
3028 
3029   for (ie = node->indirect_calls; ie; ie = next_ie)
3030     {
3031       struct cgraph_indirect_call_info *ici = ie->indirect_info;
3032       struct ipa_jump_func *jfunc;
3033       int param_index;
3034       cgraph_node *spec_target = NULL;
3035 
3036       next_ie = ie->next_callee;
3037 
3038       if (ici->param_index == -1)
3039 	continue;
3040 
3041       /* We must check the range due to calls with a variable number of arguments.  */
3042       if (ici->param_index >= ipa_get_cs_argument_count (top))
3043 	{
3044 	  ici->param_index = -1;
3045 	  continue;
3046 	}
3047 
3048       param_index = ici->param_index;
3049       jfunc = ipa_get_ith_jump_func (top, param_index);
3050 
3051       if (ie->speculative)
3052 	{
3053 	  struct cgraph_edge *de;
3054           struct ipa_ref *ref;
3055 	  ie->speculative_call_info (de, ie, ref);
3056 	  spec_target = de->callee;
3057 	}
3058 
3059       if (!opt_for_fn (node->decl, flag_indirect_inlining))
3060 	new_direct_edge = NULL;
3061       else if (ici->polymorphic)
3062 	{
3063           ipa_polymorphic_call_context ctx;
3064 	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3065 	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3066 	}
3067       else
3068 	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3069 							    new_root_info);
3070       /* If speculation was removed, then we need to do nothing.  */
3071       if (new_direct_edge && new_direct_edge != ie
3072 	  && new_direct_edge->callee == spec_target)
3073 	{
3074 	  new_direct_edge->indirect_inlining_edge = 1;
3075 	  top = IPA_EDGE_REF (cs);
3076 	  res = true;
3077 	  if (!new_direct_edge->speculative)
3078 	    continue;
3079 	}
3080       else if (new_direct_edge)
3081 	{
3082 	  new_direct_edge->indirect_inlining_edge = 1;
3083 	  if (new_direct_edge->call_stmt)
3084 	    new_direct_edge->call_stmt_cannot_inline_p
3085 	      = !gimple_check_call_matching_types (
3086 		  new_direct_edge->call_stmt,
3087 		  new_direct_edge->callee->decl, false);
3088 	  if (new_edges)
3089 	    {
3090 	      new_edges->safe_push (new_direct_edge);
3091 	      res = true;
3092 	    }
3093 	  top = IPA_EDGE_REF (cs);
3094 	  /* If a speculative edge was introduced, we still need to update
3095 	     the call info of the indirect edge.  */
3096 	  if (!new_direct_edge->speculative)
3097 	    continue;
3098 	}
3099       if (jfunc->type == IPA_JF_PASS_THROUGH
3100           && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3101 	{
3102 	  if (ici->agg_contents
3103 	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3104 	      && !ici->polymorphic)
3105 	    ici->param_index = -1;
3106 	  else
3107 	    {
3108 	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3109 	      if (ici->polymorphic
3110 		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
3111 		ici->vptr_changed = true;
3112 	    }
3113 	}
3114       else if (jfunc->type == IPA_JF_ANCESTOR)
3115 	{
3116 	  if (ici->agg_contents
3117 	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3118 	      && !ici->polymorphic)
3119 	    ici->param_index = -1;
3120 	  else
3121 	    {
3122 	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3123 	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3124 	      if (ici->polymorphic
3125 		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
3126 		ici->vptr_changed = true;
3127 	    }
3128 	}
3129       else
3130 	/* Either we can find a destination for this edge now or never. */
3131 	ici->param_index = -1;
3132     }
3133 
3134   return res;
3135 }
3136 
3137 /* Recursively traverse subtree of NODE (including node) made of inlined
3138    cgraph_edges when CS has been inlined and invoke
3139    update_indirect_edges_after_inlining on all nodes and
3140    update_jump_functions_after_inlining on all non-inlined edges that lead out
3141    of this subtree.  Newly discovered indirect edges will be added to
3142    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
3143    created.  */
3144 
3145 static bool
3146 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3147 				   struct cgraph_node *node,
3148 				   vec<cgraph_edge *> *new_edges)
3149 {
3150   struct cgraph_edge *e;
3151   bool res;
3152 
3153   res = update_indirect_edges_after_inlining (cs, node, new_edges);
3154 
3155   for (e = node->callees; e; e = e->next_callee)
3156     if (!e->inline_failed)
3157       res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3158     else
3159       update_jump_functions_after_inlining (cs, e);
3160   for (e = node->indirect_calls; e; e = e->next_callee)
3161     update_jump_functions_after_inlining (cs, e);
3162 
3163   return res;
3164 }
3165 
3166 /* Combine two controlled uses counts as done during inlining.  */
3167 
3168 static int
3169 combine_controlled_uses_counters (int c, int d)
3170 {
3171   if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3172     return IPA_UNDESCRIBED_USE;
3173   else
3174     return c + d - 1;
3175 }
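/* A worked instance of the formula above (hypothetical counts): if the new
   root has C == 3 described uses of a value and one of them is the argument
   feeding a parameter with D == 2 described uses in the inlined callee,
   then the act of passing stops being a separate use after inlining and the
   combined body has 3 + 2 - 1 == 4 controlled uses.  */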
3176 
3177 /* Propagate number of controlled users from CS->callee to the new root of the
3178    tree of inlined nodes.  */
3179 
3180 static void
3181 propagate_controlled_uses (struct cgraph_edge *cs)
3182 {
3183   struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3184   struct cgraph_node *new_root = cs->caller->global.inlined_to
3185     ? cs->caller->global.inlined_to : cs->caller;
3186   struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3187   struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3188   int count, i;
3189 
3190   count = MIN (ipa_get_cs_argument_count (args),
3191 	       ipa_get_param_count (old_root_info));
3192   for (i = 0; i < count; i++)
3193     {
3194       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3195       struct ipa_cst_ref_desc *rdesc;
3196 
3197       if (jf->type == IPA_JF_PASS_THROUGH)
3198 	{
3199 	  int src_idx, c, d;
3200 	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
3201 	  c = ipa_get_controlled_uses (new_root_info, src_idx);
3202 	  d = ipa_get_controlled_uses (old_root_info, i);
3203 
3204 	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3205 			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3206 	  c = combine_controlled_uses_counters (c, d);
3207 	  ipa_set_controlled_uses (new_root_info, src_idx, c);
3208 	  if (c == 0 && new_root_info->ipcp_orig_node)
3209 	    {
3210 	      struct cgraph_node *n;
3211 	      struct ipa_ref *ref;
3212 	      tree t = new_root_info->known_csts[src_idx];
3213 
3214 	      if (t && TREE_CODE (t) == ADDR_EXPR
3215 		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3216 		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3217 		  && (ref = new_root->find_reference (n, NULL, 0)))
3218 		{
3219 		  if (dump_file)
3220 		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
3221 			     "reference from %s/%i to %s/%i.\n",
3222 			     xstrdup_for_dump (new_root->name ()),
3223 			     new_root->order,
3224 			     xstrdup_for_dump (n->name ()), n->order);
3225 		  ref->remove_reference ();
3226 		}
3227 	    }
3228 	}
3229       else if (jf->type == IPA_JF_CONST
3230 	       && (rdesc = jfunc_rdesc_usable (jf)))
3231 	{
3232 	  int d = ipa_get_controlled_uses (old_root_info, i);
3233 	  int c = rdesc->refcount;
3234 	  rdesc->refcount = combine_controlled_uses_counters (c, d);
3235 	  if (rdesc->refcount == 0)
3236 	    {
3237 	      tree cst = ipa_get_jf_constant (jf);
3238 	      struct cgraph_node *n;
3239 	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3240 				   && TREE_CODE (TREE_OPERAND (cst, 0))
3241 				   == FUNCTION_DECL);
3242 	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
3243 	      if (n)
3244 		{
3245 		  struct cgraph_node *clone;
3246 		  bool ok;
3247 		  ok = remove_described_reference (n, rdesc);
3248 		  gcc_checking_assert (ok);
3249 
3250 		  clone = cs->caller;
3251 		  while (clone->global.inlined_to
3252 			 && clone != rdesc->cs->caller
3253 			 && IPA_NODE_REF (clone)->ipcp_orig_node)
3254 		    {
3255 		      struct ipa_ref *ref;
3256 		      ref = clone->find_reference (n, NULL, 0);
3257 		      if (ref)
3258 			{
3259 			  if (dump_file)
3260 			    fprintf (dump_file, "ipa-prop: Removing "
3261 				     "cloning-created reference "
3262 				     "from %s/%i to %s/%i.\n",
3263 				     xstrdup_for_dump (clone->name ()),
3264 				     clone->order,
3265 				     xstrdup_for_dump (n->name ()),
3266 				     n->order);
3267 			  ref->remove_reference ();
3268 			}
3269 		      clone = clone->callers->caller;
3270 		    }
3271 		}
3272 	    }
3273 	}
3274     }
3275 
3276   for (i = ipa_get_param_count (old_root_info);
3277        i < ipa_get_cs_argument_count (args);
3278        i++)
3279     {
3280       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3281 
3282       if (jf->type == IPA_JF_CONST)
3283 	{
3284 	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3285 	  if (rdesc)
3286 	    rdesc->refcount = IPA_UNDESCRIBED_USE;
3287 	}
3288       else if (jf->type == IPA_JF_PASS_THROUGH)
3289 	ipa_set_controlled_uses (new_root_info,
3290 				 jf->value.pass_through.formal_id,
3291 				 IPA_UNDESCRIBED_USE);
3292     }
3293 }
3294 
3295 /* Update jump functions and call note functions on inlining the call site CS.
3296    CS is expected to lead to a node already cloned by
3297    cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
3298    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges
3299    were created.  */
3300 
3301 bool
3302 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3303 				   vec<cgraph_edge *> *new_edges)
3304 {
3305   bool changed;
3306   /* Do nothing if the preparation phase has not been carried out yet
3307      (i.e. during early inlining).  */
3308   if (!ipa_node_params_sum)
3309     return false;
3310   gcc_assert (ipa_edge_args_vector);
3311 
3312   propagate_controlled_uses (cs);
3313   changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3314 
3315   return changed;
3316 }
3317 
3318 /* Frees all dynamically allocated structures that the argument info points
3319    to.  */
3320 
3321 void
3322 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3323 {
3324   vec_free (args->jump_functions);
3325   memset (args, 0, sizeof (*args));
3326 }
3327 
3328 /* Free all ipa_edge_args structures.  */
3329 
3330 void
3331 ipa_free_all_edge_args (void)
3332 {
3333   int i;
3334   struct ipa_edge_args *args;
3335 
3336   if (!ipa_edge_args_vector)
3337     return;
3338 
3339   FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3340     ipa_free_edge_args_substructures (args);
3341 
3342   vec_free (ipa_edge_args_vector);
3343 }
3344 
3345 /* Frees all dynamically allocated structures that the param info points
3346    to.  */
3347 
3348 ipa_node_params::~ipa_node_params ()
3349 {
3350   descriptors.release ();
3351   free (lattices);
3352   /* Lattice values and their sources are deallocated with their allocation
3353      pool.  */
3354   known_contexts.release ();
3355 
3356   lattices = NULL;
3357   ipcp_orig_node = NULL;
3358   analysis_done = 0;
3359   node_enqueued = 0;
3360   do_clone_for_all_contexts = 0;
3361   is_all_contexts_clone = 0;
3362   node_dead = 0;
3363 }
3364 
3365 /* Free all ipa_node_params structures.  */
3366 
3367 void
3368 ipa_free_all_node_params (void)
3369 {
3370   delete ipa_node_params_sum;
3371   ipa_node_params_sum = NULL;
3372 }
3373 
3374 /* Grow ipcp_transformations if necessary.  */
3375 
3376 void
3377 ipcp_grow_transformations_if_necessary (void)
3378 {
3379   if (vec_safe_length (ipcp_transformations)
3380       <= (unsigned) symtab->cgraph_max_uid)
3381     vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3382 }
3383 
3384 /* Set the aggregate replacements of NODE to be AGGVALS.  */
3385 
3386 void
3387 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3388 			      struct ipa_agg_replacement_value *aggvals)
3389 {
3390   ipcp_grow_transformations_if_necessary ();
3391   (*ipcp_transformations)[node->uid].agg_values = aggvals;
3392 }
3393 
3394 /* Hook that is called by cgraph.c when an edge is removed.  */
3395 
3396 static void
3397 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3398 {
3399   struct ipa_edge_args *args;
3400 
3401   /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
3402   if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3403     return;
3404 
3405   args = IPA_EDGE_REF (cs);
3406   if (args->jump_functions)
3407     {
3408       struct ipa_jump_func *jf;
3409       int i;
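           /* Drop the reference counts held by the described constants and
	      clear rdesc pointers to the edge being removed so that they do
	      not dangle.  */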
3410       FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3411 	{
3412 	  struct ipa_cst_ref_desc *rdesc;
3413 	  try_decrement_rdesc_refcount (jf);
3414 	  if (jf->type == IPA_JF_CONST
3415 	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
3416 	      && rdesc->cs == cs)
3417 	    rdesc->cs = NULL;
3418 	}
3419     }
3420 
3421   ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3422 }
3423 
3424 /* Hook that is called by cgraph.c when an edge is duplicated.  */
3425 
3426 static void
3427 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3428 			   void *)
3429 {
3430   struct ipa_edge_args *old_args, *new_args;
3431   unsigned int i;
3432 
3433   ipa_check_create_edge_args ();
3434 
3435   old_args = IPA_EDGE_REF (src);
3436   new_args = IPA_EDGE_REF (dst);
3437 
3438   new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3439   if (old_args->polymorphic_call_contexts)
3440     new_args->polymorphic_call_contexts
3441       = vec_safe_copy (old_args->polymorphic_call_contexts);
3442 
3443   for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3444     {
3445       struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3446       struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3447 
3448       dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3449 
3450       if (src_jf->type == IPA_JF_CONST)
3451 	{
3452 	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3453 
3454 	  if (!src_rdesc)
3455 	    dst_jf->value.constant.rdesc = NULL;
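     	  /* SRC and DST have the same caller (e.g. the call statement was
	     duplicated within one body), so clone the underlying reference
	     and give DST a fresh reference descriptor.  */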
3456 	  else if (src->caller == dst->caller)
3457 	    {
3458 	      struct ipa_ref *ref;
3459 	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
3460 	      gcc_checking_assert (n);
3461 	      ref = src->caller->find_reference (n, src->call_stmt,
3462 						 src->lto_stmt_uid);
3463 	      gcc_checking_assert (ref);
3464 	      dst->caller->clone_reference (ref, ref->stmt);
3465 
3466 	      gcc_checking_assert (ipa_refdesc_pool);
3467 	      struct ipa_cst_ref_desc *dst_rdesc
3468 		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3469 	      dst_rdesc->cs = dst;
3470 	      dst_rdesc->refcount = src_rdesc->refcount;
3471 	      dst_rdesc->next_duplicate = NULL;
3472 	      dst_jf->value.constant.rdesc = dst_rdesc;
3473 	    }
3474 	  else if (src_rdesc->cs == src)
3475 	    {
3476 	      struct ipa_cst_ref_desc *dst_rdesc;
3477 	      gcc_checking_assert (ipa_refdesc_pool);
3478 	      dst_rdesc
3479 		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3480 	      dst_rdesc->cs = dst;
3481 	      dst_rdesc->refcount = src_rdesc->refcount;
3482 	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3483 	      src_rdesc->next_duplicate = dst_rdesc;
3484 	      dst_jf->value.constant.rdesc = dst_rdesc;
3485 	    }
3486 	  else
3487 	    {
3488 	      struct ipa_cst_ref_desc *dst_rdesc;
3489 	      /* This can happen during inlining, when a JFUNC can refer to a
3490 		 reference taken in a function up in the tree of inline clones.
3491 		 We need to find the duplicate that refers to our tree of
3492 		 inline clones.  */
3493 
3494 	      gcc_assert (dst->caller->global.inlined_to);
3495 	      for (dst_rdesc = src_rdesc->next_duplicate;
3496 		   dst_rdesc;
3497 		   dst_rdesc = dst_rdesc->next_duplicate)
3498 		{
3499 		  struct cgraph_node *top;
3500 		  top = dst_rdesc->cs->caller->global.inlined_to
3501 		    ? dst_rdesc->cs->caller->global.inlined_to
3502 		    : dst_rdesc->cs->caller;
3503 		  if (dst->caller->global.inlined_to == top)
3504 		    break;
3505 		}
3506 	      gcc_assert (dst_rdesc);
3507 	      dst_jf->value.constant.rdesc = dst_rdesc;
3508 	    }
3509 	}
3510       else if (dst_jf->type == IPA_JF_PASS_THROUGH
3511 	       && src->caller == dst->caller)
3512 	{
3513 	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
3514 	    ? dst->caller->global.inlined_to : dst->caller;
3515 	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3516 	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3517 
3518 	  int c = ipa_get_controlled_uses (root_info, idx);
3519 	  if (c != IPA_UNDESCRIBED_USE)
3520 	    {
3521 	      c++;
3522 	      ipa_set_controlled_uses (root_info, idx, c);
3523 	    }
3524 	}
3525     }
3526 }
3527 
3528 /* Analyze a function newly added to the callgraph.  */
3529 
3530 static void
3531 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3532 {
3533   if (node->has_gimple_body_p ())
3534     ipa_analyze_node (node);
3535 }
3536 
3537 /* Hook called by the summary infrastructure when a node is duplicated.  */
3538 
3539 void
3540 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3541 			     ipa_node_params *old_info,
3542 			     ipa_node_params *new_info)
3543 {
3544   ipa_agg_replacement_value *old_av, *new_av;
3545 
3546   new_info->descriptors = old_info->descriptors.copy ();
3547   new_info->lattices = NULL;
3548   new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3549 
3550   new_info->analysis_done = old_info->analysis_done;
3551   new_info->node_enqueued = old_info->node_enqueued;
3552 
3553   old_av = ipa_get_agg_replacements_for_node (src);
3554   if (old_av)
3555     {
3556       new_av = NULL;
3557       while (old_av)
3558 	{
3559 	  struct ipa_agg_replacement_value *v;
3560 
3561 	  v = ggc_alloc<ipa_agg_replacement_value> ();
3562 	  memcpy (v, old_av, sizeof (*v));
3563 	  v->next = new_av;
3564 	  new_av = v;
3565 	  old_av = old_av->next;
3566 	}
3567       ipa_set_node_agg_value_chain (dst, new_av);
3568     }
3569 
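       /* Also copy the parameter alignment information recorded for SRC, if
	  any.  Growing the transformation vector may reallocate it, which is
	  why SRC's summary is re-fetched after the call below.  */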
3570   ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3571 
3572   if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3573     {
3574       ipcp_grow_transformations_if_necessary ();
3575       src_trans = ipcp_get_transformation_summary (src);
3576       const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3577       vec<ipa_alignment, va_gc> *&dst_alignments
3578 	= ipcp_get_transformation_summary (dst)->alignments;
3579       vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3580       for (unsigned i = 0; i < src_alignments->length (); ++i)
3581 	dst_alignments->quick_push ((*src_alignments)[i]);
3582     }
3583 }
3584 
3585 /* Register our cgraph hooks if they are not already there.  */
3586 
3587 void
3588 ipa_register_cgraph_hooks (void)
3589 {
3590   ipa_check_create_node_params ();
3591 
3592   if (!edge_removal_hook_holder)
3593     edge_removal_hook_holder =
3594       symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3595   if (!edge_duplication_hook_holder)
3596     edge_duplication_hook_holder =
3597       symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3598   function_insertion_hook_holder =
3599       symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3600 }
3601 
3602 /* Unregister our cgraph hooks.  */
3603 
3604 static void
3605 ipa_unregister_cgraph_hooks (void)
3606 {
3607   symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3608   edge_removal_hook_holder = NULL;
3609   symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3610   edge_duplication_hook_holder = NULL;
3611   symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3612   function_insertion_hook_holder = NULL;
3613 }
3614 
3615 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3616    longer needed after ipa-cp.  */
3617 
3618 void
3619 ipa_free_all_structures_after_ipa_cp (void)
3620 {
3621   if (!optimize && !in_lto_p)
3622     {
3623       ipa_free_all_edge_args ();
3624       ipa_free_all_node_params ();
3625       free_alloc_pool (ipcp_sources_pool);
3626       free_alloc_pool (ipcp_cst_values_pool);
3627       free_alloc_pool (ipcp_poly_ctx_values_pool);
3628       free_alloc_pool (ipcp_agg_lattice_pool);
3629       ipa_unregister_cgraph_hooks ();
3630       if (ipa_refdesc_pool)
3631 	free_alloc_pool (ipa_refdesc_pool);
3632     }
3633 }
3634 
3635 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3636    longer needed after indirect inlining.  */
3637 
3638 void
3639 ipa_free_all_structures_after_iinln (void)
3640 {
3641   ipa_free_all_edge_args ();
3642   ipa_free_all_node_params ();
3643   ipa_unregister_cgraph_hooks ();
3644   if (ipcp_sources_pool)
3645     free_alloc_pool (ipcp_sources_pool);
3646   if (ipcp_cst_values_pool)
3647     free_alloc_pool (ipcp_cst_values_pool);
3648   if (ipcp_poly_ctx_values_pool)
3649     free_alloc_pool (ipcp_poly_ctx_values_pool);
3650   if (ipcp_agg_lattice_pool)
3651     free_alloc_pool (ipcp_agg_lattice_pool);
3652   if (ipa_refdesc_pool)
3653     free_alloc_pool (ipa_refdesc_pool);
3654 }
3655 
3656 /* Print the ipa_tree_map parameter descriptors of function NODE
3657    to F.  */
3658 
3659 void
3660 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3661 {
3662   int i, count;
3663   struct ipa_node_params *info;
3664 
3665   if (!node->definition)
3666     return;
3667   info = IPA_NODE_REF (node);
3668   fprintf (f, "  function  %s/%i parameter descriptors:\n",
3669 	   node->name (), node->order);
3670   count = ipa_get_param_count (info);
3671   for (i = 0; i < count; i++)
3672     {
3673       int c;
3674 
3675       fprintf (f, "    ");
3676       ipa_dump_param (f, info, i);
3677       if (ipa_is_param_used (info, i))
3678 	fprintf (f, " used");
3679       c = ipa_get_controlled_uses (info, i);
3680       if (c == IPA_UNDESCRIBED_USE)
3681 	fprintf (f, " undescribed_use");
3682       else
3683 	fprintf (f, "  controlled_uses=%i", c);
3684       fprintf (f, "\n");
3685     }
3686 }
3687 
3688 /* Print the parameter descriptors of all functions in the
3689    callgraph to F.  */
3690 
3691 void
3692 ipa_print_all_params (FILE * f)
3693 {
3694   struct cgraph_node *node;
3695 
3696   fprintf (f, "\nFunction parameters:\n");
3697   FOR_EACH_FUNCTION (node)
3698     ipa_print_node_params (f, node);
3699 }
3700 
3701 /* Return a heap-allocated vector containing formal parameters of FNDECL.  */
3702 
3703 vec<tree>
3704 ipa_get_vector_of_formal_parms (tree fndecl)
3705 {
3706   vec<tree> args;
3707   int count;
3708   tree parm;
3709 
3710   gcc_assert (!flag_wpa);
3711   count = count_formal_params (fndecl);
3712   args.create (count);
3713   for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3714     args.quick_push (parm);
3715 
3716   return args;
3717 }
3718 
3719 /* Return a heap-allocated vector containing the types of formal parameters
3720    of function type FNTYPE.  */
3721 
3722 vec<tree>
3723 ipa_get_vector_of_formal_parm_types (tree fntype)
3724 {
3725   vec<tree> types;
3726   int count = 0;
3727   tree t;
3728 
3729   for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3730     count++;
3731 
3732   types.create (count);
3733   for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3734     types.quick_push (TREE_VALUE (t));
3735 
3736   return types;
3737 }
3738 
3739 /* Modify the function declaration FNDECL and its type according to the plan in
3740    ADJUSTMENTS.  It also sets the base fields of the individual adjustment
3741    structures to point to the actual parameters being modified, as
3742    determined by their base_index fields.  */
3743 
3744 void
3745 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3746 {
3747   vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3748   tree orig_type = TREE_TYPE (fndecl);
3749   tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3750 
3751   /* The following test is an ugly hack; some functions simply don't have any
3752      arguments in their type.  This is probably a bug but well... */
3753   bool care_for_types = (old_arg_types != NULL_TREE);
3754   bool last_parm_void;
3755   vec<tree> otypes;
3756   if (care_for_types)
3757     {
3758       last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3759 			== void_type_node);
3760       otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3761       if (last_parm_void)
3762 	gcc_assert (oparms.length () + 1 == otypes.length ());
3763       else
3764 	gcc_assert (oparms.length () == otypes.length ());
3765     }
3766   else
3767     {
3768       last_parm_void = false;
3769       otypes.create (0);
3770     }
3771 
3772   int len = adjustments.length ();
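       /* Rebuild the DECL_ARGUMENTS chain and the list of argument types
	  according to ADJUSTMENTS.  */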
3773   tree *link = &DECL_ARGUMENTS (fndecl);
3774   tree new_arg_types = NULL;
3775   for (int i = 0; i < len; i++)
3776     {
3777       struct ipa_parm_adjustment *adj;
3778       gcc_assert (link);
3779 
3780       adj = &adjustments[i];
3781       tree parm;
3782       if (adj->op == IPA_PARM_OP_NEW)
3783 	parm = NULL;
3784       else
3785 	parm = oparms[adj->base_index];
3786       adj->base = parm;
3787 
3788       if (adj->op == IPA_PARM_OP_COPY)
3789 	{
3790 	  if (care_for_types)
3791 	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3792 				       new_arg_types);
3793 	  *link = parm;
3794 	  link = &DECL_CHAIN (parm);
3795 	}
3796       else if (adj->op != IPA_PARM_OP_REMOVE)
3797 	{
3798 	  tree new_parm;
3799 	  tree ptype;
3800 
3801 	  if (adj->by_ref)
3802 	    ptype = build_pointer_type (adj->type);
3803 	  else
3804 	    {
3805 	      ptype = adj->type;
3806 	      if (is_gimple_reg_type (ptype))
3807 		{
3808 		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3809 		  if (TYPE_ALIGN (ptype) < malign)
3810 		    ptype = build_aligned_type (ptype, malign);
3811 		}
3812 	    }
3813 
3814 	  if (care_for_types)
3815 	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3816 
3817 	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3818 				 ptype);
3819 	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3820 	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3821 	  DECL_ARTIFICIAL (new_parm) = 1;
3822 	  DECL_ARG_TYPE (new_parm) = ptype;
3823 	  DECL_CONTEXT (new_parm) = fndecl;
3824 	  TREE_USED (new_parm) = 1;
3825 	  DECL_IGNORED_P (new_parm) = 1;
3826 	  layout_decl (new_parm, 0);
3827 
3828 	  if (adj->op == IPA_PARM_OP_NEW)
3829 	    adj->base = NULL;
3830 	  else
3831 	    adj->base = parm;
3832 	  adj->new_decl = new_parm;
3833 
3834 	  *link = new_parm;
3835 	  link = &DECL_CHAIN (new_parm);
3836 	}
3837     }
3838 
3839   *link = NULL_TREE;
3840 
3841   tree new_reversed = NULL;
3842   if (care_for_types)
3843     {
3844       new_reversed = nreverse (new_arg_types);
3845       if (last_parm_void)
3846 	{
3847 	  if (new_reversed)
3848 	    TREE_CHAIN (new_arg_types) = void_list_node;
3849 	  else
3850 	    new_reversed = void_list_node;
3851 	}
3852     }
3853 
3854   /* Use copy_node to preserve as much as possible from the original type
3855      (debug info, attribute lists etc.).
3856      The exception is METHOD_TYPEs, which must have a THIS argument;
3857      when we are asked to remove it, we need to build a new FUNCTION_TYPE
3858      instead.  */
3859   tree new_type = NULL;
3860   if (TREE_CODE (orig_type) != METHOD_TYPE
3861        || (adjustments[0].op == IPA_PARM_OP_COPY
3862 	  && adjustments[0].base_index == 0))
3863     {
3864       new_type = build_distinct_type_copy (orig_type);
3865       TYPE_ARG_TYPES (new_type) = new_reversed;
3866     }
3867   else
3868     {
3869       new_type
3870         = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3871 							 new_reversed));
3872       TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3873       DECL_VINDEX (fndecl) = NULL_TREE;
3874     }
3875 
3876   /* When signature changes, we need to clear builtin info.  */
3877   if (DECL_BUILT_IN (fndecl))
3878     {
3879       DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3880       DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3881     }
3882 
3883   TREE_TYPE (fndecl) = new_type;
3884   DECL_VIRTUAL_P (fndecl) = 0;
3885   DECL_LANG_SPECIFIC (fndecl) = NULL;
3886   otypes.release ();
3887   oparms.release ();
3888 }
3889 
3890 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3891    If this is a directly recursive call, CS must be NULL.  Otherwise it must
3892    contain the corresponding call graph edge.  */
3893 
3894 void
3895 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3896 			   ipa_parm_adjustment_vec adjustments)
3897 {
3898   struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3899   vec<tree> vargs;
3900   vec<tree, va_gc> **debug_args = NULL;
3901   gcall *new_stmt;
3902   gimple_stmt_iterator gsi, prev_gsi;
3903   tree callee_decl;
3904   int i, len;
3905 
3906   len = adjustments.length ();
3907   vargs.create (len);
3908   callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3909   current_node->remove_stmt_references (stmt);
3910 
3911   gsi = gsi_for_stmt (stmt);
3912   prev_gsi = gsi;
3913   gsi_prev (&prev_gsi);
3914   for (i = 0; i < len; i++)
3915     {
3916       struct ipa_parm_adjustment *adj;
3917 
3918       adj = &adjustments[i];
3919 
3920       if (adj->op == IPA_PARM_OP_COPY)
3921 	{
3922 	  tree arg = gimple_call_arg (stmt, adj->base_index);
3923 
3924 	  vargs.quick_push (arg);
3925 	}
3926       else if (adj->op != IPA_PARM_OP_REMOVE)
3927 	{
3928 	  tree expr, base, off;
3929 	  location_t loc;
3930 	  unsigned int deref_align = 0;
3931 	  bool deref_base = false;
3932 
3933 	  /* We create a new parameter out of the value of the old one, we can
3934 	     do the following kind of transformations:
3935 
3936 	     - A scalar passed by reference is converted to a scalar passed by
3937                value.  (adj->by_ref is false and the type of the original
3938                actual argument is a pointer to a scalar).
3939 
3940              - A part of an aggregate is passed instead of the whole aggregate.
3941                The part can be passed either by value or by reference, this is
3942                determined by value of adj->by_ref.  Moreover, the code below
3943                handles both situations when the original aggregate is passed by
3944                value (its type is not a pointer) and when it is passed by
3945                reference (it is a pointer to an aggregate).
3946 
3947 	     When the new argument is passed by reference (adj->by_ref is true)
3948 	     it must be a part of an aggregate and therefore we form it by
3949 	     simply taking the address of a reference inside the original
3950 	     aggregate.  */
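     	  /* A hypothetical illustration (the names below are invented for
	     this comment and do not come from any testcase): if the original
	     call is foo (&s) and only the scalar field s.f at byte offset
	     OFF is used, an adjustment with by_ref false, the type of s.f
	     and that offset rewrites the call roughly to

	       tmp_1 = MEM[(struct S *) &s + OFF];
	       foo.part (tmp_1);

	     whereas with by_ref true the address of the reference inside
	     the aggregate would be passed instead.  */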
3951 
3952 	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3953 	  base = gimple_call_arg (stmt, adj->base_index);
3954 	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3955 			      : EXPR_LOCATION (base);
3956 
3957 	  if (TREE_CODE (base) != ADDR_EXPR
3958 	      && POINTER_TYPE_P (TREE_TYPE (base)))
3959 	    off = build_int_cst (adj->alias_ptr_type,
3960 				 adj->offset / BITS_PER_UNIT);
3961 	  else
3962 	    {
3963 	      HOST_WIDE_INT base_offset;
3964 	      tree prev_base;
3965 	      bool addrof;
3966 
3967 	      if (TREE_CODE (base) == ADDR_EXPR)
3968 		{
3969 		  base = TREE_OPERAND (base, 0);
3970 		  addrof = true;
3971 		}
3972 	      else
3973 		addrof = false;
3974 	      prev_base = base;
3975 	      base = get_addr_base_and_unit_offset (base, &base_offset);
3976 	      /* Aggregate arguments can have non-invariant addresses.  */
3977 	      if (!base)
3978 		{
3979 		  base = build_fold_addr_expr (prev_base);
3980 		  off = build_int_cst (adj->alias_ptr_type,
3981 				       adj->offset / BITS_PER_UNIT);
3982 		}
3983 	      else if (TREE_CODE (base) == MEM_REF)
3984 		{
3985 		  if (!addrof)
3986 		    {
3987 		      deref_base = true;
3988 		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
3989 		    }
3990 		  off = build_int_cst (adj->alias_ptr_type,
3991 				       base_offset
3992 				       + adj->offset / BITS_PER_UNIT);
3993 		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3994 					 off);
3995 		  base = TREE_OPERAND (base, 0);
3996 		}
3997 	      else
3998 		{
3999 		  off = build_int_cst (adj->alias_ptr_type,
4000 				       base_offset
4001 				       + adj->offset / BITS_PER_UNIT);
4002 		  base = build_fold_addr_expr (base);
4003 		}
4004 	    }
4005 
4006 	  if (!adj->by_ref)
4007 	    {
4008 	      tree type = adj->type;
4009 	      unsigned int align;
4010 	      unsigned HOST_WIDE_INT misalign;
4011 
4012 	      if (deref_base)
4013 		{
4014 		  align = deref_align;
4015 		  misalign = 0;
4016 		}
4017 	      else
4018 		{
4019 		  get_pointer_alignment_1 (base, &align, &misalign);
4020 		  if (TYPE_ALIGN (type) > align)
4021 		    align = TYPE_ALIGN (type);
4022 		}
4023 	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4024 			   * BITS_PER_UNIT);
4025 	      misalign = misalign & (align - 1);
4026 	      if (misalign != 0)
4027 		align = (misalign & -misalign);
4028 	      if (align < TYPE_ALIGN (type))
4029 		type = build_aligned_type (type, align);
4030 	      base = force_gimple_operand_gsi (&gsi, base,
4031 					       true, NULL, true, GSI_SAME_STMT);
4032 	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4033 	      /* If expr is not a valid gimple call argument, emit
4034 	         a load into a temporary.  */
4035 	      if (is_gimple_reg_type (TREE_TYPE (expr)))
4036 		{
4037 		  gimple tem = gimple_build_assign (NULL_TREE, expr);
4038 		  if (gimple_in_ssa_p (cfun))
4039 		    {
4040 		      gimple_set_vuse (tem, gimple_vuse (stmt));
4041 		      expr = make_ssa_name (TREE_TYPE (expr), tem);
4042 		    }
4043 		  else
4044 		    expr = create_tmp_reg (TREE_TYPE (expr));
4045 		  gimple_assign_set_lhs (tem, expr);
4046 		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4047 		}
4048 	    }
4049 	  else
4050 	    {
4051 	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4052 	      expr = build_fold_addr_expr (expr);
4053 	      expr = force_gimple_operand_gsi (&gsi, expr,
4054 					       true, NULL, true, GSI_SAME_STMT);
4055 	    }
4056 	  vargs.quick_push (expr);
4057 	}
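           /* For arguments that are not plainly copied, bind the value of
	      the original actual argument to a DEBUG_EXPR_DECL so that debug
	      information can still refer to it.  */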
4058       if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4059 	{
4060 	  unsigned int ix;
4061 	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4062 	  gimple def_temp;
4063 
4064 	  arg = gimple_call_arg (stmt, adj->base_index);
4065 	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4066 	    {
4067 	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
4068 		continue;
4069 	      arg = fold_convert_loc (gimple_location (stmt),
4070 				      TREE_TYPE (origin), arg);
4071 	    }
4072 	  if (debug_args == NULL)
4073 	    debug_args = decl_debug_args_insert (callee_decl);
4074 	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4075 	    if (ddecl == origin)
4076 	      {
4077 		ddecl = (**debug_args)[ix + 1];
4078 		break;
4079 	      }
4080 	  if (ddecl == NULL)
4081 	    {
4082 	      ddecl = make_node (DEBUG_EXPR_DECL);
4083 	      DECL_ARTIFICIAL (ddecl) = 1;
4084 	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
4085 	      DECL_MODE (ddecl) = DECL_MODE (origin);
4086 
4087 	      vec_safe_push (*debug_args, origin);
4088 	      vec_safe_push (*debug_args, ddecl);
4089 	    }
4090 	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4091 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4092 	}
4093     }
4094 
4095   if (dump_file && (dump_flags & TDF_DETAILS))
4096     {
4097       fprintf (dump_file, "replacing stmt:");
4098       print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4099     }
4100 
4101   new_stmt = gimple_build_call_vec (callee_decl, vargs);
4102   vargs.release ();
4103   if (gimple_call_lhs (stmt))
4104     gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4105 
4106   gimple_set_block (new_stmt, gimple_block (stmt));
4107   if (gimple_has_location (stmt))
4108     gimple_set_location (new_stmt, gimple_location (stmt));
4109   gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4110   gimple_call_copy_flags (new_stmt, stmt);
4111   if (gimple_in_ssa_p (cfun))
4112     {
4113       gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4114       if (gimple_vdef (stmt))
4115 	{
4116 	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4117 	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4118 	}
4119     }
4120 
4121   if (dump_file && (dump_flags & TDF_DETAILS))
4122     {
4123       fprintf (dump_file, "with stmt:");
4124       print_gimple_stmt (dump_file, new_stmt, 0, 0);
4125       fprintf (dump_file, "\n");
4126     }
4127   gsi_replace (&gsi, new_stmt, true);
4128   if (cs)
4129     cs->set_call_stmt (new_stmt);
4130   do
4131     {
4132       current_node->record_stmt_references (gsi_stmt (gsi));
4133       gsi_prev (&gsi);
4134     }
4135   while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4136 }
4137 
4138 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4139    so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
4140    specifies whether the function should care about type incompatibility
4141    between the current and new expressions.  If it is false, the function
4142    will leave incompatibility issues to the caller.  Return true iff the
4143    expression was modified.  */
4144 
4145 bool
4146 ipa_modify_expr (tree *expr, bool convert,
4147 		 ipa_parm_adjustment_vec adjustments)
4148 {
4149   struct ipa_parm_adjustment *cand
4150     = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4151   if (!cand)
4152     return false;
4153 
4154   tree src;
4155   if (cand->by_ref)
4156     src = build_simple_mem_ref (cand->new_decl);
4157   else
4158     src = cand->new_decl;
4159 
4160   if (dump_file && (dump_flags & TDF_DETAILS))
4161     {
4162       fprintf (dump_file, "About to replace expr ");
4163       print_generic_expr (dump_file, *expr, 0);
4164       fprintf (dump_file, " with ");
4165       print_generic_expr (dump_file, src, 0);
4166       fprintf (dump_file, "\n");
4167     }
4168 
4169   if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4170     {
4171       tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4172       *expr = vce;
4173     }
4174   else
4175     *expr = src;
4176   return true;
4177 }
4178 
4179 /* If T is an SSA_NAME, return NULL if it is not a default def or
4180    return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
4181    the base variable is always returned, regardless if it is a default
4182    def.  Return T if it is not an SSA_NAME.  */
4183 
4184 static tree
4185 get_ssa_base_param (tree t, bool ignore_default_def)
4186 {
4187   if (TREE_CODE (t) == SSA_NAME)
4188     {
4189       if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4190 	return SSA_NAME_VAR (t);
4191       else
4192 	return NULL_TREE;
4193     }
4194   return t;
4195 }
4196 
4197 /* Given an expression, return an adjustment entry specifying the
4198    transformation to be done on EXPR.  If no suitable adjustment entry
4199    was found, returns NULL.
4200 
4201    If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4202    default def, otherwise bail on them.
4203 
4204    If CONVERT is non-NULL, this function will set *CONVERT if the
4205    expression provided is a component reference.  ADJUSTMENTS is the
4206    adjustments vector.  */
4207 
4208 ipa_parm_adjustment *
4209 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4210 			      ipa_parm_adjustment_vec adjustments,
4211 			      bool ignore_default_def)
4212 {
4213   if (TREE_CODE (**expr) == BIT_FIELD_REF
4214       || TREE_CODE (**expr) == IMAGPART_EXPR
4215       || TREE_CODE (**expr) == REALPART_EXPR)
4216     {
4217       *expr = &TREE_OPERAND (**expr, 0);
4218       if (convert)
4219 	*convert = true;
4220     }
4221 
4222   HOST_WIDE_INT offset, size, max_size;
4223   tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4224   if (!base || size == -1 || max_size == -1)
4225     return NULL;
4226 
4227   if (TREE_CODE (base) == MEM_REF)
4228     {
4229       offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4230       base = TREE_OPERAND (base, 0);
4231     }
4232 
4233   base = get_ssa_base_param (base, ignore_default_def);
4234   if (!base || TREE_CODE (base) != PARM_DECL)
4235     return NULL;
4236 
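       /* Look for an adjustment that describes this parameter at this
	  offset, or its removal.  */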
4237   struct ipa_parm_adjustment *cand = NULL;
4238   unsigned int len = adjustments.length ();
4239   for (unsigned i = 0; i < len; i++)
4240     {
4241       struct ipa_parm_adjustment *adj = &adjustments[i];
4242 
4243       if (adj->base == base
4244 	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4245 	{
4246 	  cand = adj;
4247 	  break;
4248 	}
4249     }
4250 
4251   if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4252     return NULL;
4253   return cand;
4254 }
4255 
4256 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */
4257 
4258 static bool
4259 index_in_adjustments_multiple_times_p (int base_index,
4260 				       ipa_parm_adjustment_vec adjustments)
4261 {
4262   int i, len = adjustments.length ();
4263   bool one = false;
4264 
4265   for (i = 0; i < len; i++)
4266     {
4267       struct ipa_parm_adjustment *adj;
4268       adj = &adjustments[i];
4269 
4270       if (adj->base_index == base_index)
4271 	{
4272 	  if (one)
4273 	    return true;
4274 	  else
4275 	    one = true;
4276 	}
4277     }
4278   return false;
4279 }
4280 
4281 
4282 /* Return adjustments that should have the same effect on function parameters
4283    and call arguments as if they were first changed according to adjustments in
4284    INNER and then by adjustments in OUTER.  */
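     /* For instance (a worked example, not taken from the sources): if INNER
	copies parameters 0 and 2 of f (a, b, c) and removes parameter 1,
	yielding f' (a, c), and OUTER then removes parameter 0 of f', the
	combined vector removes A and B and copies C.  */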
4285 
4286 ipa_parm_adjustment_vec
4287 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4288 			 ipa_parm_adjustment_vec outer)
4289 {
4290   int i, outlen = outer.length ();
4291   int inlen = inner.length ();
4292   int removals = 0;
4293   ipa_parm_adjustment_vec adjustments, tmp;
4294 
4295   tmp.create (inlen);
4296   for (i = 0; i < inlen; i++)
4297     {
4298       struct ipa_parm_adjustment *n;
4299       n = &inner[i];
4300 
4301       if (n->op == IPA_PARM_OP_REMOVE)
4302 	removals++;
4303       else
4304 	{
4305 	  /* FIXME: Handling of new arguments is not implemented yet.  */
4306 	  gcc_assert (n->op != IPA_PARM_OP_NEW);
4307 	  tmp.quick_push (*n);
4308 	}
4309     }
4310 
4311   adjustments.create (outlen + removals);
4312   for (i = 0; i < outlen; i++)
4313     {
4314       struct ipa_parm_adjustment r;
4315       struct ipa_parm_adjustment *out = &outer[i];
4316       struct ipa_parm_adjustment *in = &tmp[out->base_index];
4317 
4318       memset (&r, 0, sizeof (r));
4319       gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4320       if (out->op == IPA_PARM_OP_REMOVE)
4321 	{
4322 	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4323 	    {
4324 	      r.op = IPA_PARM_OP_REMOVE;
4325 	      adjustments.quick_push (r);
4326 	    }
4327 	  continue;
4328 	}
4329       else
4330 	{
4331 	  /* FIXME: Handling of new arguments is not implemented yet.  */
4332 	  gcc_assert (out->op != IPA_PARM_OP_NEW);
4333 	}
4334 
4335       r.base_index = in->base_index;
4336       r.type = out->type;
4337 
4338       /* FIXME:  Create nonlocal value too.  */
4339 
4340       if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4341 	r.op = IPA_PARM_OP_COPY;
4342       else if (in->op == IPA_PARM_OP_COPY)
4343 	r.offset = out->offset;
4344       else if (out->op == IPA_PARM_OP_COPY)
4345 	r.offset = in->offset;
4346       else
4347 	r.offset = in->offset + out->offset;
4348       adjustments.quick_push (r);
4349     }
4350 
4351   for (i = 0; i < inlen; i++)
4352     {
4353       struct ipa_parm_adjustment *n = &inner[i];
4354 
4355       if (n->op == IPA_PARM_OP_REMOVE)
4356 	adjustments.quick_push (*n);
4357     }
4358 
4359   tmp.release ();
4360   return adjustments;
4361 }
4362 
4363 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4364    human-friendly way, assuming they are meant to be applied to FNDECL.  */
4365 
4366 void
4367 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4368 			    tree fndecl)
4369 {
4370   int i, len = adjustments.length ();
4371   bool first = true;
4372   vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4373 
4374   fprintf (file, "IPA param adjustments: ");
4375   for (i = 0; i < len; i++)
4376     {
4377       struct ipa_parm_adjustment *adj;
4378       adj = &adjustments[i];
4379 
4380       if (!first)
4381 	fprintf (file, "                 ");
4382       else
4383 	first = false;
4384 
4385       fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4386       print_generic_expr (file, parms[adj->base_index], 0);
4387       if (adj->base)
4388 	{
4389 	  fprintf (file, ", base: ");
4390 	  print_generic_expr (file, adj->base, 0);
4391 	}
4392       if (adj->new_decl)
4393 	{
4394 	  fprintf (file, ", new_decl: ");
4395 	  print_generic_expr (file, adj->new_decl, 0);
4396 	}
4397       if (adj->new_ssa_base)
4398 	{
4399 	  fprintf (file, ", new_ssa_base: ");
4400 	  print_generic_expr (file, adj->new_ssa_base, 0);
4401 	}
4402 
4403       if (adj->op == IPA_PARM_OP_COPY)
4404 	fprintf (file, ", copy_param");
4405       else if (adj->op == IPA_PARM_OP_REMOVE)
4406 	fprintf (file, ", remove_param");
4407       else
4408 	fprintf (file, ", offset %li", (long) adj->offset);
4409       if (adj->by_ref)
4410 	fprintf (file, ", by_ref");
4411       print_node_brief (file, ", type: ", adj->type, 0);
4412       fprintf (file, "\n");
4413     }
4414   parms.release ();
4415 }
4416 
4417 /* Dump the AV linked list.  */
4418 
4419 void
4420 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4421 {
4422   bool comma = false;
4423   fprintf (f, "     Aggregate replacements:");
4424   for (; av; av = av->next)
4425     {
4426       fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4427 	       av->index, av->offset);
4428       print_generic_expr (f, av->value, 0);
4429       comma = true;
4430     }
4431   fprintf (f, "\n");
4432 }
4433 
4434 /* Stream out jump function JUMP_FUNC to OB.  */
4435 
4436 static void
4437 ipa_write_jump_function (struct output_block *ob,
4438 			 struct ipa_jump_func *jump_func)
4439 {
4440   struct ipa_agg_jf_item *item;
4441   struct bitpack_d bp;
4442   int i, count;
4443 
4444   streamer_write_uhwi (ob, jump_func->type);
4445   switch (jump_func->type)
4446     {
4447     case IPA_JF_UNKNOWN:
4448       break;
4449     case IPA_JF_CONST:
4450       gcc_assert (
4451 	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4452       stream_write_tree (ob, jump_func->value.constant.value, true);
4453       break;
4454     case IPA_JF_PASS_THROUGH:
4455       streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4456       if (jump_func->value.pass_through.operation == NOP_EXPR)
4457 	{
4458 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4459 	  bp = bitpack_create (ob->main_stream);
4460 	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4461 	  streamer_write_bitpack (&bp);
4462 	}
4463       else
4464 	{
4465 	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4466 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4467 	}
4468       break;
4469     case IPA_JF_ANCESTOR:
4470       streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4471       streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4472       bp = bitpack_create (ob->main_stream);
4473       bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4474       streamer_write_bitpack (&bp);
4475       break;
4476     }
4477 
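       /* Stream the aggregate part of the jump function: the number of
	  items, the by_ref flag if there are any items, and each item's
	  offset and value.  */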
4478   count = vec_safe_length (jump_func->agg.items);
4479   streamer_write_uhwi (ob, count);
4480   if (count)
4481     {
4482       bp = bitpack_create (ob->main_stream);
4483       bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4484       streamer_write_bitpack (&bp);
4485     }
4486 
4487   FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4488     {
4489       streamer_write_uhwi (ob, item->offset);
4490       stream_write_tree (ob, item->value, true);
4491     }
4492 
4493   bp = bitpack_create (ob->main_stream);
4494   bp_pack_value (&bp, jump_func->alignment.known, 1);
4495   streamer_write_bitpack (&bp);
4496   if (jump_func->alignment.known)
4497     {
4498       streamer_write_uhwi (ob, jump_func->alignment.align);
4499       streamer_write_uhwi (ob, jump_func->alignment.misalign);
4500     }
4501 }
4502 
4503 /* Read in jump function JUMP_FUNC from IB.  */
4504 
4505 static void
4506 ipa_read_jump_function (struct lto_input_block *ib,
4507 			struct ipa_jump_func *jump_func,
4508 			struct cgraph_edge *cs,
4509 			struct data_in *data_in)
4510 {
4511   enum jump_func_type jftype;
4512   enum tree_code operation;
4513   int i, count;
4514 
4515   jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4516   switch (jftype)
4517     {
4518     case IPA_JF_UNKNOWN:
4519       ipa_set_jf_unknown (jump_func);
4520       break;
4521     case IPA_JF_CONST:
4522       ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4523       break;
4524     case IPA_JF_PASS_THROUGH:
4525       operation = (enum tree_code) streamer_read_uhwi (ib);
4526       if (operation == NOP_EXPR)
4527 	{
4528 	  int formal_id = streamer_read_uhwi (ib);
4529 	  struct bitpack_d bp = streamer_read_bitpack (ib);
4530 	  bool agg_preserved = bp_unpack_value (&bp, 1);
4531 	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4532 	}
4533       else
4534 	{
4535 	  tree operand = stream_read_tree (ib, data_in);
4536 	  int formal_id = streamer_read_uhwi (ib);
4537 	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4538 					 operation);
4539 	}
4540       break;
4541     case IPA_JF_ANCESTOR:
4542       {
4543 	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4544 	int formal_id = streamer_read_uhwi (ib);
4545 	struct bitpack_d bp = streamer_read_bitpack (ib);
4546 	bool agg_preserved = bp_unpack_value (&bp, 1);
4547 	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4548 	break;
4549       }
4550     }
4551 
4552   count = streamer_read_uhwi (ib);
4553   vec_alloc (jump_func->agg.items, count);
4554   if (count)
4555     {
4556       struct bitpack_d bp = streamer_read_bitpack (ib);
4557       jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4558     }
4559   for (i = 0; i < count; i++)
4560     {
4561       struct ipa_agg_jf_item item;
4562       item.offset = streamer_read_uhwi (ib);
4563       item.value = stream_read_tree (ib, data_in);
4564       jump_func->agg.items->quick_push (item);
4565     }
4566 
4567   struct bitpack_d bp = streamer_read_bitpack (ib);
4568   bool alignment_known = bp_unpack_value (&bp, 1);
4569   if (alignment_known)
4570     {
4571       jump_func->alignment.known = true;
4572       jump_func->alignment.align = streamer_read_uhwi (ib);
4573       jump_func->alignment.misalign = streamer_read_uhwi (ib);
4574     }
4575   else
4576     jump_func->alignment.known = false;
4577 }
4578 
4579 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4580    relevant to indirect inlining to OB.  */
4581 
4582 static void
4583 ipa_write_indirect_edge_info (struct output_block *ob,
4584 			      struct cgraph_edge *cs)
4585 {
4586   struct cgraph_indirect_call_info *ii = cs->indirect_info;
4587   struct bitpack_d bp;
4588 
4589   streamer_write_hwi (ob, ii->param_index);
4590   bp = bitpack_create (ob->main_stream);
4591   bp_pack_value (&bp, ii->polymorphic, 1);
4592   bp_pack_value (&bp, ii->agg_contents, 1);
4593   bp_pack_value (&bp, ii->member_ptr, 1);
4594   bp_pack_value (&bp, ii->by_ref, 1);
4595   bp_pack_value (&bp, ii->vptr_changed, 1);
4596   streamer_write_bitpack (&bp);
4597   if (ii->agg_contents || ii->polymorphic)
4598     streamer_write_hwi (ob, ii->offset);
4599   else
4600     gcc_assert (ii->offset == 0);
4601 
4602   if (ii->polymorphic)
4603     {
4604       streamer_write_hwi (ob, ii->otr_token);
4605       stream_write_tree (ob, ii->otr_type, true);
4606       ii->context.stream_out (ob);
4607     }
4608 }
4609 
4610 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4611    relevant to indirect inlining from IB.  */
4612 
4613 static void
4614 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4615 			     struct data_in *data_in,
4616 			     struct cgraph_edge *cs)
4617 {
4618   struct cgraph_indirect_call_info *ii = cs->indirect_info;
4619   struct bitpack_d bp;
4620 
4621   ii->param_index = (int) streamer_read_hwi (ib);
4622   bp = streamer_read_bitpack (ib);
4623   ii->polymorphic = bp_unpack_value (&bp, 1);
4624   ii->agg_contents = bp_unpack_value (&bp, 1);
4625   ii->member_ptr = bp_unpack_value (&bp, 1);
4626   ii->by_ref = bp_unpack_value (&bp, 1);
4627   ii->vptr_changed = bp_unpack_value (&bp, 1);
4628   if (ii->agg_contents || ii->polymorphic)
4629     ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4630   else
4631     ii->offset = 0;
4632   if (ii->polymorphic)
4633     {
4634       ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4635       ii->otr_type = stream_read_tree (ib, data_in);
4636       ii->context.stream_in (ib, data_in);
4637     }
4638 }
4639 
4640 /* Stream out NODE info to OB.  */
4641 
4642 static void
4643 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4644 {
4645   int node_ref;
4646   lto_symtab_encoder_t encoder;
4647   struct ipa_node_params *info = IPA_NODE_REF (node);
4648   int j;
4649   struct cgraph_edge *e;
4650   struct bitpack_d bp;
4651 
4652   encoder = ob->decl_state->symtab_node_encoder;
4653   node_ref = lto_symtab_encoder_encode (encoder, node);
4654   streamer_write_uhwi (ob, node_ref);
4655 
4656   streamer_write_uhwi (ob, ipa_get_param_count (info));
4657   for (j = 0; j < ipa_get_param_count (info); j++)
4658     streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4659   bp = bitpack_create (ob->main_stream);
4660   gcc_assert (info->analysis_done
4661 	      || ipa_get_param_count (info) == 0);
4662   gcc_assert (!info->node_enqueued);
4663   gcc_assert (!info->ipcp_orig_node);
4664   for (j = 0; j < ipa_get_param_count (info); j++)
4665     bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4666   streamer_write_bitpack (&bp);
4667   for (j = 0; j < ipa_get_param_count (info); j++)
4668     streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4669   for (e = node->callees; e; e = e->next_callee)
4670     {
4671       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4672 
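           /* The streamed count is twice the number of arguments, with the
	      low bit recording whether polymorphic call contexts follow the
	      jump functions.  */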
4673       streamer_write_uhwi (ob,
4674 			   ipa_get_cs_argument_count (args) * 2
4675 			   + (args->polymorphic_call_contexts != NULL));
4676       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4677 	{
4678 	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4679 	  if (args->polymorphic_call_contexts != NULL)
4680 	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4681 	}
4682     }
4683   for (e = node->indirect_calls; e; e = e->next_callee)
4684     {
4685       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4686 
4687       streamer_write_uhwi (ob,
4688 			   ipa_get_cs_argument_count (args) * 2
4689 			   + (args->polymorphic_call_contexts != NULL));
4690       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4691 	{
4692 	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4693 	  if (args->polymorphic_call_contexts != NULL)
4694 	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4695 	}
4696       ipa_write_indirect_edge_info (ob, e);
4697     }
4698 }
4699 
4700 /* Stream in NODE info from IB.  */
4701 
4702 static void
4703 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4704 		    struct data_in *data_in)
4705 {
4706   struct ipa_node_params *info = IPA_NODE_REF (node);
4707   int k;
4708   struct cgraph_edge *e;
4709   struct bitpack_d bp;
4710 
4711   ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4712 
4713   for (k = 0; k < ipa_get_param_count (info); k++)
4714     info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4715 
4716   bp = streamer_read_bitpack (ib);
4717   if (ipa_get_param_count (info) != 0)
4718     info->analysis_done = true;
4719   info->node_enqueued = false;
4720   for (k = 0; k < ipa_get_param_count (info); k++)
4721     ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4722   for (k = 0; k < ipa_get_param_count (info); k++)
4723     ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4724   for (e = node->callees; e; e = e->next_callee)
4725     {
4726       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4727       int count = streamer_read_uhwi (ib);
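           /* The low bit of the streamed count records whether polymorphic
	      call contexts were streamed along with the jump functions.  */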
4728       bool contexts_computed = count & 1;
4729       count /= 2;
4730 
4731       if (!count)
4732 	continue;
4733       vec_safe_grow_cleared (args->jump_functions, count);
4734       if (contexts_computed)
4735 	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4736 
4737       for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4738 	{
4739 	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4740 				  data_in);
4741 	  if (contexts_computed)
4742 	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4743 	}
4744     }
4745   for (e = node->indirect_calls; e; e = e->next_callee)
4746     {
4747       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4748       int count = streamer_read_uhwi (ib);
4749       bool contexts_computed = count & 1;
4750       count /= 2;
4751 
4752       if (count)
4753 	{
4754 	  vec_safe_grow_cleared (args->jump_functions, count);
4755 	  if (contexts_computed)
4756 	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4757           for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4758 	    {
4759 	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4760 				      data_in);
4761 	      if (contexts_computed)
4762 		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4763 	    }
4764 	}
4765       ipa_read_indirect_edge_info (ib, data_in, e);
4766     }
4767 }
4768 
4769 /* Write jump functions for all functions in the current partition.  */
4770 
4771 void
4772 ipa_prop_write_jump_functions (void)
4773 {
4774   struct cgraph_node *node;
4775   struct output_block *ob;
4776   unsigned int count = 0;
4777   lto_symtab_encoder_iterator lsei;
4778   lto_symtab_encoder_t encoder;
4779 
4780   if (!ipa_node_params_sum || !ipa_edge_args_vector)
4781     return;
4782 
4783   ob = create_output_block (LTO_section_jump_functions);
4784   encoder = ob->decl_state->symtab_node_encoder;
4785   ob->symbol = NULL;
4786   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4787        lsei_next_function_in_partition (&lsei))
4788     {
4789       node = lsei_cgraph_node (lsei);
4790       if (node->has_gimple_body_p ()
4791 	  && IPA_NODE_REF (node) != NULL)
4792 	count++;
4793     }
4794 
4795   streamer_write_uhwi (ob, count);
4796 
4797   /* Process all of the functions.  */
4798   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4799        lsei_next_function_in_partition (&lsei))
4800     {
4801       node = lsei_cgraph_node (lsei);
4802       if (node->has_gimple_body_p ()
4803 	  && IPA_NODE_REF (node) != NULL)
4804         ipa_write_node_info (ob, node);
4805     }
4806   streamer_write_char_stream (ob->main_stream, 0);
4807   produce_asm (ob, NULL);
4808   destroy_output_block (ob);
4809 }
4810 
4811 /* Read section in file FILE_DATA of length LEN with data DATA.  */
4812 
4813 static void
4814 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4815 		       size_t len)
4816 {
4817   const struct lto_function_header *header =
4818     (const struct lto_function_header *) data;
4819   const int cfg_offset = sizeof (struct lto_function_header);
4820   const int main_offset = cfg_offset + header->cfg_size;
4821   const int string_offset = main_offset + header->main_size;
4822   struct data_in *data_in;
4823   unsigned int i;
4824   unsigned int count;
4825 
4826   lto_input_block ib_main ((const char *) data + main_offset,
4827 			   header->main_size, file_data->mode_table);
4828 
4829   data_in =
4830     lto_data_in_create (file_data, (const char *) data + string_offset,
4831 			header->string_size, vNULL);
4832   count = streamer_read_uhwi (&ib_main);
4833 
4834   for (i = 0; i < count; i++)
4835     {
4836       unsigned int index;
4837       struct cgraph_node *node;
4838       lto_symtab_encoder_t encoder;
4839 
4840       index = streamer_read_uhwi (&ib_main);
4841       encoder = file_data->symtab_node_encoder;
4842       node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4843 								index));
4844       gcc_assert (node->definition);
4845       ipa_read_node_info (&ib_main, node, data_in);
4846     }
4847   lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4848 			 len);
4849   lto_data_in_delete (data_in);
4850 }
4851 
4852 /* Read IPA-CP jump functions.  */
4853 
4854 void
4855 ipa_prop_read_jump_functions (void)
4856 {
4857   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4858   struct lto_file_decl_data *file_data;
4859   unsigned int j = 0;
4860 
4861   ipa_check_create_node_params ();
4862   ipa_check_create_edge_args ();
4863   ipa_register_cgraph_hooks ();
4864 
4865   while ((file_data = file_data_vec[j++]))
4866     {
4867       size_t len;
4868       const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4869 
4870       if (data)
4871         ipa_prop_read_section (file_data, data, len);
4872     }
4873 }
4874 
4875 /* After merging units, we can get mismatches in argument counts.
4876    Decl merging might also have rendered parameter lists obsolete.
4877    Also compute called_with_variable_arg info.  */
4878 
4879 void
4880 ipa_update_after_lto_read (void)
4881 {
4882   ipa_check_create_node_params ();
4883   ipa_check_create_edge_args ();
4884 }
4885 
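     /* Stream out the aggregate value replacement chain and the parameter
	alignment information of NODE to OB.  */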
4886 void
4887 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4888 {
4889   int node_ref;
4890   unsigned int count = 0;
4891   lto_symtab_encoder_t encoder;
4892   struct ipa_agg_replacement_value *aggvals, *av;
4893 
4894   aggvals = ipa_get_agg_replacements_for_node (node);
4895   encoder = ob->decl_state->symtab_node_encoder;
4896   node_ref = lto_symtab_encoder_encode (encoder, node);
4897   streamer_write_uhwi (ob, node_ref);
4898 
4899   for (av = aggvals; av; av = av->next)
4900     count++;
4901   streamer_write_uhwi (ob, count);
4902 
4903   for (av = aggvals; av; av = av->next)
4904     {
4905       struct bitpack_d bp;
4906 
4907       streamer_write_uhwi (ob, av->offset);
4908       streamer_write_uhwi (ob, av->index);
4909       stream_write_tree (ob, av->value, true);
4910 
4911       bp = bitpack_create (ob->main_stream);
4912       bp_pack_value (&bp, av->by_ref, 1);
4913       streamer_write_bitpack (&bp);
4914     }
4915 
4916   ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4917   if (ts && vec_safe_length (ts->alignments) > 0)
4918     {
4919       count = ts->alignments->length ();
4920 
4921       streamer_write_uhwi (ob, count);
4922       for (unsigned i = 0; i < count; ++i)
4923 	{
4924 	  ipa_alignment *parm_al = &(*ts->alignments)[i];
4925 
4926 	  struct bitpack_d bp;
4927 	  bp = bitpack_create (ob->main_stream);
4928 	  bp_pack_value (&bp, parm_al->known, 1);
4929 	  streamer_write_bitpack (&bp);
4930 	  if (parm_al->known)
4931 	    {
4932 	      streamer_write_uhwi (ob, parm_al->align);
4933 	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4934 					   parm_al->misalign);
4935 	    }
4936 	}
4937     }
4938   else
4939     streamer_write_uhwi (ob, 0);
4940 }
4941 
4942 /* Stream in the aggregate value replacement chain for NODE from IB.  */
4943 
4944 static void
4945 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4946 			       data_in *data_in)
4947 {
4948   struct ipa_agg_replacement_value *aggvals = NULL;
4949   unsigned int count, i;
4950 
4951   count = streamer_read_uhwi (ib);
4952   for (i = 0; i < count; i++)
4953     {
4954       struct ipa_agg_replacement_value *av;
4955       struct bitpack_d bp;
4956 
4957       av = ggc_alloc<ipa_agg_replacement_value> ();
4958       av->offset = streamer_read_uhwi (ib);
4959       av->index = streamer_read_uhwi (ib);
4960       av->value = stream_read_tree (ib, data_in);
4961       bp = streamer_read_bitpack (ib);
4962       av->by_ref = bp_unpack_value (&bp, 1);
4963       av->next = aggvals;
4964       aggvals = av;
4965     }
4966   ipa_set_node_agg_value_chain (node, aggvals);
4967 
4968   count = streamer_read_uhwi (ib);
4969   if (count > 0)
4970     {
4971       ipcp_grow_transformations_if_necessary ();
4972 
4973       ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4974       vec_safe_grow_cleared (ts->alignments, count);
4975 
4976       for (i = 0; i < count; i++)
4977 	{
4978 	  ipa_alignment *parm_al;
4979 	  parm_al = &(*ts->alignments)[i];
4980 	  struct bitpack_d bp;
4981 	  bp = streamer_read_bitpack (ib);
4982 	  parm_al->known = bp_unpack_value (&bp, 1);
4983 	  if (parm_al->known)
4984 	    {
4985 	      parm_al->align = streamer_read_uhwi (ib);
4986 	      parm_al->misalign
4987 		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
4988 					      0, parm_al->align);
4989 	    }
4990 	}
4991     }
4992 }
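/* Note that because each streamed record is pushed onto the head of
   AGGVALS, the in-memory chain built above ends up in the reverse of the
   order in which write_ipcp_transformation_info emitted it.  This is
   harmless: the consumers in this file look a replacement up by its
   (index, offset) pair rather than by its position in the chain.  */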
4993 
4994 /* Write the IPA-CP transformation summaries (aggregate replacements and
   parameter alignments) for all nodes in the current partition.  */
4995 
4996 void
4997 ipcp_write_transformation_summaries (void)
4998 {
4999   struct cgraph_node *node;
5000   struct output_block *ob;
5001   unsigned int count = 0;
5002   lto_symtab_encoder_iterator lsei;
5003   lto_symtab_encoder_t encoder;
5004 
5005   ob = create_output_block (LTO_section_ipcp_transform);
5006   encoder = ob->decl_state->symtab_node_encoder;
5007   ob->symbol = NULL;
5008   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5009        lsei_next_function_in_partition (&lsei))
5010     {
5011       node = lsei_cgraph_node (lsei);
5012       if (node->has_gimple_body_p ())
5013 	count++;
5014     }
5015 
5016   streamer_write_uhwi (ob, count);
5017 
5018   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5019        lsei_next_function_in_partition (&lsei))
5020     {
5021       node = lsei_cgraph_node (lsei);
5022       if (node->has_gimple_body_p ())
5023 	write_ipcp_transformation_info (ob, node);
5024     }
5025   streamer_write_char_stream (ob->main_stream, 0);
5026   produce_asm (ob, NULL);
5027   destroy_output_block (ob);
5028 }
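/* The writer above makes two passes over the functions of the current
   partition: the first pass only counts the nodes with gimple bodies so
   that the count can precede the records, and the second pass emits one
   record per such node.  A zero byte is appended to the main stream
   before the section is produced.  */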
5029 
5030 /* Read the IPA-CP transformation summary section given as DATA of
5031    length LEN from file FILE_DATA.  */
5032 
5033 static void
5034 read_replacements_section (struct lto_file_decl_data *file_data,
5035 			   const char *data,
5036 			   size_t len)
5037 {
5038   const struct lto_function_header *header =
5039     (const struct lto_function_header *) data;
5040   const int cfg_offset = sizeof (struct lto_function_header);
5041   const int main_offset = cfg_offset + header->cfg_size;
5042   const int string_offset = main_offset + header->main_size;
5043   struct data_in *data_in;
5044   unsigned int i;
5045   unsigned int count;
5046 
5047   lto_input_block ib_main ((const char *) data + main_offset,
5048 			   header->main_size, file_data->mode_table);
5049 
5050   data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5051 				header->string_size, vNULL);
5052   count = streamer_read_uhwi (&ib_main);
5053 
5054   for (i = 0; i < count; i++)
5055     {
5056       unsigned int index;
5057       struct cgraph_node *node;
5058       lto_symtab_encoder_t encoder;
5059 
5060       index = streamer_read_uhwi (&ib_main);
5061       encoder = file_data->symtab_node_encoder;
5062       node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5063 								index));
5064       gcc_assert (node->definition);
5065       read_ipcp_transformation_info (&ib_main, node, data_in);
5066     }
5067   lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5068 			 len);
5069   lto_data_in_delete (data_in);
5070 }
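/* The offsets computed above follow the usual layout of an LTO function
   section: an lto_function_header, followed by header->cfg_size bytes of
   CFG stream (unused here), header->main_size bytes of main stream and
   header->string_size bytes of string table; only the main and string
   streams are consumed by this reader.  */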
5071 
5072 /* Read IPA-CP aggregate replacement and alignment summaries.  */
5073 
5074 void
5075 ipcp_read_transformation_summaries (void)
5076 {
5077   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5078   struct lto_file_decl_data *file_data;
5079   unsigned int j = 0;
5080 
5081   while ((file_data = file_data_vec[j++]))
5082     {
5083       size_t len;
5084       const char *data = lto_get_section_data (file_data,
5085 					       LTO_section_ipcp_transform,
5086 					       NULL, &len);
5087       if (data)
5088         read_replacements_section (file_data, data, len);
5089     }
5090 }
5091 
5092 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5093    NODE.  */
5094 
5095 static void
5096 adjust_agg_replacement_values (struct cgraph_node *node,
5097 			       struct ipa_agg_replacement_value *aggval)
5098 {
5099   struct ipa_agg_replacement_value *v;
5100   int i, c = 0, d = 0, *adj;
5101 
5102   if (!node->clone.combined_args_to_skip)
5103     return;
5104 
5105   for (v = aggval; v; v = v->next)
5106     {
5107       gcc_assert (v->index >= 0);
5108       if (c < v->index)
5109 	c = v->index;
5110     }
5111   c++;
5112 
5113   adj = XALLOCAVEC (int, c);
5114   for (i = 0; i < c; i++)
5115     if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5116       {
5117 	adj[i] = -1;
5118 	d++;
5119       }
5120     else
5121       adj[i] = i - d;
5122 
5123   for (v = aggval; v; v = v->next)
5124     v->index = adj[v->index];
5125 }
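/* As a worked example of the remapping above: with
   combined_args_to_skip = {1} and replacements recorded for original
   parameter indices 0, 2 and 3, the scratch array becomes
   adj = {0, -1, 1, 2}, so the surviving replacements are renumbered to
   indices 0, 1 and 2 of the clone's reduced parameter list.  */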
5126 
5127 /* Dominator walker driving the ipcp modification phase.  */
5128 
5129 class ipcp_modif_dom_walker : public dom_walker
5130 {
5131 public:
5132   ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5133 			 vec<ipa_param_descriptor> descs,
5134 			 struct ipa_agg_replacement_value *av,
5135 			 bool *sc, bool *cc)
5136     : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5137       m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5138 
5139   virtual void before_dom_children (basic_block);
5140 
5141 private:
5142   struct ipa_func_body_info *m_fbi;
5143   vec<ipa_param_descriptor> m_descriptors;
5144   struct ipa_agg_replacement_value *m_aggval;
5145   bool *m_something_changed, *m_cfg_changed;
5146 };
5147 
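/* Scan the statements of BB for loads from aggregates passed in
   parameters whose contents are known, and replace the loads with the
   constants recorded in M_AGGVAL.  */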
5148 void
5149 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5150 {
5151   gimple_stmt_iterator gsi;
5152   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5153     {
5154       struct ipa_agg_replacement_value *v;
5155       gimple stmt = gsi_stmt (gsi);
5156       tree rhs, val, t;
5157       HOST_WIDE_INT offset, size;
5158       int index;
5159       bool by_ref, vce;
5160 
5161       if (!gimple_assign_load_p (stmt))
5162 	continue;
5163       rhs = gimple_assign_rhs1 (stmt);
5164       if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5165 	continue;
5166 
5167       vce = false;
5168       t = rhs;
5169       while (handled_component_p (t))
5170 	{
5171 	  /* V_C_E can do things like convert an array of integers to one
5172 	     bigger integer and similar things we do not handle below.  */
5173 	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5174 	    {
5175 	      vce = true;
5176 	      break;
5177 	    }
5178 	  t = TREE_OPERAND (t, 0);
5179 	}
5180       if (vce)
5181 	continue;
5182 
5183       if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5184 				   &offset, &size, &by_ref))
5185 	continue;
5186       for (v = m_aggval; v; v = v->next)
5187 	if (v->index == index
5188 	    && v->offset == offset)
5189 	  break;
5190       if (!v
5191 	  || v->by_ref != by_ref
5192 	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5193 	continue;
5194 
5195       gcc_checking_assert (is_gimple_ip_invariant (v->value));
5196       if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5197 	{
5198 	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5199 	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5200 	  else if (TYPE_SIZE (TREE_TYPE (rhs))
5201 		   == TYPE_SIZE (TREE_TYPE (v->value)))
5202 	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5203 	  else
5204 	    {
5205 	      if (dump_file)
5206 		{
5207 		  fprintf (dump_file, "    const ");
5208 		  print_generic_expr (dump_file, v->value, 0);
5209 		  fprintf (dump_file, "  can't be converted to type of ");
5210 		  print_generic_expr (dump_file, rhs, 0);
5211 		  fprintf (dump_file, "\n");
5212 		}
5213 	      continue;
5214 	    }
5215 	}
5216       else
5217 	val = v->value;
5218 
5219       if (dump_file && (dump_flags & TDF_DETAILS))
5220 	{
5221 	  fprintf (dump_file, "Modifying stmt:\n  ");
5222 	  print_gimple_stmt (dump_file, stmt, 0, 0);
5223 	}
5224       gimple_assign_set_rhs_from_tree (&gsi, val);
5225       update_stmt (stmt);
5226 
5227       if (dump_file && (dump_flags & TDF_DETAILS))
5228 	{
5229 	  fprintf (dump_file, "into:\n  ");
5230 	  print_gimple_stmt (dump_file, stmt, 0, 0);
5231 	  fprintf (dump_file, "\n");
5232 	}
5233 
5234       *m_something_changed = true;
5235       if (maybe_clean_eh_stmt (stmt)
5236 	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5237 	*m_cfg_changed = true;
5238     }
5240 }
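/* As a concrete (hypothetical) example of the rewrite performed above:
   if IPA-CP has recorded that the aggregate pointed to by parameter p
   always holds the value 42 at offset 64, a load such as

     x_1 = p_2(D)->field;	(index of p, offset 64, by_ref)

   is rewritten into

     x_1 = 42;

   after which any EH edges the load may have had can become dead; purging
   them is what sets *m_cfg_changed.  */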
5241 
5242 /* Update alignment of formal parameters as described in
5243    ipcp_transformation_summary.  */
5244 
5245 static void
5246 ipcp_update_alignments (struct cgraph_node *node)
5247 {
5248   tree fndecl = node->decl;
5249   tree parm = DECL_ARGUMENTS (fndecl);
5250   tree next_parm = parm;
5251   ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5252   if (!ts || vec_safe_length (ts->alignments) == 0)
5253     return;
5254   const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5255   unsigned count = alignments.length ();
5256 
5257   for (unsigned i = 0; i < count; ++i, parm = next_parm)
5258     {
5259       if (node->clone.combined_args_to_skip
5260 	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5261 	continue;
5262       gcc_checking_assert (parm);
5263       next_parm = DECL_CHAIN (parm);
5264 
5265       if (!alignments[i].known || !is_gimple_reg (parm))
5266 	continue;
5267       tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5268       if (!ddef)
5269 	continue;
5270 
5271       if (dump_file)
5272 	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
5273 		 "misalignment to %u\n", i, alignments[i].align,
5274 		 alignments[i].misalign);
5275 
5276       struct ptr_info_def *pi = get_ptr_info (ddef);
5277       gcc_checking_assert (pi);
5278       unsigned old_align;
5279       unsigned old_misalign;
5280       bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5281 
5282       if (old_known
5283 	  && old_align >= alignments[i].align)
5284 	{
5285 	  if (dump_file)
5286 	    fprintf (dump_file, "    But the alignment was already %u.\n",
5287 		     old_align);
5288 	  continue;
5289 	}
5290       set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5291     }
5292 }
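/* In the ptr_info encoding used above, ALIGN is a power of two and
   MISALIGN means that the pointer value is known to be congruent to
   MISALIGN modulo ALIGN.  Hence an already known greater or equal
   alignment is kept rather than overwritten by a weaker one.  */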
5293 
5294 /* IPCP transformation phase: update parameter alignments and propagate
   known aggregate values into the body of NODE.  */
5295 
5296 unsigned int
5297 ipcp_transform_function (struct cgraph_node *node)
5298 {
5299   vec<ipa_param_descriptor> descriptors = vNULL;
5300   struct ipa_func_body_info fbi;
5301   struct ipa_agg_replacement_value *aggval;
5302   int param_count;
5303   bool cfg_changed = false, something_changed = false;
5304 
5305   gcc_checking_assert (cfun);
5306   gcc_checking_assert (current_function_decl);
5307 
5308   if (dump_file)
5309     fprintf (dump_file, "Modification phase of node %s/%i\n",
5310 	     node->name (), node->order);
5311 
5312   ipcp_update_alignments (node);
5313   aggval = ipa_get_agg_replacements_for_node (node);
5314   if (!aggval)
5315     return 0;
5316   param_count = count_formal_params (node->decl);
5317   if (param_count == 0)
5318     return 0;
5319   adjust_agg_replacement_values (node, aggval);
5320   if (dump_file)
5321     ipa_dump_agg_replacement_values (dump_file, aggval);
5322 
5323   fbi.node = node;
5324   fbi.info = NULL;
5325   fbi.bb_infos = vNULL;
5326   fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5327   fbi.param_count = param_count;
5328   fbi.aa_walked = 0;
5329 
5330   descriptors.safe_grow_cleared (param_count);
5331   ipa_populate_param_decls (node, descriptors);
5332   calculate_dominance_info (CDI_DOMINATORS);
5333   ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5334 			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5335 
5336   int i;
5337   struct ipa_bb_info *bi;
5338   FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5339     free_ipa_bb_info (bi);
5340   fbi.bb_infos.release ();
5341   free_dominance_info (CDI_DOMINATORS);
5342   (*ipcp_transformations)[node->uid].agg_values = NULL;
5343   (*ipcp_transformations)[node->uid].alignments = NULL;
5344   descriptors.release ();
5345 
5346   if (!something_changed)
5347     return 0;
5348   else if (cfg_changed)
5349     return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5350   else
5351     return TODO_update_ssa_only_virtuals;
5352 }
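/* A minimal sketch of how this function is driven; the caller below is
   hypothetical and only illustrates the contract.  In GCC proper the pass
   manager invokes it through the IPA-CP pass's function_transform hook
   and acts on the returned TODO flags:

     push_cfun (DECL_STRUCT_FUNCTION (node->decl));
     unsigned int todo = ipcp_transform_function (node);
     if (todo & TODO_cleanup_cfg)
       cleanup_tree_cfg ();
     pop_cfun ();  */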
5353