1 /* Tree inlining.
2    Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
3    2012 Free Software Foundation, Inc.
4    Contributed by Alexandre Oliva <aoliva@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12 
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "diagnostic-core.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "insn-config.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-iterator.h"
37 #include "cgraph.h"
38 #include "intl.h"
39 #include "tree-mudflap.h"
40 #include "tree-flow.h"
41 #include "function.h"
42 #include "tree-flow.h"
43 #include "tree-pretty-print.h"
44 #include "except.h"
45 #include "debug.h"
46 #include "pointer-set.h"
47 #include "ipa-prop.h"
48 #include "value-prof.h"
49 #include "tree-pass.h"
50 #include "target.h"
51 #include "integrate.h"
52 
53 #include "rtl.h"	/* FIXME: For asm_str_count.  */
54 
55 /* I'm not really happy about this, but we need to handle gimple and
56    non-gimple trees.  */
57 #include "gimple.h"
58 
59 /* Inlining, Cloning, Versioning, Parallelization
60 
61    Inlining: a function body is duplicated, but the PARM_DECLs are
62    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
63    MODIFY_EXPRs that store to a dedicated returned-value variable.
64    The duplicated eh_region info of the copy will later be appended
65    to the info for the caller; the eh_region info in copied throwing
66    statements and RESX statements is adjusted accordingly.
67 
68    Cloning: (only in C++) We have one body for a con/de/structor, and
69    multiple function decls, each with a unique parameter list.
70    Duplicate the body, using the given splay tree; some parameters
71    will become constants (like 0 or 1).
72 
73    Versioning: a function body is duplicated and the result is a new
74    function, rather than being inserted into the blocks of an existing
75    function as with inlining.  Some parameters will become constants.
76 
77    Parallelization: a region of a function is duplicated resulting in
78    a new function.  Variables may be replaced with complex expressions
79    to enable shared variable semantics.
80 
81    All of these will simultaneously look up any callgraph edges.  If
82    we're going to inline the duplicated function body, and the given
83    function has some cloned callgraph nodes (one for each place this
84    function will be inlined) those callgraph edges will be duplicated.
85    If we're cloning the body, those callgraph edges will be
86    updated to point into the new body.  (Note that the original
87    callgraph node and edge list will not be altered.)
88 
89    See the CALL_EXPR handling case in copy_tree_body_r ().  */
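
/* For example (an illustrative sketch, not code from this file):
   inlining

     int sq (int x) { return x * x; }      ...  y = sq (a);

   duplicates the body of sq, remaps the PARM_DECL x to a local
   VAR_DECL initialized from a, and turns the RETURN_EXPR into an
   assignment to a dedicated return variable, giving roughly

     x.1 = a;  retval.2 = x.1 * x.1;  y = retval.2;  */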
90 
91 /* To Do:
92 
93    o In order to make inlining-on-trees work, we pessimized
94      function-local static constants.  In particular, they are now
95      always output, even when not addressed.  Fix this by treating
96      function-local static constants just like global static
97      constants; the back-end already knows not to output them if they
98      are not needed.
99 
100    o Provide heuristics to clamp inlining of recursive template
101      calls?  */
102 
103 
104 /* Weights that estimate_num_insns uses to estimate the size of the
105    produced code.  */
106 
107 eni_weights eni_size_weights;
108 
109 /* Weights that estimate_num_insns uses to estimate the time necessary
110    to execute the produced code.  */
111 
112 eni_weights eni_time_weights;
113 
114 /* Prototypes.  */
115 
116 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
117 static void remap_block (tree *, copy_body_data *);
118 static void copy_bind_expr (tree *, int *, copy_body_data *);
119 static tree mark_local_for_remap_r (tree *, int *, void *);
120 static void unsave_expr_1 (tree);
121 static tree unsave_r (tree *, int *, void *);
122 static void declare_inline_vars (tree, tree);
123 static void remap_save_expr (tree *, void *, int *);
124 static void prepend_lexical_block (tree current_block, tree new_block);
125 static tree copy_decl_to_var (tree, copy_body_data *);
126 static tree copy_result_decl_to_var (tree, copy_body_data *);
127 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
128 static gimple remap_gimple_stmt (gimple, copy_body_data *);
129 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
130 
131 /* Insert a tree->tree mapping for ID.  Although the name suggests
132    that the trees should be variables, this map is used for more than that.  */
133 
134 void
135 insert_decl_map (copy_body_data *id, tree key, tree value)
136 {
137   *pointer_map_insert (id->decl_map, key) = value;
138 
139   /* Always insert an identity map as well.  If we see this same new
140      node again, we won't want to duplicate it a second time.  */
141   if (key != value)
142     *pointer_map_insert (id->decl_map, value) = value;
143 }
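
/* A usage sketch (with hypothetical trees): after
   insert_decl_map (id, old_var, new_var), both old_var -> new_var and
   new_var -> new_var are present, so if we later encounter new_var
   itself it maps to itself instead of being duplicated again.  */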
144 
145 /* Insert a tree->tree mapping for ID.  This is only used for
146    variables.  */
147 
148 static void
149 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
150 {
151   if (!gimple_in_ssa_p (id->src_cfun))
152     return;
153 
154   if (!MAY_HAVE_DEBUG_STMTS)
155     return;
156 
157   if (!target_for_debug_bind (key))
158     return;
159 
160   gcc_assert (TREE_CODE (key) == PARM_DECL);
161   gcc_assert (TREE_CODE (value) == VAR_DECL);
162 
163   if (!id->debug_map)
164     id->debug_map = pointer_map_create ();
165 
166   *pointer_map_insert (id->debug_map, key) = value;
167 }
168 
169 /* If nonzero, we're remapping the contents of inlined debug
170    statements.  If negative, an error has occurred, such as a
171    reference to a variable that isn't available in the inlined
172    context.  */
173 static int processing_debug_stmt = 0;
174 
175 /* Construct new SSA name for old NAME. ID is the inline context.  */
176 
177 static tree
178 remap_ssa_name (tree name, copy_body_data *id)
179 {
180   tree new_tree;
181   tree *n;
182 
183   gcc_assert (TREE_CODE (name) == SSA_NAME);
184 
185   n = (tree *) pointer_map_contains (id->decl_map, name);
186   if (n)
187     return unshare_expr (*n);
188 
189   if (processing_debug_stmt)
190     {
191       if (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
192 	  && SSA_NAME_IS_DEFAULT_DEF (name)
193 	  && id->entry_bb == NULL
194 	  && single_succ_p (ENTRY_BLOCK_PTR))
195 	{
196 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
197 	  gimple def_temp;
198 	  gimple_stmt_iterator gsi;
199 	  tree val = SSA_NAME_VAR (name);
200 
201 	  n = (tree *) pointer_map_contains (id->decl_map, val);
202 	  if (n != NULL)
203 	    val = *n;
204 	  if (TREE_CODE (val) != PARM_DECL)
205 	    {
206 	      processing_debug_stmt = -1;
207 	      return name;
208 	    }
209 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
210 	  DECL_ARTIFICIAL (vexpr) = 1;
211 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
212 	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
213 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
214 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
215 	  return vexpr;
216 	}
217 
218       processing_debug_stmt = -1;
219       return name;
220     }
221 
222   /* Do not set DEF_STMT yet, as the statement has not been copied yet.
223      We do that in copy_bb.  */
224   new_tree = remap_decl (SSA_NAME_VAR (name), id);
225 
226   /* We might have substituted a constant or another SSA_NAME for
227      the variable.
228 
229      Replace the SSA name representing RESULT_DECL by the variable during
230      inlining:  this saves us from needing to introduce a PHI node when
231      the return value is only partly initialized.  */
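  /* E.g. (a sketch): if the callee assigns its return value only on
     some paths, keeping it an SSA name after inlining would require a
     PHI node merging the real definition with a default definition;
     using the underlying variable sidesteps that.  */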
232   if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
233       && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
234 	  || !id->transform_return_to_modify))
235     {
236       struct ptr_info_def *pi;
237       new_tree = make_ssa_name (new_tree, NULL);
238       insert_decl_map (id, name, new_tree);
239       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
240 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
241       TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
242       /* At least IPA points-to info can be directly transferred.  */
243       if (id->src_cfun->gimple_df
244 	  && id->src_cfun->gimple_df->ipa_pta
245 	  && (pi = SSA_NAME_PTR_INFO (name))
246 	  && !pi->pt.anything)
247 	{
248 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
249 	  new_pi->pt = pi->pt;
250 	}
251       if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
252 	{
253 	  /* By inlining a function having an uninitialized variable, we might
254 	     extend its lifetime (the variable might get reused).  This causes
255 	     an ICE when we end up extending the lifetime of an SSA name across
256 	     an abnormal edge, and it also increases register pressure.
257 
258 	     We simply initialize all uninitialized vars to 0, except when
259 	     we are inlining into the very first BB.  We could avoid this
260 	     for all BBs that are not inside strongly connected regions of
261 	     the CFG, but that is expensive to test.  */
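	  /* E.g. (a sketch): inlining "int t; if (p) t = 1; use (t);" leaves
	     t without a defining statement; the code below emits "t_N = 0;"
	     at the end of id->entry_bb so that no default definition has to
	     survive across an abnormal edge.  */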
262 	  if (id->entry_bb
263 	      && is_gimple_reg (SSA_NAME_VAR (name))
264 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
265 	      && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
266 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
267 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
268 	    {
269 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
270 	      gimple init_stmt;
271 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
272 
273 	      init_stmt = gimple_build_assign (new_tree, zero);
274 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
275 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
276 	    }
277 	  else
278 	    {
279 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
280 	      if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
281 		  == name)
282 	        set_default_def (SSA_NAME_VAR (new_tree), new_tree);
283 	    }
284 	}
285     }
286   else
287     insert_decl_map (id, name, new_tree);
288   return new_tree;
289 }
290 
291 /* Remap DECL during the copying of the BLOCK tree for the function.  */
292 
293 tree
294 remap_decl (tree decl, copy_body_data *id)
295 {
296   tree *n;
297 
298   /* We only remap local variables in the current function.  */
299 
300   /* See if we have remapped this declaration.  */
301 
302   n = (tree *) pointer_map_contains (id->decl_map, decl);
303 
304   if (!n && processing_debug_stmt)
305     {
306       processing_debug_stmt = -1;
307       return decl;
308     }
309 
310   /* If we didn't already have an equivalent for this declaration,
311      create one now.  */
312   if (!n)
313     {
314       /* Make a copy of the variable or label.  */
315       tree t = id->copy_decl (decl, id);
316 
317       /* Remember it, so that if we encounter this local entity again
318 	 we can reuse this copy.  Do this early because remap_type may
319 	 need this decl for TYPE_STUB_DECL.  */
320       insert_decl_map (id, decl, t);
321 
322       if (!DECL_P (t))
323 	return t;
324 
325       /* Remap types, if necessary.  */
326       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
327       if (TREE_CODE (t) == TYPE_DECL)
328         DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
329 
330       /* Remap sizes as necessary.  */
331       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
332       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
333 
334       /* If fields, do likewise for offset and qualifier.  */
335       if (TREE_CODE (t) == FIELD_DECL)
336 	{
337 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
338 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
339 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
340 	}
341 
342       if ((TREE_CODE (t) == VAR_DECL
343 	   || TREE_CODE (t) == RESULT_DECL
344 	   || TREE_CODE (t) == PARM_DECL)
345 	  && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
346 	  && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
347 	  /* We don't want to mark as referenced VAR_DECLs that were
348 	     not marked as such in the src function.  */
349 	  && (TREE_CODE (decl) != VAR_DECL
350 	      || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
351 					DECL_UID (decl))))
352 	add_referenced_var (t);
353       return t;
354     }
355 
356   if (id->do_not_unshare)
357     return *n;
358   else
359     return unshare_expr (*n);
360 }
361 
362 static tree
363 remap_type_1 (tree type, copy_body_data *id)
364 {
365   tree new_tree, t;
366 
367   /* We do need a copy.  Build and register it now.  If this is a pointer or
368      reference type, remap the designated type and make a new pointer or
369      reference type.  */
370   if (TREE_CODE (type) == POINTER_TYPE)
371     {
372       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
373 					 TYPE_MODE (type),
374 					 TYPE_REF_CAN_ALIAS_ALL (type));
375       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
376 	new_tree = build_type_attribute_qual_variant (new_tree,
377 						      TYPE_ATTRIBUTES (type),
378 						      TYPE_QUALS (type));
379       insert_decl_map (id, type, new_tree);
380       return new_tree;
381     }
382   else if (TREE_CODE (type) == REFERENCE_TYPE)
383     {
384       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
385 					    TYPE_MODE (type),
386 					    TYPE_REF_CAN_ALIAS_ALL (type));
387       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
388 	new_tree = build_type_attribute_qual_variant (new_tree,
389 						      TYPE_ATTRIBUTES (type),
390 						      TYPE_QUALS (type));
391       insert_decl_map (id, type, new_tree);
392       return new_tree;
393     }
394   else
395     new_tree = copy_node (type);
396 
397   insert_decl_map (id, type, new_tree);
398 
399   /* This is a new type, not a copy of an old type.  Need to reassociate
400      variants.  We can handle everything except the main variant lazily.  */
401   t = TYPE_MAIN_VARIANT (type);
402   if (type != t)
403     {
404       t = remap_type (t, id);
405       TYPE_MAIN_VARIANT (new_tree) = t;
406       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
407       TYPE_NEXT_VARIANT (t) = new_tree;
408     }
409   else
410     {
411       TYPE_MAIN_VARIANT (new_tree) = new_tree;
412       TYPE_NEXT_VARIANT (new_tree) = NULL;
413     }
414 
415   if (TYPE_STUB_DECL (type))
416     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
417 
418   /* Lazily create pointer and reference types.  */
419   TYPE_POINTER_TO (new_tree) = NULL;
420   TYPE_REFERENCE_TO (new_tree) = NULL;
421 
422   switch (TREE_CODE (new_tree))
423     {
424     case INTEGER_TYPE:
425     case REAL_TYPE:
426     case FIXED_POINT_TYPE:
427     case ENUMERAL_TYPE:
428     case BOOLEAN_TYPE:
429       t = TYPE_MIN_VALUE (new_tree);
430       if (t && TREE_CODE (t) != INTEGER_CST)
431         walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
432 
433       t = TYPE_MAX_VALUE (new_tree);
434       if (t && TREE_CODE (t) != INTEGER_CST)
435         walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
436       return new_tree;
437 
438     case FUNCTION_TYPE:
439       TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
440       walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
441       return new_tree;
442 
443     case ARRAY_TYPE:
444       TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
445       TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
446       break;
447 
448     case RECORD_TYPE:
449     case UNION_TYPE:
450     case QUAL_UNION_TYPE:
451       {
452 	tree f, nf = NULL;
453 
454 	for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
455 	  {
456 	    t = remap_decl (f, id);
457 	    DECL_CONTEXT (t) = new_tree;
458 	    DECL_CHAIN (t) = nf;
459 	    nf = t;
460 	  }
461 	TYPE_FIELDS (new_tree) = nreverse (nf);
462       }
463       break;
464 
465     case OFFSET_TYPE:
466     default:
467       /* Shouldn't have been thought variable sized.  */
468       gcc_unreachable ();
469     }
470 
471   walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
472   walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
473 
474   return new_tree;
475 }
476 
477 tree
478 remap_type (tree type, copy_body_data *id)
479 {
480   tree *node;
481   tree tmp;
482 
483   if (type == NULL)
484     return type;
485 
486   /* See if we have remapped this type.  */
487   node = (tree *) pointer_map_contains (id->decl_map, type);
488   if (node)
489     return *node;
490 
491   /* The type only needs remapping if it's variably modified.  */
492   if (! variably_modified_type_p (type, id->src_fn))
493     {
494       insert_decl_map (id, type, type);
495       return type;
496     }
497 
498   id->remapping_type_depth++;
499   tmp = remap_type_1 (type, id);
500   id->remapping_type_depth--;
501 
502   return tmp;
503 }
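
/* A sketch of when remap_type must actually copy: for a callee
   containing "int a[n];" the array type's TYPE_SIZE refers to the
   parameter n, so variably_modified_type_p is true and the type is
   rebuilt with n remapped.  Types with constant sizes simply map to
   themselves.  */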
504 
505 /* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
506    is NULL or TYPE has not been remapped before.  */
507 
508 static tree
509 remapped_type (tree type, copy_body_data *id)
510 {
511   tree *node;
512 
513   if (type == NULL)
514     return type;
515 
516   /* See if we have remapped this type.  */
517   node = (tree *) pointer_map_contains (id->decl_map, type);
518   if (node)
519     return *node;
520   else
521     return NULL;
522 }
523 
525 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
526 
527 static bool
528 can_be_nonlocal (tree decl, copy_body_data *id)
529 {
530   /* We cannot duplicate function decls.  */
531   if (TREE_CODE (decl) == FUNCTION_DECL)
532     return true;
533 
534   /* Local static vars must be non-local or we get multiple declaration
535      problems.  */
536   if (TREE_CODE (decl) == VAR_DECL
537       && !auto_var_in_fn_p (decl, id->src_fn))
538     return true;
539 
540   /* At the moment dwarf2out can handle only these types of nodes.  We
541      can support more later.  */
542   if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
543     return false;
544 
545   /* We must use the global type.  We call remapped_type instead of
546      remap_type since we don't want to remap this type here if it
547      hasn't been remapped before.  */
548   if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
549     return false;
550 
551   /* Without SSA we can't tell if a variable is used.  */
552   if (!gimple_in_ssa_p (cfun))
553     return false;
554 
555   /* Live variables must be copied so we can attach DECL_RTL.  */
556   if (var_ann (decl))
557     return false;
558 
559   return true;
560 }
561 
562 static tree
563 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
564 {
565   tree old_var;
566   tree new_decls = NULL_TREE;
567 
568   /* Remap its variables.  */
569   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
570     {
571       tree new_var;
572 
573       if (can_be_nonlocal (old_var, id))
574 	{
575 	  if (TREE_CODE (old_var) == VAR_DECL
576 	      && ! DECL_EXTERNAL (old_var)
577 	      && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
578 	    add_local_decl (cfun, old_var);
579 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
580 	      && !DECL_IGNORED_P (old_var)
581 	      && nonlocalized_list)
582 	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
583 	  continue;
584 	}
585 
586       /* Remap the variable.  */
587       new_var = remap_decl (old_var, id);
588 
589       /* If we didn't remap this variable, we can't mess with its
590 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
591 	 already declared somewhere else, so don't declare it here.  */
592 
593       if (new_var == id->retvar)
594 	;
595       else if (!new_var)
596         {
597 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
598 	      && !DECL_IGNORED_P (old_var)
599 	      && nonlocalized_list)
600 	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
601 	}
602       else
603 	{
604 	  gcc_assert (DECL_P (new_var));
605 	  DECL_CHAIN (new_var) = new_decls;
606 	  new_decls = new_var;
607 
608 	  /* Also copy value-expressions.  */
609 	  if (TREE_CODE (new_var) == VAR_DECL
610 	      && DECL_HAS_VALUE_EXPR_P (new_var))
611 	    {
612 	      tree tem = DECL_VALUE_EXPR (new_var);
613 	      bool old_regimplify = id->regimplify;
614 	      id->remapping_type_depth++;
615 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
616 	      id->remapping_type_depth--;
617 	      id->regimplify = old_regimplify;
618 	      SET_DECL_VALUE_EXPR (new_var, tem);
619 	    }
620 	}
621     }
622 
623   return nreverse (new_decls);
624 }
625 
626 /* Copy the BLOCK to contain remapped versions of the variables
627    therein.  And hook the new block into the block-tree.  */
628 
629 static void
630 remap_block (tree *block, copy_body_data *id)
631 {
632   tree old_block;
633   tree new_block;
634 
635   /* Make the new block.  */
636   old_block = *block;
637   new_block = make_node (BLOCK);
638   TREE_USED (new_block) = TREE_USED (old_block);
639   BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
640   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
641   BLOCK_NONLOCALIZED_VARS (new_block)
642     = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
643   *block = new_block;
644 
645   /* Remap its variables.  */
646   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
647   					&BLOCK_NONLOCALIZED_VARS (new_block),
648 					id);
649 
650   if (id->transform_lang_insert_block)
651     id->transform_lang_insert_block (new_block);
652 
653   /* Remember the remapped block.  */
654   insert_decl_map (id, old_block, new_block);
655 }
656 
657 /* Copy the whole block tree and root it in id->block.  */
658 static tree
659 remap_blocks (tree block, copy_body_data *id)
660 {
661   tree t;
662   tree new_tree = block;
663 
664   if (!block)
665     return NULL;
666 
667   remap_block (&new_tree, id);
668   gcc_assert (new_tree != block);
669   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
670     prepend_lexical_block (new_tree, remap_blocks (t, id));
671   /* Blocks are in arbitrary order, but to make things slightly prettier,
672      do not swap their order when producing a copy.  */
673   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
674   return new_tree;
675 }
676 
677 static void
678 copy_statement_list (tree *tp)
679 {
680   tree_stmt_iterator oi, ni;
681   tree new_tree;
682 
683   new_tree = alloc_stmt_list ();
684   ni = tsi_start (new_tree);
685   oi = tsi_start (*tp);
686   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
687   *tp = new_tree;
688 
689   for (; !tsi_end_p (oi); tsi_next (&oi))
690     {
691       tree stmt = tsi_stmt (oi);
692       if (TREE_CODE (stmt) == STATEMENT_LIST)
693 	/* This copy is not redundant; tsi_link_after will smash this
694 	   STATEMENT_LIST into the end of the one we're building, and we
695 	   don't want to do that with the original.  */
696 	copy_statement_list (&stmt);
697       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
698     }
699 }
700 
701 static void
702 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
703 {
704   tree block = BIND_EXPR_BLOCK (*tp);
705   /* Copy (and replace) the statement.  */
706   copy_tree_r (tp, walk_subtrees, NULL);
707   if (block)
708     {
709       remap_block (&block, id);
710       BIND_EXPR_BLOCK (*tp) = block;
711     }
712 
713   if (BIND_EXPR_VARS (*tp))
714     /* This will remap a lot of the same decls again, but this should be
715        harmless.  */
716     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
717 }
718 
719 
720 /* Create a new gimple_seq by remapping all the statements in BODY
721    using the inlining information in ID.  */
722 
723 static gimple_seq
724 remap_gimple_seq (gimple_seq body, copy_body_data *id)
725 {
726   gimple_stmt_iterator si;
727   gimple_seq new_body = NULL;
728 
729   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
730     {
731       gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
732       gimple_seq_add_stmt (&new_body, new_stmt);
733     }
734 
735   return new_body;
736 }
737 
738 
739 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
740    block using the mapping information in ID.  */
741 
742 static gimple
743 copy_gimple_bind (gimple stmt, copy_body_data *id)
744 {
745   gimple new_bind;
746   tree new_block, new_vars;
747   gimple_seq body, new_body;
748 
749   /* Copy the statement.  Note that we purposely don't use copy_stmt
750      here because we need to remap statements as we copy.  */
751   body = gimple_bind_body (stmt);
752   new_body = remap_gimple_seq (body, id);
753 
754   new_block = gimple_bind_block (stmt);
755   if (new_block)
756     remap_block (&new_block, id);
757 
758   /* This will remap a lot of the same decls again, but this should be
759      harmless.  */
760   new_vars = gimple_bind_vars (stmt);
761   if (new_vars)
762     new_vars = remap_decls (new_vars, NULL, id);
763 
764   new_bind = gimple_build_bind (new_vars, new_body, new_block);
765 
766   return new_bind;
767 }
768 
769 
770 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
771    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
772    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
773    recursing into the child nodes of *TP.  */
774 
775 static tree
776 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
777 {
778   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
779   copy_body_data *id = (copy_body_data *) wi_p->info;
780   tree fn = id->src_fn;
781 
782   if (TREE_CODE (*tp) == SSA_NAME)
783     {
784       *tp = remap_ssa_name (*tp, id);
785       *walk_subtrees = 0;
786       return NULL;
787     }
788   else if (auto_var_in_fn_p (*tp, fn))
789     {
790       /* Local variables and labels need to be replaced by equivalent
791 	 variables.  We don't want to copy static variables; there's
792 	 only one of those, no matter how many times we inline the
793 	 containing function.  Similarly for globals from an outer
794 	 function.  */
795       tree new_decl;
796 
797       /* Remap the declaration.  */
798       new_decl = remap_decl (*tp, id);
799       gcc_assert (new_decl);
800       /* Replace this variable with the copy.  */
801       STRIP_TYPE_NOPS (new_decl);
802       /* ???  The C++ frontend uses void * pointer zero to initialize
803          any other type.  This confuses the middle-end type verification.
804 	 As cloned bodies do not go through gimplification again the fixup
805 	 there doesn't trigger.  */
806       if (TREE_CODE (new_decl) == INTEGER_CST
807 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
808 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
809       *tp = new_decl;
810       *walk_subtrees = 0;
811     }
812   else if (TREE_CODE (*tp) == STATEMENT_LIST)
813     gcc_unreachable ();
814   else if (TREE_CODE (*tp) == SAVE_EXPR)
815     gcc_unreachable ();
816   else if (TREE_CODE (*tp) == LABEL_DECL
817 	   && (!DECL_CONTEXT (*tp)
818 	       || decl_function_context (*tp) == id->src_fn))
819     /* These may need to be remapped for EH handling.  */
820     *tp = remap_decl (*tp, id);
821   else if (TREE_CODE (*tp) == FIELD_DECL)
822     {
823       /* If the enclosing record type is variably_modified_type_p, the field
824 	 has already been remapped.  Otherwise, it need not be.  */
825       tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
826       if (n)
827 	*tp = *n;
828       *walk_subtrees = 0;
829     }
830   else if (TYPE_P (*tp))
831     /* Types may need remapping as well.  */
832     *tp = remap_type (*tp, id);
833   else if (CONSTANT_CLASS_P (*tp))
834     {
835       /* If this is a constant, we have to copy the node iff the type
836 	 will be remapped.  copy_tree_r will not copy a constant.  */
837       tree new_type = remap_type (TREE_TYPE (*tp), id);
838 
839       if (new_type == TREE_TYPE (*tp))
840 	*walk_subtrees = 0;
841 
842       else if (TREE_CODE (*tp) == INTEGER_CST)
843 	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
844 				  TREE_INT_CST_HIGH (*tp));
845       else
846 	{
847 	  *tp = copy_node (*tp);
848 	  TREE_TYPE (*tp) = new_type;
849 	}
850     }
851   else
852     {
853       /* Otherwise, just copy the node.  Note that copy_tree_r already
854 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
855 
856       /* We should never have TREE_BLOCK set on non-statements.  */
857       if (EXPR_P (*tp))
858 	gcc_assert (!TREE_BLOCK (*tp));
859 
860       if (TREE_CODE (*tp) == MEM_REF)
861 	{
862 	  tree ptr = TREE_OPERAND (*tp, 0);
863 	  tree type = remap_type (TREE_TYPE (*tp), id);
864 	  tree old = *tp;
865 
866 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
867 	     that can happen when a pointer argument is an ADDR_EXPR.
868 	     Recurse here manually to allow that.  */
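	  /* E.g. (a sketch): if parameter p was mapped to &a, the copied
	     MEM[p, 0] becomes MEM[&a, 0], which the fold_build2 call below
	     canonicalizes back to a direct reference to a.  */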
869 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
870 	  *tp = fold_build2 (MEM_REF, type,
871 			     ptr, TREE_OPERAND (*tp, 1));
872 	  TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
873 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
874 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
875 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
876 	  *walk_subtrees = 0;
877 	  return NULL;
878 	}
879 
880       /* Here is the "usual case".  Copy this tree node, and then
881 	 tweak some special cases.  */
882       copy_tree_r (tp, walk_subtrees, NULL);
883 
884       if (TREE_CODE (*tp) != OMP_CLAUSE)
885 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
886 
887       /* Global variables we haven't seen yet need to go into referenced
888 	 vars, unless they are referenced only from types.  */
889       if (gimple_in_ssa_p (cfun)
890 	  && TREE_CODE (*tp) == VAR_DECL
891 	  && id->remapping_type_depth == 0
892 	  && !processing_debug_stmt)
893 	add_referenced_var (*tp);
894 
895       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
896 	{
897 	  /* The copied TARGET_EXPR has never been expanded, even if the
898 	     original node was expanded already.  */
899 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
900 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
901 	}
902       else if (TREE_CODE (*tp) == ADDR_EXPR)
903 	{
904 	  /* Variable substitution need not be simple.  In particular,
905 	     the MEM_REF substitution above.  Make sure that
906 	     TREE_CONSTANT and friends are up-to-date.  But make sure
907 	     to not improperly set TREE_BLOCK on some sub-expressions.  */
908 	  int invariant = is_gimple_min_invariant (*tp);
909 	  tree block = id->block;
910 	  id->block = NULL_TREE;
911 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
912 	  id->block = block;
913 	  recompute_tree_invariant_for_addr_expr (*tp);
914 
915 	  /* If this used to be invariant, but is not any longer,
916 	     then regimplification is probably needed.  */
917 	  if (invariant && !is_gimple_min_invariant (*tp))
918 	    id->regimplify = true;
919 
920 	  *walk_subtrees = 0;
921 	}
922     }
923 
924   /* Keep iterating.  */
925   return NULL_TREE;
926 }
927 
928 
929 /* Called from copy_body_id via walk_tree.  DATA is really a
930    `copy_body_data *'.  */
931 
932 tree
933 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
934 {
935   copy_body_data *id = (copy_body_data *) data;
936   tree fn = id->src_fn;
937   tree new_block;
938 
939   /* Begin by recognizing trees that we'll completely rewrite for the
940      inlining context.  Our output for these trees is completely
941      different from our input (e.g. RETURN_EXPR is deleted, and morphs
942      into an edge).  Further down, we'll handle trees that get
943      duplicated and/or tweaked.  */
944 
945   /* When requested, RETURN_EXPRs should be transformed to just the
946      contained MODIFY_EXPR.  The branch semantics of the return will
947      be handled elsewhere by manipulating the CFG rather than a statement.  */
948   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
949     {
950       tree assignment = TREE_OPERAND (*tp, 0);
951 
952       /* If we're returning something, just turn that into an
953 	 assignment to the equivalent of the original RESULT_DECL.
954 	 If the "assignment" is just the result decl, the result
955 	 decl has already been set (e.g. a recent "foo (&result_decl,
956 	 ...)"); just toss the entire RETURN_EXPR.  */
957       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
958 	{
959 	  /* Replace the RETURN_EXPR with (a copy of) the
960 	     MODIFY_EXPR hanging underneath.  */
961 	  *tp = copy_node (assignment);
962 	}
963       else /* Else the RETURN_EXPR returns no value.  */
964 	{
965 	  *tp = NULL;
966 	  return (tree) (void *)1;
967 	}
968     }
969   else if (TREE_CODE (*tp) == SSA_NAME)
970     {
971       *tp = remap_ssa_name (*tp, id);
972       *walk_subtrees = 0;
973       return NULL;
974     }
975 
976   /* Local variables and labels need to be replaced by equivalent
977      variables.  We don't want to copy static variables; there's only
978      one of those, no matter how many times we inline the containing
979      function.  Similarly for globals from an outer function.  */
980   else if (auto_var_in_fn_p (*tp, fn))
981     {
982       tree new_decl;
983 
984       /* Remap the declaration.  */
985       new_decl = remap_decl (*tp, id);
986       gcc_assert (new_decl);
987       /* Replace this variable with the copy.  */
988       STRIP_TYPE_NOPS (new_decl);
989       *tp = new_decl;
990       *walk_subtrees = 0;
991     }
992   else if (TREE_CODE (*tp) == STATEMENT_LIST)
993     copy_statement_list (tp);
994   else if (TREE_CODE (*tp) == SAVE_EXPR
995 	   || TREE_CODE (*tp) == TARGET_EXPR)
996     remap_save_expr (tp, id->decl_map, walk_subtrees);
997   else if (TREE_CODE (*tp) == LABEL_DECL
998 	   && (! DECL_CONTEXT (*tp)
999 	       || decl_function_context (*tp) == id->src_fn))
1000     /* These may need to be remapped for EH handling.  */
1001     *tp = remap_decl (*tp, id);
1002   else if (TREE_CODE (*tp) == BIND_EXPR)
1003     copy_bind_expr (tp, walk_subtrees, id);
1004   /* Types may need remapping as well.  */
1005   else if (TYPE_P (*tp))
1006     *tp = remap_type (*tp, id);
1007 
1008   /* If this is a constant, we have to copy the node iff the type will be
1009      remapped.  copy_tree_r will not copy a constant.  */
1010   else if (CONSTANT_CLASS_P (*tp))
1011     {
1012       tree new_type = remap_type (TREE_TYPE (*tp), id);
1013 
1014       if (new_type == TREE_TYPE (*tp))
1015 	*walk_subtrees = 0;
1016 
1017       else if (TREE_CODE (*tp) == INTEGER_CST)
1018 	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1019 				  TREE_INT_CST_HIGH (*tp));
1020       else
1021 	{
1022 	  *tp = copy_node (*tp);
1023 	  TREE_TYPE (*tp) = new_type;
1024 	}
1025     }
1026 
1027   /* Otherwise, just copy the node.  Note that copy_tree_r already
1028      knows not to copy VAR_DECLs, etc., so this is safe.  */
1029   else
1030     {
1031       /* Here we handle trees that are not completely rewritten.
1032 	 First we detect some inlining-induced bogosities for
1033 	 discarding.  */
1034       if (TREE_CODE (*tp) == MODIFY_EXPR
1035 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1036 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1037 	{
1038 	  /* Some assignments VAR = VAR; don't generate any rtl code
1039 	     and thus don't count as variable modification.  Avoid
1040 	     keeping bogosities like 0 = 0.  */
1041 	  tree decl = TREE_OPERAND (*tp, 0), value;
1042 	  tree *n;
1043 
1044 	  n = (tree *) pointer_map_contains (id->decl_map, decl);
1045 	  if (n)
1046 	    {
1047 	      value = *n;
1048 	      STRIP_TYPE_NOPS (value);
1049 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1050 		{
1051 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1052 		  return copy_tree_body_r (tp, walk_subtrees, data);
1053 		}
1054 	    }
1055 	}
1056       else if (TREE_CODE (*tp) == INDIRECT_REF)
1057 	{
1058 	  /* Get rid of *& from inline substitutions that can happen when a
1059 	     pointer argument is an ADDR_EXPR.  */
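	  /* E.g. (a sketch): with parameter p mapped to &a, the copied *p
	     would read *&a; the code below folds it back to plain a.  */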
1060 	  tree decl = TREE_OPERAND (*tp, 0);
1061 	  tree *n;
1062 
1063 	  n = (tree *) pointer_map_contains (id->decl_map, decl);
1064 	  if (n)
1065 	    {
1066 	      tree new_tree;
1067 	      tree old;
1068 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1069 	         it manually here as we'll eventually get ADDR_EXPRs
1070 		 which lie about their types pointed to.  In this case
1071 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1072 		 but we absolutely rely on that.  As fold_indirect_ref
1073 	         does other useful transformations, try that first, though.  */
1074 	      tree type = TREE_TYPE (TREE_TYPE (*n));
1075 	      if (id->do_not_unshare)
1076 		new_tree = *n;
1077 	      else
1078 		new_tree = unshare_expr (*n);
1079 	      old = *tp;
1080 	      *tp = gimple_fold_indirect_ref (new_tree);
1081 	      if (! *tp)
1082 	        {
1083 		  if (TREE_CODE (new_tree) == ADDR_EXPR)
1084 		    {
1085 		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1086 						 type, new_tree);
1087 		      /* ???  We should either assert here or build
1088 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1089 			 incompatible types to our IL.  */
1090 		      if (! *tp)
1091 			*tp = TREE_OPERAND (new_tree, 0);
1092 		    }
1093 	          else
1094 		    {
1095 	              *tp = build1 (INDIRECT_REF, type, new_tree);
1096 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1097 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1098 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1099 		      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
1100 		    }
1101 		}
1102 	      *walk_subtrees = 0;
1103 	      return NULL;
1104 	    }
1105 	}
1106       else if (TREE_CODE (*tp) == MEM_REF)
1107 	{
1108 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1109 	     that can happen when a pointer argument is an ADDR_EXPR.  */
1110 	  tree decl = TREE_OPERAND (*tp, 0);
1111 	  tree *n;
1112 
1113 	  n = (tree *) pointer_map_contains (id->decl_map, decl);
1114 	  if (n)
1115 	    {
1116 	      tree old = *tp;
1117 	      *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
1118 				 unshare_expr (*n), TREE_OPERAND (*tp, 1));
1119 	      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1120 	      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1121 	      *walk_subtrees = 0;
1122 	      return NULL;
1123 	    }
1124 	}
1125 
1126       /* Here is the "usual case".  Copy this tree node, and then
1127 	 tweak some special cases.  */
1128       copy_tree_r (tp, walk_subtrees, NULL);
1129 
1130       /* Global variables we haven't seen yet need to go into referenced
1131 	 vars, unless they are referenced only from types or debug stmts.  */
1132       if (gimple_in_ssa_p (cfun)
1133 	  && TREE_CODE (*tp) == VAR_DECL
1134 	  && id->remapping_type_depth == 0
1135 	  && !processing_debug_stmt)
1136 	add_referenced_var (*tp);
1137 
1138       /* If EXPR has a block defined, map it to the newly constructed block.
1139          When inlining we want EXPRs without a block to appear in the block
1140 	 of the function call if we are not remapping a type.  */
1141       if (EXPR_P (*tp))
1142 	{
1143 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1144 	  if (TREE_BLOCK (*tp))
1145 	    {
1146 	      tree *n;
1147 	      n = (tree *) pointer_map_contains (id->decl_map,
1148 						 TREE_BLOCK (*tp));
1149 	      gcc_assert (n || id->remapping_type_depth != 0);
1150 	      if (n)
1151 		new_block = *n;
1152 	    }
1153 	  TREE_BLOCK (*tp) = new_block;
1154 	}
1155 
1156       if (TREE_CODE (*tp) != OMP_CLAUSE)
1157 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1158 
1159       /* The copied TARGET_EXPR has never been expanded, even if the
1160 	 original node was expanded already.  */
1161       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1162 	{
1163 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1164 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1165 	}
1166 
1167       /* Variable substitution need not be simple.  In particular, the
1168 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1169 	 and friends are up-to-date.  */
1170       else if (TREE_CODE (*tp) == ADDR_EXPR)
1171 	{
1172 	  int invariant = is_gimple_min_invariant (*tp);
1173 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1174 
1175 	  /* Handle the case where we substituted an INDIRECT_REF
1176 	     into the operand of the ADDR_EXPR.  */
1177 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1178 	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1179 	  else
1180 	    recompute_tree_invariant_for_addr_expr (*tp);
1181 
1182 	  /* If this used to be invariant, but is not any longer,
1183 	     then regimplification is probably needed.  */
1184 	  if (invariant && !is_gimple_min_invariant (*tp))
1185 	    id->regimplify = true;
1186 
1187 	  *walk_subtrees = 0;
1188 	}
1189     }
1190 
1191   /* Keep iterating.  */
1192   return NULL_TREE;
1193 }
1194 
1195 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1196    source function, map that to the duplicate EH region number in
1197    the destination function.  */
1198 
1199 static int
1200 remap_eh_region_nr (int old_nr, copy_body_data *id)
1201 {
1202   eh_region old_r, new_r;
1203   void **slot;
1204 
1205   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1206   slot = pointer_map_contains (id->eh_map, old_r);
1207   new_r = (eh_region) *slot;
1208 
1209   return new_r->index;
1210 }
1211 
1212 /* Similar, but operate on INTEGER_CSTs.  */
1213 
1214 static tree
1215 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1216 {
1217   int old_nr, new_nr;
1218 
1219   old_nr = tree_low_cst (old_t_nr, 0);
1220   new_nr = remap_eh_region_nr (old_nr, id);
1221 
1222   return build_int_cst (integer_type_node, new_nr);
1223 }
1224 
1225 /* Helper for copy_bb.  Remap statement STMT using the inlining
1226    information in ID.  Return the new statement copy.  */
1227 
1228 static gimple
1229 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1230 {
1231   gimple copy = NULL;
1232   struct walk_stmt_info wi;
1233   tree new_block;
1234   bool skip_first = false;
1235 
1236   /* Begin by recognizing trees that we'll completely rewrite for the
1237      inlining context.  Our output for these trees is completely
1238      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1239      into an edge).  Further down, we'll handle trees that get
1240      duplicated and/or tweaked.  */
1241 
1242   /* When requested, GIMPLE_RETURNs should be transformed to just the
1243      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1244      be handled elsewhere by manipulating the CFG rather than the
1245      statement.  */
1246   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1247     {
1248       tree retval = gimple_return_retval (stmt);
1249 
1250       /* If we're returning something, just turn that into an
1251 	 assignment to the equivalent of the original RESULT_DECL.
1252 	 If RETVAL is just the result decl, the result decl has
1253 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1254 	 just toss the entire GIMPLE_RETURN.  */
1255       if (retval
1256 	  && (TREE_CODE (retval) != RESULT_DECL
1257 	      && (TREE_CODE (retval) != SSA_NAME
1258 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1259         {
1260 	  copy = gimple_build_assign (id->retvar, retval);
1261 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1262 	  skip_first = true;
1263 	}
1264       else
1265 	return gimple_build_nop ();
1266     }
1267   else if (gimple_has_substatements (stmt))
1268     {
1269       gimple_seq s1, s2;
1270 
1271       /* When cloning bodies from the C++ front end, we will be handed bodies
1272 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1273 	 have embedded statements.  */
1274       switch (gimple_code (stmt))
1275 	{
1276 	case GIMPLE_BIND:
1277 	  copy = copy_gimple_bind (stmt, id);
1278 	  break;
1279 
1280 	case GIMPLE_CATCH:
1281 	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1282 	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1283 	  break;
1284 
1285 	case GIMPLE_EH_FILTER:
1286 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1287 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1288 	  break;
1289 
1290 	case GIMPLE_TRY:
1291 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1292 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1293 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1294 	  break;
1295 
1296 	case GIMPLE_WITH_CLEANUP_EXPR:
1297 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1298 	  copy = gimple_build_wce (s1);
1299 	  break;
1300 
1301 	case GIMPLE_OMP_PARALLEL:
1302 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1303 	  copy = gimple_build_omp_parallel
1304 	           (s1,
1305 		    gimple_omp_parallel_clauses (stmt),
1306 		    gimple_omp_parallel_child_fn (stmt),
1307 		    gimple_omp_parallel_data_arg (stmt));
1308 	  break;
1309 
1310 	case GIMPLE_OMP_TASK:
1311 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1312 	  copy = gimple_build_omp_task
1313 	           (s1,
1314 		    gimple_omp_task_clauses (stmt),
1315 		    gimple_omp_task_child_fn (stmt),
1316 		    gimple_omp_task_data_arg (stmt),
1317 		    gimple_omp_task_copy_fn (stmt),
1318 		    gimple_omp_task_arg_size (stmt),
1319 		    gimple_omp_task_arg_align (stmt));
1320 	  break;
1321 
1322 	case GIMPLE_OMP_FOR:
1323 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1324 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1325 	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1326 				       gimple_omp_for_collapse (stmt), s2);
1327 	  {
1328 	    size_t i;
1329 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1330 	      {
1331 		gimple_omp_for_set_index (copy, i,
1332 					  gimple_omp_for_index (stmt, i));
1333 		gimple_omp_for_set_initial (copy, i,
1334 					    gimple_omp_for_initial (stmt, i));
1335 		gimple_omp_for_set_final (copy, i,
1336 					  gimple_omp_for_final (stmt, i));
1337 		gimple_omp_for_set_incr (copy, i,
1338 					 gimple_omp_for_incr (stmt, i));
1339 		gimple_omp_for_set_cond (copy, i,
1340 					 gimple_omp_for_cond (stmt, i));
1341 	      }
1342 	  }
1343 	  break;
1344 
1345 	case GIMPLE_OMP_MASTER:
1346 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1347 	  copy = gimple_build_omp_master (s1);
1348 	  break;
1349 
1350 	case GIMPLE_OMP_ORDERED:
1351 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1352 	  copy = gimple_build_omp_ordered (s1);
1353 	  break;
1354 
1355 	case GIMPLE_OMP_SECTION:
1356 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1357 	  copy = gimple_build_omp_section (s1);
1358 	  break;
1359 
1360 	case GIMPLE_OMP_SECTIONS:
1361 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1362 	  copy = gimple_build_omp_sections
1363 	           (s1, gimple_omp_sections_clauses (stmt));
1364 	  break;
1365 
1366 	case GIMPLE_OMP_SINGLE:
1367 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1368 	  copy = gimple_build_omp_single
1369 	           (s1, gimple_omp_single_clauses (stmt));
1370 	  break;
1371 
1372 	case GIMPLE_OMP_CRITICAL:
1373 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1374 	  copy
1375 	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1376 	  break;
1377 
1378 	case GIMPLE_TRANSACTION:
1379 	  s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1380 	  copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1381 	  gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1382 	  break;
1383 
1384 	default:
1385 	  gcc_unreachable ();
1386 	}
1387     }
1388   else
1389     {
1390       if (gimple_assign_copy_p (stmt)
1391 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1392 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1393 	{
1394 	  /* Here we handle statements that are not completely rewritten.
1395 	     First we detect some inlining-induced bogosities for
1396 	     discarding.  */
1397 
1398 	  /* Some assignments VAR = VAR; don't generate any rtl code
1399 	     and thus don't count as variable modification.  Avoid
1400 	     keeping bogosities like 0 = 0.  */
1401 	  tree decl = gimple_assign_lhs (stmt), value;
1402 	  tree *n;
1403 
1404 	  n = (tree *) pointer_map_contains (id->decl_map, decl);
1405 	  if (n)
1406 	    {
1407 	      value = *n;
1408 	      STRIP_TYPE_NOPS (value);
1409 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1410 		return gimple_build_nop ();
1411 	    }
1412 	}
1413 
1414       if (gimple_debug_bind_p (stmt))
1415 	{
1416 	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1417 					  gimple_debug_bind_get_value (stmt),
1418 					  stmt);
1419 	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1420 	  return copy;
1421 	}
1422       if (gimple_debug_source_bind_p (stmt))
1423 	{
1424 	  copy = gimple_build_debug_source_bind
1425 		   (gimple_debug_source_bind_get_var (stmt),
1426 		    gimple_debug_source_bind_get_value (stmt), stmt);
1427 	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1428 	  return copy;
1429 	}
1430 
1431       /* Create a new deep copy of the statement.  */
1432       copy = gimple_copy (stmt);
1433 
1434       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1435 	 RESX and EH_DISPATCH.  */
1436       if (id->eh_map)
1437 	switch (gimple_code (copy))
1438 	  {
1439 	  case GIMPLE_CALL:
1440 	    {
1441 	      tree r, fndecl = gimple_call_fndecl (copy);
1442 	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1443 		switch (DECL_FUNCTION_CODE (fndecl))
1444 		  {
1445 		  case BUILT_IN_EH_COPY_VALUES:
1446 		    r = gimple_call_arg (copy, 1);
1447 		    r = remap_eh_region_tree_nr (r, id);
1448 		    gimple_call_set_arg (copy, 1, r);
1449 		    /* FALLTHRU */
1450 
1451 		  case BUILT_IN_EH_POINTER:
1452 		  case BUILT_IN_EH_FILTER:
1453 		    r = gimple_call_arg (copy, 0);
1454 		    r = remap_eh_region_tree_nr (r, id);
1455 		    gimple_call_set_arg (copy, 0, r);
1456 		    break;
1457 
1458 		  default:
1459 		    break;
1460 		  }
1461 
1462 	      /* Reset alias info if we didn't apply measures to
1463 		 keep it valid over inlining by setting DECL_PT_UID.  */
1464 	      if (!id->src_cfun->gimple_df
1465 		  || !id->src_cfun->gimple_df->ipa_pta)
1466 		gimple_call_reset_alias_info (copy);
1467 	    }
1468 	    break;
1469 
1470 	  case GIMPLE_RESX:
1471 	    {
1472 	      int r = gimple_resx_region (copy);
1473 	      r = remap_eh_region_nr (r, id);
1474 	      gimple_resx_set_region (copy, r);
1475 	    }
1476 	    break;
1477 
1478 	  case GIMPLE_EH_DISPATCH:
1479 	    {
1480 	      int r = gimple_eh_dispatch_region (copy);
1481 	      r = remap_eh_region_nr (r, id);
1482 	      gimple_eh_dispatch_set_region (copy, r);
1483 	    }
1484 	    break;
1485 
1486 	  default:
1487 	    break;
1488 	  }
1489     }
1490 
1491   /* If STMT has a block defined, map it to the newly constructed
1492      block.  When inlining we want statements without a block to
1493      appear in the block of the function call.  */
1494   new_block = id->block;
1495   if (gimple_block (copy))
1496     {
1497       tree *n;
1498       n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1499       gcc_assert (n);
1500       new_block = *n;
1501     }
1502 
1503   gimple_set_block (copy, new_block);
1504 
1505   if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1506     return copy;
1507 
1508   /* Remap all the operands in COPY.  */
1509   memset (&wi, 0, sizeof (wi));
1510   wi.info = id;
1511   if (skip_first)
1512     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1513   else
1514     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1515 
1516   /* Clear the copied virtual operands.  We are not remapping them here
1517      but are going to recreate them from scratch.  */
1518   if (gimple_has_mem_ops (copy))
1519     {
1520       gimple_set_vdef (copy, NULL_TREE);
1521       gimple_set_vuse (copy, NULL_TREE);
1522     }
1523 
1524   return copy;
1525 }
1526 
1527 
1528 /* Copy a basic block and scale its profile counts accordingly.  Edges
1529    will be taken care of later.  */
1530 
1531 static basic_block
1532 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1533          gcov_type count_scale)
1534 {
1535   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1536   basic_block copy_basic_block;
1537   tree decl;
1538   gcov_type freq;
1539   basic_block prev;
1540 
1541   /* Search for previous copied basic block.  */
1542   prev = bb->prev_bb;
1543   while (!prev->aux)
1544     prev = prev->prev_bb;
1545 
1546   /* create_basic_block() will append every new block to
1547      basic_block_info automatically.  */
1548   copy_basic_block = create_basic_block (NULL, (void *) 0,
1549                                          (basic_block) prev->aux);
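  /* E.g. (a sketch): with count_scale equal to REG_BR_PROB_BASE / 2, the
     scaling below gives a block with count 1000 a count of 500 in the
     copy.  */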
1550   copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1551 
1552   /* We are going to rebuild frequencies from scratch.  These values
1553      are of only minor importance for driving canonicalize_loop_headers.  */
1554   freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1555 
1556   /* We recompute frequencies after inlining, so this is quite safe.  */
1557   if (freq > BB_FREQ_MAX)
1558     freq = BB_FREQ_MAX;
1559   copy_basic_block->frequency = freq;
1560 
1561   copy_gsi = gsi_start_bb (copy_basic_block);
1562 
1563   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1564     {
1565       gimple stmt = gsi_stmt (gsi);
1566       gimple orig_stmt = stmt;
1567 
1568       id->regimplify = false;
1569       stmt = remap_gimple_stmt (stmt, id);
1570       if (gimple_nop_p (stmt))
1571 	continue;
1572 
1573       gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1574       seq_gsi = copy_gsi;
1575 
1576       /* With return slot optimization we can end up with
1577 	 non-gimple (foo *)&this->m; fix that here.  */
1578       if (is_gimple_assign (stmt)
1579 	  && gimple_assign_rhs_code (stmt) == NOP_EXPR
1580 	  && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1581 	{
1582 	  tree new_rhs;
1583 	  new_rhs = force_gimple_operand_gsi (&seq_gsi,
1584 					      gimple_assign_rhs1 (stmt),
1585 					      true, NULL, false,
1586 					      GSI_CONTINUE_LINKING);
1587 	  gimple_assign_set_rhs1 (stmt, new_rhs);
1588 	  id->regimplify = false;
1589 	}
1590 
1591       gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1592 
1593       if (id->regimplify)
1594 	gimple_regimplify_operands (stmt, &seq_gsi);
1595 
1596       /* If copy_basic_block was empty at the start of this iteration,
1597 	 call gsi_start_bb again to get at the newly added statements.  */
1598       if (gsi_end_p (copy_gsi))
1599 	copy_gsi = gsi_start_bb (copy_basic_block);
1600       else
1601 	gsi_next (&copy_gsi);
1602 
1603       /* Process the new statement.  The call to gimple_regimplify_operands
1604 	 possibly turned the statement into multiple statements; we
1605 	 need to process all of them.  */
1606       do
1607 	{
1608 	  tree fn;
1609 
1610 	  stmt = gsi_stmt (copy_gsi);
1611 	  if (is_gimple_call (stmt)
1612 	      && gimple_call_va_arg_pack_p (stmt)
1613 	      && id->gimple_call)
1614 	    {
1615 	      /* __builtin_va_arg_pack () should be replaced by
1616 		 all arguments corresponding to ... in the caller.  */
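	      /* For instance (an illustrative sketch; names are invented):

		   extern int logv (const char *fmt, ...);
		   static inline __attribute__ ((always_inline)) int
		   logit (const char *fmt, ...)
		   {
		     return logv (fmt, __builtin_va_arg_pack ());
		   }

		 Inlining logit ("%d", 42) rewrites the inner call to
		 logv ("%d", 42).  */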
1617 	      tree p;
1618 	      gimple new_call;
1619 	      VEC(tree, heap) *argarray;
1620 	      size_t nargs = gimple_call_num_args (id->gimple_call);
1621 	      size_t n;
1622 
1623 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1624 		nargs--;
1625 
1626 	      /* Create the new array of arguments.  */
1627 	      n = nargs + gimple_call_num_args (stmt);
1628 	      argarray = VEC_alloc (tree, heap, n);
1629 	      VEC_safe_grow (tree, heap, argarray, n);
1630 
1631 	      /* Copy all the arguments before '...'  */
1632 	      memcpy (VEC_address (tree, argarray),
1633 		      gimple_call_arg_ptr (stmt, 0),
1634 		      gimple_call_num_args (stmt) * sizeof (tree));
1635 
1636 	      /* Append the arguments passed in '...'  */
1637 	      memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1638 		      gimple_call_arg_ptr (id->gimple_call, 0)
1639 			+ (gimple_call_num_args (id->gimple_call) - nargs),
1640 		      nargs * sizeof (tree));
1641 
1642 	      new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1643 						argarray);
1644 
1645 	      VEC_free (tree, heap, argarray);
1646 
1647 	      /* Copy all GIMPLE_CALL flags, location and block, except
1648 		 GF_CALL_VA_ARG_PACK.  */
1649 	      gimple_call_copy_flags (new_call, stmt);
1650 	      gimple_call_set_va_arg_pack (new_call, false);
1651 	      gimple_set_location (new_call, gimple_location (stmt));
1652 	      gimple_set_block (new_call, gimple_block (stmt));
1653 	      gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1654 
1655 	      gsi_replace (&copy_gsi, new_call, false);
1656 	      stmt = new_call;
1657 	    }
1658 	  else if (is_gimple_call (stmt)
1659 		   && id->gimple_call
1660 		   && (decl = gimple_call_fndecl (stmt))
1661 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1662 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1663 	    {
1664 	      /* __builtin_va_arg_pack_len () should be replaced by
1665 		 the number of anonymous arguments.  */
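	      /* Illustrative: in the logit sketch above, inlining
		 logit ("%d%d", 1, 2) replaces __builtin_va_arg_pack_len ()
		 with the constant 2, the count of anonymous arguments.  */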
1666 	      size_t nargs = gimple_call_num_args (id->gimple_call);
1667 	      tree count, p;
1668 	      gimple new_stmt;
1669 
1670 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1671 		nargs--;
1672 
1673 	      count = build_int_cst (integer_type_node, nargs);
1674 	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1675 	      gsi_replace (&copy_gsi, new_stmt, false);
1676 	      stmt = new_stmt;
1677 	    }
1678 
1679	  /* Statements produced by inlining can be unfolded, especially
1680	     when we have constant propagated some operands.  We can't fold
1681	     them right now for two reasons:
1682	     1) folding requires SSA_NAME_DEF_STMTs to be correct
1683	     2) we can't change function calls to builtins.
1684	     So we just mark the statement for later folding.  We mark
1685	     all new statements, instead of just statements that have changed
1686	     through some nontrivial substitution, so that even statements made
1687	     foldable indirectly are updated.  If this turns out to be
1688	     expensive, copy_body can be told to watch for nontrivial
1689	     changes.  */
1690 	  if (id->statements_to_fold)
1691 	    pointer_set_insert (id->statements_to_fold, stmt);
1692 
1693 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
1694 	     callgraph edges and update or duplicate them.  */
1695 	  if (is_gimple_call (stmt))
1696 	    {
1697 	      struct cgraph_edge *edge;
1698 	      int flags;
1699 
1700 	      switch (id->transform_call_graph_edges)
1701 		{
1702 		case CB_CGE_DUPLICATE:
1703 		  edge = cgraph_edge (id->src_node, orig_stmt);
1704 		  if (edge)
1705 		    {
1706 		      int edge_freq = edge->frequency;
1707 		      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1708 					        gimple_uid (stmt),
1709 					        REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1710 					        true);
1711		      /* We could also just rescale the frequency, but
1712		         doing so would introduce roundoff errors and make
1713		         the verifier unhappy.  */
1714 		      edge->frequency
1715 		        = compute_call_stmt_bb_frequency (id->dst_node->decl,
1716 							  copy_basic_block);
1717 		      if (dump_file
1718 		      	  && profile_status_for_function (cfun) != PROFILE_ABSENT
1719 			  && (edge_freq > edge->frequency + 10
1720 			      || edge_freq < edge->frequency - 10))
1721 			{
1722 			  fprintf (dump_file, "Edge frequency estimated by "
1723			           "cgraph %i diverges from inliner's estimate %i\n",
1724 			  	   edge_freq,
1725 				   edge->frequency);
1726 			  fprintf (dump_file,
1727 			  	   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1728 				   bb->index,
1729 				   bb->frequency,
1730 				   copy_basic_block->frequency);
1731 			}
1732 		      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1733 		    }
1734 		  break;
1735 
1736 		case CB_CGE_MOVE_CLONES:
1737 		  cgraph_set_call_stmt_including_clones (id->dst_node,
1738 							 orig_stmt, stmt);
1739 		  edge = cgraph_edge (id->dst_node, stmt);
1740 		  break;
1741 
1742 		case CB_CGE_MOVE:
1743 		  edge = cgraph_edge (id->dst_node, orig_stmt);
1744 		  if (edge)
1745 		    cgraph_set_call_stmt (edge, stmt);
1746 		  break;
1747 
1748 		default:
1749 		  gcc_unreachable ();
1750 		}
1751 
1752	      /* Constant propagation on arguments done during inlining
1753		 may create a new direct call.  Produce an edge for it.  */
1754 	      if ((!edge
1755 		   || (edge->indirect_inlining_edge
1756 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1757 		  && id->dst_node->analyzed
1758 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
1759 		{
1760 		  struct cgraph_node *dest = cgraph_get_node (fn);
1761 
1762		  /* We have a missing edge in the callgraph.  This can happen
1763		     when previous inlining turned an indirect call into a
1764		     direct call by constant propagating arguments, or when we
1765		     are producing a dead clone (for further cloning).  In all
1766		     other cases we hit a bug (incorrect node sharing is the
1767		     most common reason for missing edges).  */
1768 		  gcc_assert (dest->needed || !dest->analyzed
1769 			      || dest->address_taken
1770 		  	      || !id->src_node->analyzed
1771 			      || !id->dst_node->analyzed);
1772 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1773 		    cgraph_create_edge_including_clones
1774 		      (id->dst_node, dest, orig_stmt, stmt, bb->count,
1775 		       compute_call_stmt_bb_frequency (id->dst_node->decl,
1776 		       				       copy_basic_block),
1777 		       CIF_ORIGINALLY_INDIRECT_CALL);
1778 		  else
1779 		    cgraph_create_edge (id->dst_node, dest, stmt,
1780 					bb->count,
1781 					compute_call_stmt_bb_frequency
1782 					  (id->dst_node->decl, copy_basic_block))->inline_failed
1783 		      = CIF_ORIGINALLY_INDIRECT_CALL;
1784 		  if (dump_file)
1785 		    {
1786 		      fprintf (dump_file, "Created new direct edge to %s\n",
1787 			       cgraph_node_name (dest));
1788 		    }
1789 		}
1790 
1791 	      flags = gimple_call_flags (stmt);
1792 	      if (flags & ECF_MAY_BE_ALLOCA)
1793 		cfun->calls_alloca = true;
1794 	      if (flags & ECF_RETURNS_TWICE)
1795 		cfun->calls_setjmp = true;
1796 	    }
1797 
1798 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1799 				      id->eh_map, id->eh_lp_nr);
1800 
1801 	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1802 	    {
1803 	      ssa_op_iter i;
1804 	      tree def;
1805 
1806 	      find_new_referenced_vars (gsi_stmt (copy_gsi));
1807 	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1808 		if (TREE_CODE (def) == SSA_NAME)
1809 		  SSA_NAME_DEF_STMT (def) = stmt;
1810 	    }
1811 
1812 	  gsi_next (&copy_gsi);
1813 	}
1814       while (!gsi_end_p (copy_gsi));
1815 
1816       copy_gsi = gsi_last_bb (copy_basic_block);
1817     }
1818 
1819   return copy_basic_block;
1820 }
1821 
1822 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
1823    SSA form is quite easy, since the dominator relationship for the old
1824    basic blocks does not change.
1825 
1826    There is, however, an exception: inlining might change the dominator
1827    relation across EH edges leading from basic blocks within the inlined
1828    function to landing pads in the function we inline into.
1829 
1830    The function fills in the arguments of such PHI nodes if their results
1831    refer to gimple regs.  Otherwise, it marks the PHI_RESULT of such
1832    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
1833    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1834    set, and this means that there will be no overlapping live ranges
1835    for the underlying symbol.
1836 
1837    This might change in the future if we allow redirecting of EH edges;
1838    we might then want to change the way the CFG is built pre-inlining to
1839    include all the possible edges.  */
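
/* Illustrative GNU C sketch (an invented example, not from the sources):

     extern void middle (void (*) (void));

     int outer (void)
     {
       __label__ out;
       void nested (void) { goto out; }
       middle (nested);
       return 0;
     out:
       return 1;
     }

   Calls copied into OUTER may transfer control abnormally to OUT, and the
   PHI nodes in OUT's block are what the fixup below compensates for.  */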
1840 static void
1841 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1842 				  bool can_throw, bool nonlocal_goto)
1843 {
1844   edge e;
1845   edge_iterator ei;
1846 
1847   FOR_EACH_EDGE (e, ei, bb->succs)
1848     if (!e->dest->aux
1849 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1850       {
1851 	gimple phi;
1852 	gimple_stmt_iterator si;
1853 
1854 	if (!nonlocal_goto)
1855 	  gcc_assert (e->flags & EDGE_EH);
1856 
1857 	if (!can_throw)
1858 	  gcc_assert (!(e->flags & EDGE_EH));
1859 
1860 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1861 	  {
1862 	    edge re;
1863 
1864 	    phi = gsi_stmt (si);
1865 
1866 	    /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
1867 	    gcc_assert (!e->dest->aux);
1868 
1869 	    gcc_assert ((e->flags & EDGE_EH)
1870 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1871 
1872 	    if (!is_gimple_reg (PHI_RESULT (phi)))
1873 	      {
1874 		mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1875 		continue;
1876 	      }
1877 
1878 	    re = find_edge (ret_bb, e->dest);
1879 	    gcc_assert (re);
1880 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1881 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1882 
1883 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1884 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1885 	  }
1886       }
1887 }
1888 
1889 
1890 /* Copy edges from BB into its copy constructed earlier, scaling the
1891    profile accordingly.  Assume the aux pointers point to the copies
1892    of each BB.  Return true if any debug stmts are left after a
1893    statement that must end the basic block.  */
1894 
1895 static bool
1896 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1897 {
1898   basic_block new_bb = (basic_block) bb->aux;
1899   edge_iterator ei;
1900   edge old_edge;
1901   gimple_stmt_iterator si;
1902   int flags;
1903   bool need_debug_cleanup = false;
1904 
1905   /* Use the indices from the original blocks to create edges for the
1906      new ones.  */
1907   FOR_EACH_EDGE (old_edge, ei, bb->succs)
1908     if (!(old_edge->flags & EDGE_EH))
1909       {
1910 	edge new_edge;
1911 
1912 	flags = old_edge->flags;
1913 
1914	/* Return edges do get a FALLTHRU flag when they get inlined.  */
1915 	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1916 	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
1917 	  flags |= EDGE_FALLTHRU;
1918 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1919 	new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1920 	new_edge->probability = old_edge->probability;
1921       }
1922 
1923   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1924     return false;
1925 
1926   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1927     {
1928       gimple copy_stmt;
1929       bool can_throw, nonlocal_goto;
1930 
1931       copy_stmt = gsi_stmt (si);
1932       if (!is_gimple_debug (copy_stmt))
1933 	{
1934 	  update_stmt (copy_stmt);
1935 	  if (gimple_in_ssa_p (cfun))
1936 	    mark_symbols_for_renaming (copy_stmt);
1937 	}
1938 
1939       /* Do this before the possible split_block.  */
1940       gsi_next (&si);
1941 
1942       /* If this tree could throw an exception, there are two
1943          cases where we need to add abnormal edge(s): the
1944          tree wasn't in a region and there is a "current
1945          region" in the caller; or the original tree had
1946          EH edges.  In both cases split the block after the tree,
1947          and add abnormal edge(s) as needed; we need both
1948          those from the callee and the caller.
1949          We check whether the copy can throw, because the const
1950          propagation can change an INDIRECT_REF which throws
1951          into a COMPONENT_REF which doesn't.  If the copy
1952          can throw, the original could also throw.  */
1953       can_throw = stmt_can_throw_internal (copy_stmt);
1954       nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1955 
1956       if (can_throw || nonlocal_goto)
1957 	{
1958 	  if (!gsi_end_p (si))
1959 	    {
1960 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1961 		gsi_next (&si);
1962 	      if (gsi_end_p (si))
1963 		need_debug_cleanup = true;
1964 	    }
1965 	  if (!gsi_end_p (si))
1966 	    /* Note that bb's predecessor edges aren't necessarily
1967 	       right at this point; split_block doesn't care.  */
1968 	    {
1969 	      edge e = split_block (new_bb, copy_stmt);
1970 
1971 	      new_bb = e->dest;
1972 	      new_bb->aux = e->src->aux;
1973 	      si = gsi_start_bb (new_bb);
1974 	    }
1975 	}
1976 
1977       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1978 	make_eh_dispatch_edges (copy_stmt);
1979       else if (can_throw)
1980 	make_eh_edges (copy_stmt);
1981 
1982       if (nonlocal_goto)
1983 	make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1984 
1985       if ((can_throw || nonlocal_goto)
1986 	  && gimple_in_ssa_p (cfun))
1987 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1988 					  can_throw, nonlocal_goto);
1989     }
1990   return need_debug_cleanup;
1991 }
1992 
1993 /* Copy the PHIs.  All blocks and edges have been copied; some blocks
1994    may have been split and new outgoing EH edges inserted.
1995    BB points to the block of the original function and the AUX pointers
1996    link the original and newly copied blocks.  */
1997 
1998 static void
1999 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2000 {
2001   basic_block const new_bb = (basic_block) bb->aux;
2002   edge_iterator ei;
2003   gimple phi;
2004   gimple_stmt_iterator si;
2005   edge new_edge;
2006   bool inserted = false;
2007 
2008   for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
2009     {
2010       tree res, new_res;
2011       gimple new_phi;
2012 
2013       phi = gsi_stmt (si);
2014       res = PHI_RESULT (phi);
2015       new_res = res;
2016       if (is_gimple_reg (res))
2017 	{
2018 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2019 	  SSA_NAME_DEF_STMT (new_res)
2020 	    = new_phi = create_phi_node (new_res, new_bb);
2021 	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2022 	    {
2023 	      edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2024 	      tree arg;
2025 	      tree new_arg;
2026 	      tree block = id->block;
2027 	      edge_iterator ei2;
2028 
2029	      /* When doing partial cloning, we allow PHIs on the entry block
2030		 as long as all the arguments are the same.  Find any incoming
2031		 edge to see which argument to copy.  */
2032 	      if (!old_edge)
2033 		FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2034 		  if (!old_edge->src->aux)
2035 		    break;
2036 
2037 	      arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2038 	      new_arg = arg;
2039 	      id->block = NULL_TREE;
2040 	      walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2041 	      id->block = block;
2042 	      gcc_assert (new_arg);
2043 	      /* With return slot optimization we can end up with
2044 	         non-gimple (foo *)&this->m, fix that here.  */
2045 	      if (TREE_CODE (new_arg) != SSA_NAME
2046 		  && TREE_CODE (new_arg) != FUNCTION_DECL
2047 		  && !is_gimple_val (new_arg))
2048 		{
2049 		  gimple_seq stmts = NULL;
2050 		  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2051 		  gsi_insert_seq_on_edge (new_edge, stmts);
2052 		  inserted = true;
2053 		}
2054 	      add_phi_arg (new_phi, new_arg, new_edge,
2055 			   gimple_phi_arg_location_from_edge (phi, old_edge));
2056 	    }
2057 	}
2058     }
2059 
2060   /* Commit the delayed edge insertions.  */
2061   if (inserted)
2062     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2063       gsi_commit_one_edge_insert (new_edge, NULL);
2064 }
2065 
2066 
2067 /* Wrapper for remap_decl so it can be used as a callback.  */
2068 
2069 static tree
2070 remap_decl_1 (tree decl, void *data)
2071 {
2072   return remap_decl (decl, (copy_body_data *) data);
2073 }
2074 
2075 /* Build the struct function and associated data structures for the new
2076    clone NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */
2077 
2078 static void
2079 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2080 {
2081   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2082   gcov_type count_scale;
2083 
2084   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2085     count_scale = (REG_BR_PROB_BASE * count
2086 		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2087   else
2088     count_scale = REG_BR_PROB_BASE;
2089 
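  /* Worked example (illustrative): with REG_BR_PROB_BASE == 10000, cloning a
     callee whose entry count is 1000 for a COUNT of 250 yields a count_scale
     of 2500, so each copied count below is scaled to a quarter.  */
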
2090   /* Register specific tree functions.  */
2091   gimple_register_cfg_hooks ();
2092 
2093   /* Get clean struct function.  */
2094   push_struct_function (new_fndecl);
2095 
2096   /* We will rebuild these, so just sanity check that they are empty.  */
2097   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2098   gcc_assert (cfun->local_decls == NULL);
2099   gcc_assert (cfun->cfg == NULL);
2100   gcc_assert (cfun->decl == new_fndecl);
2101 
2102   /* Copy items we preserve during cloning.  */
2103   cfun->static_chain_decl = src_cfun->static_chain_decl;
2104   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2105   cfun->function_end_locus = src_cfun->function_end_locus;
2106   cfun->curr_properties = src_cfun->curr_properties;
2107   cfun->last_verified = src_cfun->last_verified;
2108   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2109   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2110   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2111   cfun->stdarg = src_cfun->stdarg;
2112   cfun->after_inlining = src_cfun->after_inlining;
2113   cfun->can_throw_non_call_exceptions
2114     = src_cfun->can_throw_non_call_exceptions;
2115   cfun->returns_struct = src_cfun->returns_struct;
2116   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2117   cfun->after_tree_profile = src_cfun->after_tree_profile;
2118 
2119   init_empty_tree_cfg ();
2120 
2121   profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2122   ENTRY_BLOCK_PTR->count =
2123     (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2124      REG_BR_PROB_BASE);
2125   ENTRY_BLOCK_PTR->frequency
2126     = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2127   EXIT_BLOCK_PTR->count =
2128     (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2129      REG_BR_PROB_BASE);
2130   EXIT_BLOCK_PTR->frequency =
2131     EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2132   if (src_cfun->eh)
2133     init_eh_for_function ();
2134 
2135   if (src_cfun->gimple_df)
2136     {
2137       init_tree_ssa (cfun);
2138       cfun->gimple_df->in_ssa_p = true;
2139       init_ssa_operands ();
2140     }
2141   pop_cfun ();
2142 }
2143 
2144 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2145    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2146    successor has multiple predecessors, reset the debug stmts' values;
2147    otherwise keep them.  */
2148 
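/* For instance (illustrative): if the copied block ends in

     x = may_throw ();
     # DEBUG a => x

   the block must end at the throwing call, so the trailing debug bind is
   moved into the successor; when the successor has several predecessors the
   bound value is reset instead, as it is not valid on every incoming path.  */
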
2149 static void
2150 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2151 {
2152   edge e;
2153   edge_iterator ei;
2154   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2155 
2156   if (gsi_end_p (si)
2157       || gsi_one_before_end_p (si)
2158       || !(stmt_can_throw_internal (gsi_stmt (si))
2159 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2160     return;
2161 
2162   FOR_EACH_EDGE (e, ei, new_bb->succs)
2163     {
2164       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2165       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2166       while (is_gimple_debug (gsi_stmt (ssi)))
2167 	{
2168 	  gimple stmt = gsi_stmt (ssi), new_stmt;
2169 	  tree var;
2170 	  tree value;
2171 
2172	  /* For the last edge, move the debug stmts instead of copying
2173 	     them.  */
2174 	  if (ei_one_before_end_p (ei))
2175 	    {
2176 	      si = ssi;
2177 	      gsi_prev (&ssi);
2178 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2179 		gimple_debug_bind_reset_value (stmt);
2180 	      gsi_remove (&si, false);
2181 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2182 	      continue;
2183 	    }
2184 
2185 	  if (gimple_debug_bind_p (stmt))
2186 	    {
2187 	      var = gimple_debug_bind_get_var (stmt);
2188 	      if (single_pred_p (e->dest))
2189 		{
2190 		  value = gimple_debug_bind_get_value (stmt);
2191 		  value = unshare_expr (value);
2192 		}
2193 	      else
2194 		value = NULL_TREE;
2195 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2196 	    }
2197 	  else if (gimple_debug_source_bind_p (stmt))
2198 	    {
2199 	      var = gimple_debug_source_bind_get_var (stmt);
2200 	      value = gimple_debug_source_bind_get_value (stmt);
2201 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2202 	    }
2203 	  else
2204 	    gcc_unreachable ();
2205 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2206 	  VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2207 	  gsi_prev (&ssi);
2208 	}
2209     }
2210 }
2211 
2212 /* Make a copy of the body of FN so that it can be inserted inline in
2213    another function.  Walks FN via the CFG; returns the new fndecl.  */
2214 
2215 static tree
2216 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2217 	       basic_block entry_block_map, basic_block exit_block_map,
2218 	       bitmap blocks_to_copy, basic_block new_entry)
2219 {
2220   tree callee_fndecl = id->src_fn;
2221   /* Original cfun for the callee, doesn't change.  */
2222   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2223   struct function *cfun_to_copy;
2224   basic_block bb;
2225   tree new_fndecl = NULL;
2226   bool need_debug_cleanup = false;
2227   gcov_type count_scale;
2228   int last;
2229   int incoming_frequency = 0;
2230   gcov_type incoming_count = 0;
2231 
2232   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2233     count_scale = (REG_BR_PROB_BASE * count
2234 		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2235   else
2236     count_scale = REG_BR_PROB_BASE;
2237 
2238   /* Register specific tree functions.  */
2239   gimple_register_cfg_hooks ();
2240 
2241   /* If we are inlining just a region of the function, connect the new entry
2242      to ENTRY_BLOCK_PTR.  Since the new entry can be part of a loop, we must
2243      compute the frequency and probability of ENTRY_BLOCK_PTR based on the
2244      frequencies and probabilities of edges incoming from the nonduplicated region.  */
2245   if (new_entry)
2246     {
2247       edge e;
2248       edge_iterator ei;
2249 
2250       FOR_EACH_EDGE (e, ei, new_entry->preds)
2251 	if (!e->src->aux)
2252 	  {
2253 	    incoming_frequency += EDGE_FREQUENCY (e);
2254 	    incoming_count += e->count;
2255 	  }
2256       incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2257       incoming_frequency
2258 	= incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2259       ENTRY_BLOCK_PTR->count = incoming_count;
2260       ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2261     }
2262 
2263   /* Must have a CFG here at this point.  */
2264   gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2265 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2266 
2267   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2268 
2269   ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2270   EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2271   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2272   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2273 
2274   /* Duplicate any exception-handling regions.  */
2275   if (cfun->eh)
2276     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2277 				       remap_decl_1, id);
2278 
2279   /* Use aux pointers to map the original blocks to their copies.  */
2280   FOR_EACH_BB_FN (bb, cfun_to_copy)
2281     if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2282       {
2283 	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2284 	bb->aux = new_bb;
2285 	new_bb->aux = bb;
2286       }
2287 
2288   last = last_basic_block;
2289 
2290   /* Now that we've duplicated the blocks, duplicate their edges.  */
2291   FOR_ALL_BB_FN (bb, cfun_to_copy)
2292     if (!blocks_to_copy
2293         || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2294       need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2295 
2296   if (new_entry)
2297     {
2298       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2299       e->probability = REG_BR_PROB_BASE;
2300       e->count = incoming_count;
2301     }
2302 
2303   if (gimple_in_ssa_p (cfun))
2304     FOR_ALL_BB_FN (bb, cfun_to_copy)
2305       if (!blocks_to_copy
2306 	  || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2307 	copy_phis_for_bb (bb, id);
2308 
2309   FOR_ALL_BB_FN (bb, cfun_to_copy)
2310     if (bb->aux)
2311       {
2312 	if (need_debug_cleanup
2313 	    && bb->index != ENTRY_BLOCK
2314 	    && bb->index != EXIT_BLOCK)
2315 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2316 	((basic_block)bb->aux)->aux = NULL;
2317 	bb->aux = NULL;
2318       }
2319 
2320   /* Zero out the AUX fields of blocks newly created during EH edge
2321      insertion.  */
2322   for (; last < last_basic_block; last++)
2323     {
2324       if (need_debug_cleanup)
2325 	maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2326       BASIC_BLOCK (last)->aux = NULL;
2327     }
2328   entry_block_map->aux = NULL;
2329   exit_block_map->aux = NULL;
2330 
2331   if (id->eh_map)
2332     {
2333       pointer_map_destroy (id->eh_map);
2334       id->eh_map = NULL;
2335     }
2336 
2337   return new_fndecl;
2338 }
2339 
2340 /* Copy the debug STMT using ID.  We deal with these statements in a
2341    special way: if any variable in their VALUE expression wasn't
2342    remapped yet, we won't remap it, because that would get decl uids
2343    out of sync, causing codegen differences between -g and -g0.  If
2344    this arises, we drop the VALUE expression altogether.  */
2345 
2346 static void
2347 copy_debug_stmt (gimple stmt, copy_body_data *id)
2348 {
2349   tree t, *n;
2350   struct walk_stmt_info wi;
2351 
2352   t = id->block;
2353   if (gimple_block (stmt))
2354     {
2355       n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2356       if (n)
2357 	t = *n;
2358     }
2359   gimple_set_block (stmt, t);
2360 
2361   /* Remap all the operands in COPY.  */
2362   memset (&wi, 0, sizeof (wi));
2363   wi.info = id;
2364 
2365   processing_debug_stmt = 1;
2366 
2367   if (gimple_debug_source_bind_p (stmt))
2368     t = gimple_debug_source_bind_get_var (stmt);
2369   else
2370     t = gimple_debug_bind_get_var (stmt);
2371 
2372   if (TREE_CODE (t) == PARM_DECL && id->debug_map
2373       && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2374     {
2375       gcc_assert (TREE_CODE (*n) == VAR_DECL);
2376       t = *n;
2377     }
2378   else if (TREE_CODE (t) == VAR_DECL
2379 	   && !TREE_STATIC (t)
2380 	   && gimple_in_ssa_p (cfun)
2381 	   && !pointer_map_contains (id->decl_map, t)
2382 	   && !var_ann (t))
2383     /* T is a non-localized variable.  */;
2384   else
2385     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2386 
2387   if (gimple_debug_bind_p (stmt))
2388     {
2389       gimple_debug_bind_set_var (stmt, t);
2390 
2391       if (gimple_debug_bind_has_value_p (stmt))
2392 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2393 		   remap_gimple_op_r, &wi, NULL);
2394 
2395       /* Punt if any decl couldn't be remapped.  */
2396       if (processing_debug_stmt < 0)
2397 	gimple_debug_bind_reset_value (stmt);
2398     }
2399   else if (gimple_debug_source_bind_p (stmt))
2400     {
2401       gimple_debug_source_bind_set_var (stmt, t);
2402       walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2403 		 remap_gimple_op_r, &wi, NULL);
2404     }
2405 
2406   processing_debug_stmt = 0;
2407 
2408   update_stmt (stmt);
2409   if (gimple_in_ssa_p (cfun))
2410     mark_symbols_for_renaming (stmt);
2411 }
2412 
2413 /* Process deferred debug stmts.  In order to give values better odds
2414    of being successfully remapped, we delay the processing of debug
2415    stmts until all other stmts that might require remapping are
2416    processed.  */
2417 
2418 static void
2419 copy_debug_stmts (copy_body_data *id)
2420 {
2421   size_t i;
2422   gimple stmt;
2423 
2424   if (!id->debug_stmts)
2425     return;
2426 
2427   FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
2428     copy_debug_stmt (stmt, id);
2429 
2430   VEC_free (gimple, heap, id->debug_stmts);
2431 }
2432 
2433 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2434    another function.  */
2435 
2436 static tree
2437 copy_tree_body (copy_body_data *id)
2438 {
2439   tree fndecl = id->src_fn;
2440   tree body = DECL_SAVED_TREE (fndecl);
2441 
2442   walk_tree (&body, copy_tree_body_r, id, NULL);
2443 
2444   return body;
2445 }
2446 
2447 /* Make a copy of the body of FN so that it can be inserted inline in
2448    another function.  */
2449 
2450 static tree
2451 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2452 	   basic_block entry_block_map, basic_block exit_block_map,
2453 	   bitmap blocks_to_copy, basic_block new_entry)
2454 {
2455   tree fndecl = id->src_fn;
2456   tree body;
2457 
2458   /* If this body has a CFG, walk the CFG and copy.  */
2459   gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2460   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2461 		        blocks_to_copy, new_entry);
2462   copy_debug_stmts (id);
2463 
2464   return body;
2465 }
2466 
2467 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2468    defined in function FN, or of a data member thereof.  */
2469 
2470 static bool
2471 self_inlining_addr_expr (tree value, tree fn)
2472 {
2473   tree var;
2474 
2475   if (TREE_CODE (value) != ADDR_EXPR)
2476     return false;
2477 
2478   var = get_base_address (TREE_OPERAND (value, 0));
2479 
2480   return var && auto_var_in_fn_p (var, fn);
2481 }
2482 
2483 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2484    lexical block and line number information from base_stmt, if given,
2485    or from the last stmt of the block otherwise.  */
2486 
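/* For example (an illustrative sketch): when inlining

     static inline int sq (int x) { return x * x; }

   at -O2 -g, the bind emitted here is what lets the debugger still display
   X in the inlined frame after its initialization is optimized away.  */
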
2487 static gimple
2488 insert_init_debug_bind (copy_body_data *id,
2489 			basic_block bb, tree var, tree value,
2490 			gimple base_stmt)
2491 {
2492   gimple note;
2493   gimple_stmt_iterator gsi;
2494   tree tracked_var;
2495 
2496   if (!gimple_in_ssa_p (id->src_cfun))
2497     return NULL;
2498 
2499   if (!MAY_HAVE_DEBUG_STMTS)
2500     return NULL;
2501 
2502   tracked_var = target_for_debug_bind (var);
2503   if (!tracked_var)
2504     return NULL;
2505 
2506   if (bb)
2507     {
2508       gsi = gsi_last_bb (bb);
2509       if (!base_stmt && !gsi_end_p (gsi))
2510 	base_stmt = gsi_stmt (gsi);
2511     }
2512 
2513   note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2514 
2515   if (bb)
2516     {
2517       if (!gsi_end_p (gsi))
2518 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2519       else
2520 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2521     }
2522 
2523   return note;
2524 }
2525 
2526 static void
2527 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2528 {
2529   /* If VAR represents a zero-sized variable, it's possible that the
2530      assignment statement may result in no gimple statements.  */
2531   if (init_stmt)
2532     {
2533       gimple_stmt_iterator si = gsi_last_bb (bb);
2534 
2535       /* We can end up with init statements that store to a non-register
2536          from a rhs with a conversion.  Handle that here by forcing the
2537 	 rhs into a temporary.  gimple_regimplify_operands is not
2538 	 prepared to do this for us.  */
2539       if (!is_gimple_debug (init_stmt)
2540 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2541 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2542 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2543 	{
2544 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2545 			     gimple_expr_type (init_stmt),
2546 			     gimple_assign_rhs1 (init_stmt));
2547 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2548 					  GSI_NEW_STMT);
2549 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2550 	  gimple_assign_set_rhs1 (init_stmt, rhs);
2551 	}
2552       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2553       gimple_regimplify_operands (init_stmt, &si);
2554       mark_symbols_for_renaming (init_stmt);
2555 
2556       if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2557 	{
2558 	  tree var, def = gimple_assign_lhs (init_stmt);
2559 
2560 	  if (TREE_CODE (def) == SSA_NAME)
2561 	    var = SSA_NAME_VAR (def);
2562 	  else
2563 	    var = def;
2564 
2565 	  insert_init_debug_bind (id, bb, var, def, init_stmt);
2566 	}
2567     }
2568 }
2569 
2570 /* Initialize parameter P with VALUE.  If needed, produce an init statement
2571    at the end of BB.  When BB is NULL, we return the init statement to be
2572    output later.  */
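
/* Illustrative sketch: for a call add1 (a + b) to

     static inline int add1 (int p) { return p + 1; }

   this creates a VAR_DECL copy of P and, when BB is given, appends an init
   statement equivalent to p.N = a + b; (p.N being the hypothetical copy).  */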
2573 static gimple
2574 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2575 		     basic_block bb, tree *vars)
2576 {
2577   gimple init_stmt = NULL;
2578   tree var;
2579   tree rhs = value;
2580   tree def = (gimple_in_ssa_p (cfun)
2581 	      ? gimple_default_def (id->src_cfun, p) : NULL);
2582 
2583   if (value
2584       && value != error_mark_node
2585       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2586     {
2587       /* If we can match up the types by promotion/demotion, do so.  */
2588       if (fold_convertible_p (TREE_TYPE (p), value))
2589 	rhs = fold_convert (TREE_TYPE (p), value);
2590       else
2591 	{
2592 	  /* ???  For valid programs we should not end up here.
2593 	     Still if we end up with truly mismatched types here, fall back
2594 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2595 	     GIMPLE to the following passes.  */
2596 	  if (!is_gimple_reg_type (TREE_TYPE (value))
2597 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2598 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2599 	  else
2600 	    rhs = build_zero_cst (TREE_TYPE (p));
2601 	}
2602     }
2603 
2604   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
2605      here since the type of this decl must be visible to the calling
2606      function.  */
2607   var = copy_decl_to_var (p, id);
2608 
2609   /* We're actually using the newly-created var.  */
2610   if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2611     add_referenced_var (var);
2612 
2613   /* Declare this new variable.  */
2614   DECL_CHAIN (var) = *vars;
2615   *vars = var;
2616 
2617   /* Make gimplifier happy about this variable.  */
2618   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2619 
2620   /* We are eventually using the value - make sure all variables
2621      referenced therein are properly recorded.  */
2622   if (value
2623       && gimple_in_ssa_p (cfun)
2624       && TREE_CODE (value) == ADDR_EXPR)
2625     {
2626       tree base = get_base_address (TREE_OPERAND (value, 0));
2627       if (base && TREE_CODE (base) == VAR_DECL)
2628 	add_referenced_var (base);
2629     }
2630 
2631   /* If the parameter is never assigned to and has no SSA_NAMEs created,
2632      we would not need to create a new variable here at all, if it
2633      weren't for debug info.  Still, we can just use the argument
2634      value.  */
2635   if (TREE_READONLY (p)
2636       && !TREE_ADDRESSABLE (p)
2637       && value && !TREE_SIDE_EFFECTS (value)
2638       && !def)
2639     {
2640       /* We may produce non-gimple trees by adding NOPs or introduce
2641	 invalid sharing when the operand is not really constant.
2642	 It is not a big deal to prohibit constant propagation here, as
2643	 we will constant propagate in the DOM1 pass anyway.  */
2644       if (is_gimple_min_invariant (value)
2645 	  && useless_type_conversion_p (TREE_TYPE (p),
2646 						 TREE_TYPE (value))
2647 	  /* We have to be very careful about ADDR_EXPR.  Make sure
2648 	     the base variable isn't a local variable of the inlined
2649 	     function, e.g., when doing recursive inlining, direct or
2650 	     mutually-recursive or whatever, which is why we don't
2651 	     just test whether fn == current_function_decl.  */
2652 	  && ! self_inlining_addr_expr (value, fn))
2653 	{
2654 	  insert_decl_map (id, p, value);
2655 	  insert_debug_decl_map (id, p, var);
2656 	  return insert_init_debug_bind (id, bb, var, value, NULL);
2657 	}
2658     }
2659 
2660   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2661      that way, when the PARM_DECL is encountered, it will be
2662      automatically replaced by the VAR_DECL.  */
2663   insert_decl_map (id, p, var);
2664 
2665   /* Even if P was TREE_READONLY, the new VAR should not be.
2666      In the original code, we would have constructed a
2667      temporary, and then the function body would have never
2668      changed the value of P.  However, now, we will be
2669      constructing VAR directly.  The constructor body may
2670      change its value multiple times as it is being
2671      constructed.  Therefore, it must not be TREE_READONLY;
2672      the back-end assumes that a TREE_READONLY variable is
2673      assigned to only once.  */
2674   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2675     TREE_READONLY (var) = 0;
2676 
2677   /* If there is no setup required and we are in SSA, take the easy route
2678      replacing all SSA names representing the function parameter by the
2679      SSA name passed to the function.
2680 
2681      We need to construct a map for the variable anyway, as it might be
2682      used in different SSA names when the parameter is set in the function.
2683 
2684      Do the replacement at -O0 for const arguments replaced by a constant.
2685      This is important for builtin_constant_p and other constructs requiring
2686      a constant argument to be visible in the inlined function body.  */
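  /* e.g. (illustrative): for a call two_p (2) to

       static inline int two_p (int x) { return __builtin_constant_p (x); }

     the default def of X must map straight to the constant 2 even at -O0 so
     that __builtin_constant_p can evaluate to 1 in the inlined body.  */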
2687   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2688       && (optimize
2689           || (TREE_READONLY (p)
2690 	      && is_gimple_min_invariant (rhs)))
2691       && (TREE_CODE (rhs) == SSA_NAME
2692 	  || is_gimple_min_invariant (rhs))
2693       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2694     {
2695       insert_decl_map (id, def, rhs);
2696       return insert_init_debug_bind (id, bb, var, rhs, NULL);
2697     }
2698 
2699   /* If the value of the argument is never used, don't bother initializing
2700      it.  */
2701   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2702     {
2703       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2704       return insert_init_debug_bind (id, bb, var, rhs, NULL);
2705     }
2706 
2707   /* Initialize this VAR_DECL from the equivalent argument.  Convert
2708      the argument to the proper type in case it was promoted.  */
2709   if (value)
2710     {
2711       if (rhs == error_mark_node)
2712 	{
2713 	  insert_decl_map (id, p, var);
2714 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
2715 	}
2716 
2717       STRIP_USELESS_TYPE_CONVERSION (rhs);
2718 
2719       /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2720 	 keep our trees in gimple form.  */
2721       if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2722 	{
2723 	  def = remap_ssa_name (def, id);
2724           init_stmt = gimple_build_assign (def, rhs);
2725 	  SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2726 	  set_default_def (var, NULL);
2727 	}
2728       else
2729         init_stmt = gimple_build_assign (var, rhs);
2730 
2731       if (bb && init_stmt)
2732         insert_init_stmt (id, bb, init_stmt);
2733     }
2734   return init_stmt;
2735 }
2736 
2737 /* Generate code to initialize the parameters of the function at the
2738    top of the stack in ID from the GIMPLE_CALL STMT.  */
2739 
2740 static void
2741 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2742 			       tree fn, basic_block bb)
2743 {
2744   tree parms;
2745   size_t i;
2746   tree p;
2747   tree vars = NULL_TREE;
2748   tree static_chain = gimple_call_chain (stmt);
2749 
2750   /* Figure out what the parameters are.  */
2751   parms = DECL_ARGUMENTS (fn);
2752 
2753   /* Loop through the parameter declarations, replacing each with an
2754      equivalent VAR_DECL, appropriately initialized.  */
2755   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2756     {
2757       tree val;
2758       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2759       setup_one_parameter (id, p, val, fn, bb, &vars);
2760     }
2761   /* After remapping the parameters, remap their types.  This has to be done
2762      in a second loop over all parameters to appropriately remap
2763      variable sized arrays when the size is specified in a
2764      parameter following the array.  */
2765   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2766     {
2767       tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2768       if (varp
2769 	  && TREE_CODE (*varp) == VAR_DECL)
2770 	{
2771 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2772 		      ? gimple_default_def (id->src_cfun, p) : NULL);
2773 	  tree var = *varp;
2774 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2775 	  /* Also remap the default definition if it was remapped
2776 	     to the default definition of the parameter replacement
2777 	     by the parameter setup.  */
2778 	  if (def)
2779 	    {
2780 	      tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2781 	      if (defp
2782 		  && TREE_CODE (*defp) == SSA_NAME
2783 		  && SSA_NAME_VAR (*defp) == var)
2784 		TREE_TYPE (*defp) = TREE_TYPE (var);
2785 	    }
2786 	}
2787     }
2788 
2789   /* Initialize the static chain.  */
2790   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2791   gcc_assert (fn != current_function_decl);
2792   if (p)
2793     {
2794       /* No static chain?  Seems like a bug in tree-nested.c.  */
2795       gcc_assert (static_chain);
2796 
2797       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2798     }
2799 
2800   declare_inline_vars (id->block, vars);
2801 }
2802 
2803 
2804 /* Declare a return variable to replace the RESULT_DECL for the
2805    function we are calling.  An appropriate DECL_STMT is returned.
2806    The USE_STMT is filled to contain a use of the declaration to
2807    indicate the return value of the function.
2808 
2809    RETURN_SLOT, if non-null, is the place where to store the result.  It
2810    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
2811    was the LHS of the MODIFY_EXPR to which this call is the RHS.
2812 
2813    The return value is a (possibly null) value that holds the result
2814    as seen by the caller.  */
2815 
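/* For instance (illustrative): given

     struct big r;
     r = make_big ();

   MODIFY_DEST is R, and when MAKE_BIG cannot otherwise touch R the inlined
   body is made to store into R directly rather than into a fresh temporary.  */
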
2816 static tree
2817 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2818 			 basic_block entry_bb)
2819 {
2820   tree callee = id->src_fn;
2821   tree result = DECL_RESULT (callee);
2822   tree callee_type = TREE_TYPE (result);
2823   tree caller_type;
2824   tree var, use;
2825 
2826   /* Handle type-mismatches in the function declaration return type
2827      vs. the call expression.  */
2828   if (modify_dest)
2829     caller_type = TREE_TYPE (modify_dest);
2830   else
2831     caller_type = TREE_TYPE (TREE_TYPE (callee));
2832 
2833   /* We don't need to do anything for functions that don't return anything.  */
2834   if (VOID_TYPE_P (callee_type))
2835     return NULL_TREE;
2836 
2837   /* If there was a return slot, then the return value is the
2838      dereferenced address of that object.  */
2839   if (return_slot)
2840     {
2841       /* The front end shouldn't have used both return_slot and
2842 	 a modify expression.  */
2843       gcc_assert (!modify_dest);
2844       if (DECL_BY_REFERENCE (result))
2845 	{
2846 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
2847 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2848 
2849	  /* We are going to construct *&return_slot and we can't do that
2850	     for variables not believed to be addressable.
2851 
2852	     FIXME: This check can possibly trigger, because values returned
2853	     via the return slot optimization are not believed to have their
2854	     address taken by alias analysis.  */
2855 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2856 	  var = return_slot_addr;
2857 	}
2858       else
2859 	{
2860 	  var = return_slot;
2861 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
2862 	  TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2863 	}
2864       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2865            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2866 	  && !DECL_GIMPLE_REG_P (result)
2867 	  && DECL_P (var))
2868 	DECL_GIMPLE_REG_P (var) = 0;
2869       use = NULL;
2870       goto done;
2871     }
2872 
2873   /* All types requiring non-trivial constructors should have been handled.  */
2874   gcc_assert (!TREE_ADDRESSABLE (callee_type));
2875 
2876   /* Attempt to avoid creating a new temporary variable.  */
2877   if (modify_dest
2878       && TREE_CODE (modify_dest) != SSA_NAME)
2879     {
2880       bool use_it = false;
2881 
2882       /* We can't use MODIFY_DEST if there's type promotion involved.  */
2883       if (!useless_type_conversion_p (callee_type, caller_type))
2884 	use_it = false;
2885 
2886       /* ??? If we're assigning to a variable sized type, then we must
2887 	 reuse the destination variable, because we've no good way to
2888 	 create variable sized temporaries at this point.  */
2889       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2890 	use_it = true;
2891 
2892       /* If the callee cannot possibly modify MODIFY_DEST, then we can
2893 	 reuse it as the result of the call directly.  Don't do this if
2894 	 it would promote MODIFY_DEST to addressable.  */
2895       else if (TREE_ADDRESSABLE (result))
2896 	use_it = false;
2897       else
2898 	{
2899 	  tree base_m = get_base_address (modify_dest);
2900 
2901 	  /* If the base isn't a decl, then it's a pointer, and we don't
2902 	     know where that's going to go.  */
2903 	  if (!DECL_P (base_m))
2904 	    use_it = false;
2905 	  else if (is_global_var (base_m))
2906 	    use_it = false;
2907 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2908 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2909 		   && !DECL_GIMPLE_REG_P (result)
2910 		   && DECL_GIMPLE_REG_P (base_m))
2911 	    use_it = false;
2912 	  else if (!TREE_ADDRESSABLE (base_m))
2913 	    use_it = true;
2914 	}
2915 
2916       if (use_it)
2917 	{
2918 	  var = modify_dest;
2919 	  use = NULL;
2920 	  goto done;
2921 	}
2922     }
2923 
2924   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2925 
2926   var = copy_result_decl_to_var (result, id);
2927   if (gimple_in_ssa_p (cfun))
2928     add_referenced_var (var);
2929 
2930   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2931 
2932   /* Do not have the rest of GCC warn about this variable as it should
2933      not be visible to the user.  */
2934   TREE_NO_WARNING (var) = 1;
2935 
2936   declare_inline_vars (id->block, var);
2937 
2938   /* Build the use expr.  If the return type of the function was
2939      promoted, convert it back to the expected type.  */
2940   use = var;
2941   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2942     {
2943       /* If we can match up the types by promotion/demotion, do so.  */
2944       if (fold_convertible_p (caller_type, var))
2945 	use = fold_convert (caller_type, var);
2946       else
2947 	{
2948 	  /* ???  For valid programs we should not end up here.
2949 	     Still if we end up with truly mismatched types here, fall back
2950 	     to using a MEM_REF to not leak invalid GIMPLE to the following
2951 	     passes.  */
2952 	  /* Prevent var from being written into SSA form.  */
2953 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
2954 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
2955 	    DECL_GIMPLE_REG_P (var) = false;
2956 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
2957 	    TREE_ADDRESSABLE (var) = true;
2958 	  use = fold_build2 (MEM_REF, caller_type,
2959 			     build_fold_addr_expr (var),
2960 			     build_int_cst (ptr_type_node, 0));
2961 	}
2962     }
2963 
2964   STRIP_USELESS_TYPE_CONVERSION (use);
2965 
2966   if (DECL_BY_REFERENCE (result))
2967     {
2968       TREE_ADDRESSABLE (var) = 1;
2969       var = build_fold_addr_expr (var);
2970     }
2971 
2972  done:
2973   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2974      way, when the RESULT_DECL is encountered, it will be
2975      automatically replaced by the VAR_DECL.
2976 
2977      When returning by reference, ensure that RESULT_DECL remaps to
2978      gimple_val.  */
2979   if (DECL_BY_REFERENCE (result)
2980       && !is_gimple_val (var))
2981     {
2982       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2983       if (gimple_in_ssa_p (id->src_cfun))
2984 	add_referenced_var (temp);
2985       insert_decl_map (id, result, temp);
2986       /* When RESULT_DECL is in SSA form, we need to remap and initialize
2987	 its default_def SSA_NAME.  */
2988       if (gimple_in_ssa_p (id->src_cfun)
2989 	  && is_gimple_reg (result))
2990 	{
2991 	  temp = make_ssa_name (temp, NULL);
2992 	  insert_decl_map (id, gimple_default_def (id->src_cfun, result),
2993 			   temp);
2994 	}
2995       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2996     }
2997   else
2998     insert_decl_map (id, result, var);
2999 
3000   /* Remember this so we can ignore it in remap_decls.  */
3001   id->retvar = var;
3002 
3003   return use;
3004 }
3005 
3006 /* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
3007    to a local label.  */
3008 
3009 static tree
3010 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3011 {
3012   tree node = *nodep;
3013   tree fn = (tree) fnp;
3014 
3015   if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3016     return node;
3017 
3018   if (TYPE_P (node))
3019     *walk_subtrees = 0;
3020 
3021   return NULL_TREE;
3022 }
3023 
3024 /* Determine if the function can be copied.  If so, return NULL.  If
3025    not, return a string describing the reason for failure.  */
3026 
3027 static const char *
3028 copy_forbidden (struct function *fun, tree fndecl)
3029 {
3030   const char *reason = fun->cannot_be_copied_reason;
3031   tree decl;
3032   unsigned ix;
3033 
3034   /* Only examine the function once.  */
3035   if (fun->cannot_be_copied_set)
3036     return reason;
3037 
3038   /* We cannot copy a function that receives a non-local goto
3039      because we cannot remap the destination label used in the
3040      function that is performing the non-local goto.  */
3041   /* ??? Actually, this should be possible, if we work at it.
3042      No doubt there's just a handful of places that simply
3043      assume it doesn't happen and don't substitute properly.  */
3044   if (fun->has_nonlocal_label)
3045     {
3046       reason = G_("function %q+F can never be copied "
3047 		  "because it receives a non-local goto");
3048       goto fail;
3049     }
3050 
3051   FOR_EACH_LOCAL_DECL (fun, ix, decl)
3052     if (TREE_CODE (decl) == VAR_DECL
3053 	&& TREE_STATIC (decl)
3054 	&& !DECL_EXTERNAL (decl)
3055 	&& DECL_INITIAL (decl)
3056 	&& walk_tree_without_duplicates (&DECL_INITIAL (decl),
3057 					 has_label_address_in_static_1,
3058 					 fndecl))
3059       {
3060 	reason = G_("function %q+F can never be copied because it saves "
3061 		    "address of local label in a static variable");
3062 	goto fail;
3063       }
3064 
3065  fail:
3066   fun->cannot_be_copied_reason = reason;
3067   fun->cannot_be_copied_set = true;
3068   return reason;
3069 }
3070 
3071 
3072 static const char *inline_forbidden_reason;
3073 
3074 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3075    iff a function cannot be inlined.  Also sets the reason why.  */
3076 
3077 static tree
3078 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3079 			 struct walk_stmt_info *wip)
3080 {
3081   tree fn = (tree) wip->info;
3082   tree t;
3083   gimple stmt = gsi_stmt (*gsi);
3084 
3085   switch (gimple_code (stmt))
3086     {
3087     case GIMPLE_CALL:
3088       /* Refuse to inline an alloca call unless the user explicitly forced
3089	 it, as this may change the program's memory overhead drastically when
3090	 the function using alloca is called in a loop.  In the GCC present in
3091	 SPEC2000, inlining into schedule_block caused it to require 2GB of
3092	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3093	 VLA objects, as those can't cause unbounded growth (they're always
3094	 wrapped inside stack_save/stack_restore regions).  */
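      /* Illustrative sketch of the problematic pattern:

	   static inline char *tmpbuf (unsigned int n)
	   { return __builtin_alloca (n); }

	 inlined into a loop body, every iteration would push a fresh N-byte
	 block onto the caller's stack with no intervening release.  */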
3095       if (gimple_alloca_call_p (stmt)
3096 	  && !gimple_call_alloca_for_var_p (stmt)
3097 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3098 	{
3099 	  inline_forbidden_reason
3100 	    = G_("function %q+F can never be inlined because it uses "
3101 		 "alloca (override using the always_inline attribute)");
3102 	  *handled_ops_p = true;
3103 	  return fn;
3104 	}
3105 
3106       t = gimple_call_fndecl (stmt);
3107       if (t == NULL_TREE)
3108 	break;
3109 
3110       /* We cannot inline functions that call setjmp.  */
3111       if (setjmp_call_p (t))
3112 	{
3113 	  inline_forbidden_reason
3114 	    = G_("function %q+F can never be inlined because it uses setjmp");
3115 	  *handled_ops_p = true;
3116 	  return t;
3117 	}
3118 
3119       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
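      /* Illustrative (a sketch): in

	   #include <setjmp.h>
	   static jmp_buf env;
	   static inline int guard (void) { return setjmp (env); }

	 setjmp returns twice, so inlining GUARD would splice that abnormal
	 control flow into callers in a way the inliner does not model.  */
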
3120 	switch (DECL_FUNCTION_CODE (t))
3121 	  {
3122 	    /* We cannot inline functions that take a variable number of
3123 	       arguments.  */
3124 	  case BUILT_IN_VA_START:
3125 	  case BUILT_IN_NEXT_ARG:
3126 	  case BUILT_IN_VA_END:
3127 	    inline_forbidden_reason
3128 	      = G_("function %q+F can never be inlined because it "
3129 		   "uses variable argument lists");
3130 	    *handled_ops_p = true;
3131 	    return t;
3132 
3133 	  case BUILT_IN_LONGJMP:
3134 	    /* We can't inline functions that call __builtin_longjmp at
3135 	       all.  The non-local goto machinery really requires the
3136 	       destination be in a different function.  If we allow the
3137 	       function calling __builtin_longjmp to be inlined into the
3138 	       function calling __builtin_setjmp, Things will Go Awry.  */
3139 	    inline_forbidden_reason
3140 	      = G_("function %q+F can never be inlined because "
3141 		   "it uses setjmp-longjmp exception handling");
3142 	    *handled_ops_p = true;
3143 	    return t;
3144 
3145 	  case BUILT_IN_NONLOCAL_GOTO:
3146 	    /* Similarly.  */
3147 	    inline_forbidden_reason
3148 	      = G_("function %q+F can never be inlined because "
3149 		   "it uses non-local goto");
3150 	    *handled_ops_p = true;
3151 	    return t;
3152 
3153 	  case BUILT_IN_RETURN:
3154 	  case BUILT_IN_APPLY_ARGS:
3155 	    /* If a __builtin_apply_args caller would be inlined,
3156 	       it would be saving arguments of the function it has
3157 	       been inlined into.  Similarly __builtin_return would
3158 	       return from the function it has been inlined into.  */
3159 	    inline_forbidden_reason
3160 	      = G_("function %q+F can never be inlined because "
3161 		   "it uses __builtin_return or __builtin_apply_args");
3162 	    *handled_ops_p = true;
3163 	    return t;
3164 
3165 	  default:
3166 	    break;
3167 	  }
3168       break;
3169 
3170     case GIMPLE_GOTO:
3171       t = gimple_goto_dest (stmt);
3172 
3173       /* We will not inline a function which uses computed goto.  The
3174 	 addresses of its local labels, which may be tucked into
3175 	 global storage, are of course not constant across
3176 	 instantiations, which causes unexpected behavior.  */
3177       if (TREE_CODE (t) != LABEL_DECL)
3178 	{
3179 	  inline_forbidden_reason
3180 	    = G_("function %q+F can never be inlined "
3181 		 "because it contains a computed goto");
3182 	  *handled_ops_p = true;
3183 	  return t;
3184 	}
3185       break;
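      /* For illustration (hypothetical user code):

	   static void *tab[] = { &&l1, &&l2 };
	   goto *tab[i];

	 the &&-label addresses stashed in "tab" would still point into
	 the original body after inlining.  */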
3186 
3187     default:
3188       break;
3189     }
3190 
3191   *handled_ops_p = false;
3192   return NULL_TREE;
3193 }
3194 
3195 /* Return true if FNDECL is a function that cannot be inlined into
3196    another one.  */
3197 
3198 static bool
3199 inline_forbidden_p (tree fndecl)
3200 {
3201   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3202   struct walk_stmt_info wi;
3203   struct pointer_set_t *visited_nodes;
3204   basic_block bb;
3205   bool forbidden_p = false;
3206 
3207   /* First check for shared reasons not to copy the code.  */
3208   inline_forbidden_reason = copy_forbidden (fun, fndecl);
3209   if (inline_forbidden_reason != NULL)
3210     return true;
3211 
3212   /* Next, walk the statements of the function looking for
3213      constructs we can't handle, or that are non-optimal for inlining.  */
3214   visited_nodes = pointer_set_create ();
3215   memset (&wi, 0, sizeof (wi));
3216   wi.info = (void *) fndecl;
3217   wi.pset = visited_nodes;
3218 
3219   FOR_EACH_BB_FN (bb, fun)
3220     {
3221       gimple ret;
3222       gimple_seq seq = bb_seq (bb);
3223       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3224       forbidden_p = (ret != NULL);
3225       if (forbidden_p)
3226 	break;
3227     }
3228 
3229   pointer_set_destroy (visited_nodes);
3230   return forbidden_p;
3231 }
3232 
3233 /* Returns nonzero if FN is a function that does not have any
3234    fundamental inline blocking properties.  */
3235 
3236 bool
3237 tree_inlinable_function_p (tree fn)
3238 {
3239   bool inlinable = true;
3240   bool do_warning;
3241   tree always_inline;
3242 
3243   /* If we've already decided this function shouldn't be inlined,
3244      there's no need to check again.  */
3245   if (DECL_UNINLINABLE (fn))
3246     return false;
3247 
3248   /* We only warn for functions declared `inline' by the user.  */
3249   do_warning = (warn_inline
3250 		&& DECL_DECLARED_INLINE_P (fn)
3251 		&& !DECL_NO_INLINE_WARNING_P (fn)
3252 		&& !DECL_IN_SYSTEM_HEADER (fn));
3253 
3254   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3255 
3256   if (flag_no_inline
3257       && always_inline == NULL)
3258     {
3259       if (do_warning)
3260         warning (OPT_Winline, "function %q+F can never be inlined because it "
3261                  "is suppressed using -fno-inline", fn);
3262       inlinable = false;
3263     }
3264 
3265   else if (!function_attribute_inlinable_p (fn))
3266     {
3267       if (do_warning)
3268         warning (OPT_Winline, "function %q+F can never be inlined because it "
3269                  "uses attributes conflicting with inlining", fn);
3270       inlinable = false;
3271     }
3272 
3273   else if (inline_forbidden_p (fn))
3274     {
3275       /* See if we should warn about uninlinable functions.  Previously,
3276 	 some of these warnings would be issued while trying to expand
3277 	 the function inline, but that would cause multiple warnings
3278 	 about functions that would for example call alloca.  But since
3279 	 this is a property of the function, just one warning is enough.
3280 	 As a bonus we can now give more details about the reason why a
3281 	 function is not inlinable.  */
3282       if (always_inline)
3283 	error (inline_forbidden_reason, fn);
3284       else if (do_warning)
3285 	warning (OPT_Winline, inline_forbidden_reason, fn);
3286 
3287       inlinable = false;
3288     }
3289 
3290   /* Squirrel away the result so that we don't have to check again.  */
3291   DECL_UNINLINABLE (fn) = !inlinable;
3292 
3293   return inlinable;
3294 }
3295 
3296 /* Estimate the cost of a memory move.  Use machine-dependent
3297    word size and take a possible memcpy call into account.  */
3298 
3299 int
3300 estimate_move_cost (tree type)
3301 {
3302   HOST_WIDE_INT size;
3303 
3304   gcc_assert (!VOID_TYPE_P (type));
3305 
3306   if (TREE_CODE (type) == VECTOR_TYPE)
3307     {
3308       enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3309       enum machine_mode simd
3310 	= targetm.vectorize.preferred_simd_mode (inner);
3311       int simd_mode_size = GET_MODE_SIZE (simd);
3312       return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3313 	      / simd_mode_size);
3314     }
3315 
3316   size = int_size_in_bytes (type);
3317 
3318   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3319     /* Cost of a memcpy call, 3 arguments and the call.  */
3320     return 4;
3321   else
3322     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3323 }
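
/* For instance (illustrative numbers only; both parameters are
   target-dependent): with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, a
   16-byte struct costs (16 + 8 - 1) / 8 == 2, while a 64-byte struct
   exceeds the 8 * 4 == 32 byte piecewise limit and is costed as a
   memcpy call, i.e. 4.  */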
3324 
3325 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3326 
3327 static int
3328 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3329 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3330 {
3331   switch (code)
3332     {
3333     /* These are "free" conversions, or their presumed cost
3334        is folded into other operations.  */
3335     case RANGE_EXPR:
3336     CASE_CONVERT:
3337     case COMPLEX_EXPR:
3338     case PAREN_EXPR:
3339     case VIEW_CONVERT_EXPR:
3340       return 0;
3341 
3342     /* Assign cost of 1 to usual operations.
3343        ??? We may consider mapping RTL costs to this.  */
3344     case COND_EXPR:
3345     case VEC_COND_EXPR:
3346     case VEC_PERM_EXPR:
3347 
3348     case PLUS_EXPR:
3349     case POINTER_PLUS_EXPR:
3350     case MINUS_EXPR:
3351     case MULT_EXPR:
3352     case FMA_EXPR:
3353 
3354     case ADDR_SPACE_CONVERT_EXPR:
3355     case FIXED_CONVERT_EXPR:
3356     case FIX_TRUNC_EXPR:
3357 
3358     case NEGATE_EXPR:
3359     case FLOAT_EXPR:
3360     case MIN_EXPR:
3361     case MAX_EXPR:
3362     case ABS_EXPR:
3363 
3364     case LSHIFT_EXPR:
3365     case RSHIFT_EXPR:
3366     case LROTATE_EXPR:
3367     case RROTATE_EXPR:
3368     case VEC_LSHIFT_EXPR:
3369     case VEC_RSHIFT_EXPR:
3370 
3371     case BIT_IOR_EXPR:
3372     case BIT_XOR_EXPR:
3373     case BIT_AND_EXPR:
3374     case BIT_NOT_EXPR:
3375 
3376     case TRUTH_ANDIF_EXPR:
3377     case TRUTH_ORIF_EXPR:
3378     case TRUTH_AND_EXPR:
3379     case TRUTH_OR_EXPR:
3380     case TRUTH_XOR_EXPR:
3381     case TRUTH_NOT_EXPR:
3382 
3383     case LT_EXPR:
3384     case LE_EXPR:
3385     case GT_EXPR:
3386     case GE_EXPR:
3387     case EQ_EXPR:
3388     case NE_EXPR:
3389     case ORDERED_EXPR:
3390     case UNORDERED_EXPR:
3391 
3392     case UNLT_EXPR:
3393     case UNLE_EXPR:
3394     case UNGT_EXPR:
3395     case UNGE_EXPR:
3396     case UNEQ_EXPR:
3397     case LTGT_EXPR:
3398 
3399     case CONJ_EXPR:
3400 
3401     case PREDECREMENT_EXPR:
3402     case PREINCREMENT_EXPR:
3403     case POSTDECREMENT_EXPR:
3404     case POSTINCREMENT_EXPR:
3405 
3406     case REALIGN_LOAD_EXPR:
3407 
3408     case REDUC_MAX_EXPR:
3409     case REDUC_MIN_EXPR:
3410     case REDUC_PLUS_EXPR:
3411     case WIDEN_SUM_EXPR:
3412     case WIDEN_MULT_EXPR:
3413     case DOT_PROD_EXPR:
3414     case WIDEN_MULT_PLUS_EXPR:
3415     case WIDEN_MULT_MINUS_EXPR:
3416     case WIDEN_LSHIFT_EXPR:
3417 
3418     case VEC_WIDEN_MULT_HI_EXPR:
3419     case VEC_WIDEN_MULT_LO_EXPR:
3420     case VEC_UNPACK_HI_EXPR:
3421     case VEC_UNPACK_LO_EXPR:
3422     case VEC_UNPACK_FLOAT_HI_EXPR:
3423     case VEC_UNPACK_FLOAT_LO_EXPR:
3424     case VEC_PACK_TRUNC_EXPR:
3425     case VEC_PACK_SAT_EXPR:
3426     case VEC_PACK_FIX_TRUNC_EXPR:
3427     case VEC_WIDEN_LSHIFT_HI_EXPR:
3428     case VEC_WIDEN_LSHIFT_LO_EXPR:
3429 
3430       return 1;
3431 
3432     /* A few special cases of expensive operations.  This is useful
3433        to avoid inlining functions having too many of these.  */
3434     case TRUNC_DIV_EXPR:
3435     case CEIL_DIV_EXPR:
3436     case FLOOR_DIV_EXPR:
3437     case ROUND_DIV_EXPR:
3438     case EXACT_DIV_EXPR:
3439     case TRUNC_MOD_EXPR:
3440     case CEIL_MOD_EXPR:
3441     case FLOOR_MOD_EXPR:
3442     case ROUND_MOD_EXPR:
3443     case RDIV_EXPR:
3444       if (TREE_CODE (op2) != INTEGER_CST)
3445         return weights->div_mod_cost;
3446       return 1;
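      /* E.g. (an illustration) "x / 8" is costed 1 here, since division
	 by a constant is typically strength-reduced to cheaper
	 operations, while "x / y" gets the full div_mod_cost.  */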
3447 
3448     default:
3449       /* We expect a copy assignment with no operator.  */
3450       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3451       return 0;
3452     }
3453 }
3454 
3455 
3456 /* Estimate number of instructions that will be created by expanding
3457    the statements in the statement sequence STMTS.
3458    WEIGHTS contains weights attributed to various constructs.  */
3459 
3460 static int
3461 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3462 {
3463   int cost;
3464   gimple_stmt_iterator gsi;
3465 
3466   cost = 0;
3467   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3468     cost += estimate_num_insns (gsi_stmt (gsi), weights);
3469 
3470   return cost;
3471 }
3472 
3473 
3474 /* Estimate number of instructions that will be created by expanding STMT.
3475    WEIGHTS contains weights attributed to various constructs.  */
3476 
3477 int
3478 estimate_num_insns (gimple stmt, eni_weights *weights)
3479 {
3480   unsigned cost, i;
3481   enum gimple_code code = gimple_code (stmt);
3482   tree lhs;
3483   tree rhs;
3484 
3485   switch (code)
3486     {
3487     case GIMPLE_ASSIGN:
3488       /* Try to estimate the cost of assignments.  We have two cases to
3489 	 deal with:
3490 	 1) Simple assignments to registers;
3491 	 2) Stores to things that must live in memory.  This includes
3492 	    "normal" stores to scalars, but also assignments of large
3493 	    structures, or constructors of big arrays;
3494 
3495 	 Let us look at both cases, assuming we have "a = b + C":
3496 	 <GIMPLE_ASSIGN <var_decl "a">
3497 	        <plus_expr <var_decl "b"> <constant C>>
3498 	 If "a" is a GIMPLE register, the assignment to it is free on almost
3499 	 any target, because "a" usually ends up in a real register.  Hence
3500 	 the only cost of this expression comes from the PLUS_EXPR, and we
3501 	 can ignore the GIMPLE_ASSIGN.
3502 	 If "a" is not a GIMPLE register, the assignment to "a" will most
3503 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3504 	 of moving something into "a", which we compute using the function
3505 	 estimate_move_cost.  */
3506       if (gimple_clobber_p (stmt))
3507 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
3508 
3509       lhs = gimple_assign_lhs (stmt);
3510       rhs = gimple_assign_rhs1 (stmt);
3511 
3512       if (is_gimple_reg (lhs))
3513 	cost = 0;
3514       else
3515 	cost = estimate_move_cost (TREE_TYPE (lhs));
3516 
3517       if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3518 	cost += estimate_move_cost (TREE_TYPE (rhs));
3519 
3520       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3521       				      gimple_assign_rhs1 (stmt),
3522 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3523 				      == GIMPLE_BINARY_RHS
3524 				      ? gimple_assign_rhs2 (stmt) : NULL);
3525       break;
3526 
3527     case GIMPLE_COND:
3528       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3529       				         gimple_op (stmt, 0),
3530 				         gimple_op (stmt, 1));
3531       break;
3532 
3533     case GIMPLE_SWITCH:
3534       /* Take into account cost of the switch + guess 2 conditional jumps for
3535          each case label.
3536 
3537 	 TODO: once the switch expansion logic is sufficiently separated, we can
3538 	 do a better job of estimating the cost of the switch.  */
3539       if (weights->time_based)
3540         cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3541       else
3542         cost = gimple_switch_num_labels (stmt) * 2;
3543       break;
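      /* E.g. (an illustration) a switch with 8 labels is costed
	 floor_log2 (8) * 2 == 6 when estimating time (a balanced
	 decision tree), but 8 * 2 == 16 when estimating size.  */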
3544 
3545     case GIMPLE_CALL:
3546       {
3547 	tree decl = gimple_call_fndecl (stmt);
3548 	struct cgraph_node *node = NULL;
3549 
3550 	/* Do not special case builtins where we see the body.
3551 	   This just confuses the inliner.  */
3552 	if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3553 	  ;
3554 	/* For builtins that are likely to be expanded to nothing or
3555 	   inlined, do not account for operand costs.  */
3556 	else if (is_simple_builtin (decl))
3557 	  return 0;
3558 	else if (is_inexpensive_builtin (decl))
3559 	  return weights->target_builtin_call_cost;
3560 	else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3561 	  {
3562 	    /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3563 	       specialize the cheap expansion we do here.
3564 	       ???  This asks for a more general solution.  */
3565 	    switch (DECL_FUNCTION_CODE (decl))
3566 	      {
3567 		case BUILT_IN_POW:
3568 		case BUILT_IN_POWF:
3569 		case BUILT_IN_POWL:
3570 		  if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3571 		      && REAL_VALUES_EQUAL
3572 			   (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3573 		    return estimate_operator_cost (MULT_EXPR, weights,
3574 						   gimple_call_arg (stmt, 0),
3575 						   gimple_call_arg (stmt, 0));
3576 		  break;
3577 
3578 		default:
3579 		  break;
3580 	      }
3581 	  }
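	/* So, for illustration, "pow (x, 2.0)" is costed like "x * x"
	   rather than as a full call.  */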
3582 
3583 	cost = node ? weights->call_cost : weights->indirect_call_cost;
3584 	if (gimple_call_lhs (stmt))
3585 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3586 	for (i = 0; i < gimple_call_num_args (stmt); i++)
3587 	  {
3588 	    tree arg = gimple_call_arg (stmt, i);
3589 	    cost += estimate_move_cost (TREE_TYPE (arg));
3590 	  }
3591 	break;
3592       }
3593 
3594     case GIMPLE_RETURN:
3595       return weights->return_cost;
3596 
3597     case GIMPLE_GOTO:
3598     case GIMPLE_LABEL:
3599     case GIMPLE_NOP:
3600     case GIMPLE_PHI:
3601     case GIMPLE_PREDICT:
3602     case GIMPLE_DEBUG:
3603       return 0;
3604 
3605     case GIMPLE_ASM:
3606       return asm_str_count (gimple_asm_string (stmt));
3607 
3608     case GIMPLE_RESX:
3609       /* This is either going to be an external function call with one
3610 	 argument, or two register copy statements plus a goto.  */
3611       return 2;
3612 
3613     case GIMPLE_EH_DISPATCH:
3614       /* ??? This is going to turn into a switch statement.  Ideally
3615 	 we'd have a look at the eh region and estimate the number of
3616 	 edges involved.  */
3617       return 10;
3618 
3619     case GIMPLE_BIND:
3620       return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3621 
3622     case GIMPLE_EH_FILTER:
3623       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3624 
3625     case GIMPLE_CATCH:
3626       return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3627 
3628     case GIMPLE_TRY:
3629       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3630               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3631 
3632     /* OpenMP directives are generally very expensive.  */
3633 
3634     case GIMPLE_OMP_RETURN:
3635     case GIMPLE_OMP_SECTIONS_SWITCH:
3636     case GIMPLE_OMP_ATOMIC_STORE:
3637     case GIMPLE_OMP_CONTINUE:
3638       /* ...except these, which are cheap.  */
3639       return 0;
3640 
3641     case GIMPLE_OMP_ATOMIC_LOAD:
3642       return weights->omp_cost;
3643 
3644     case GIMPLE_OMP_FOR:
3645       return (weights->omp_cost
3646               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3647               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3648 
3649     case GIMPLE_OMP_PARALLEL:
3650     case GIMPLE_OMP_TASK:
3651     case GIMPLE_OMP_CRITICAL:
3652     case GIMPLE_OMP_MASTER:
3653     case GIMPLE_OMP_ORDERED:
3654     case GIMPLE_OMP_SECTION:
3655     case GIMPLE_OMP_SECTIONS:
3656     case GIMPLE_OMP_SINGLE:
3657       return (weights->omp_cost
3658               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3659 
3660     case GIMPLE_TRANSACTION:
3661       return (weights->tm_cost
3662 	      + estimate_num_insns_seq (gimple_transaction_body (stmt),
3663 					weights));
3664 
3665     default:
3666       gcc_unreachable ();
3667     }
3668 
3669   return cost;
3670 }
3671 
3672 /* Estimate number of instructions that will be created by expanding
3673    function FNDECL.  WEIGHTS contains weights attributed to various
3674    constructs.  */
3675 
3676 int
3677 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3678 {
3679   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3680   gimple_stmt_iterator bsi;
3681   basic_block bb;
3682   int n = 0;
3683 
3684   gcc_assert (my_function && my_function->cfg);
3685   FOR_EACH_BB_FN (bb, my_function)
3686     {
3687       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3688 	n += estimate_num_insns (gsi_stmt (bsi), weights);
3689     }
3690 
3691   return n;
3692 }
3693 
3694 
3695 /* Initializes weights used by estimate_num_insns.  */
3696 
3697 void
3698 init_inline_once (void)
3699 {
3700   eni_size_weights.call_cost = 1;
3701   eni_size_weights.indirect_call_cost = 3;
3702   eni_size_weights.target_builtin_call_cost = 1;
3703   eni_size_weights.div_mod_cost = 1;
3704   eni_size_weights.omp_cost = 40;
3705   eni_size_weights.tm_cost = 10;
3706   eni_size_weights.time_based = false;
3707   eni_size_weights.return_cost = 1;
3708 
3709   /* Estimating the time for a call is difficult, since we have no idea what
3710      called function does.  In the current uses of eni_time_weights,
3711      underestimating the cost does less harm than overestimating it, so
3712      we choose a rather small value here.  */
3713   eni_time_weights.call_cost = 10;
3714   eni_time_weights.indirect_call_cost = 15;
3715   eni_time_weights.target_builtin_call_cost = 1;
3716   eni_time_weights.div_mod_cost = 10;
3717   eni_time_weights.omp_cost = 40;
3718   eni_time_weights.tm_cost = 40;
3719   eni_time_weights.time_based = true;
3720   eni_time_weights.return_cost = 2;
3721 }
3722 
3723 /* Estimate the number of instructions in a gimple_seq. */
3724 
3725 int
3726 count_insns_seq (gimple_seq seq, eni_weights *weights)
3727 {
3728   gimple_stmt_iterator gsi;
3729   int n = 0;
3730   for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3731     n += estimate_num_insns (gsi_stmt (gsi), weights);
3732 
3733   return n;
3734 }
3735 
3736 
3737 /* Install a new lexical TREE_BLOCK underneath 'current_block'.  */
3738 
3739 static void
3740 prepend_lexical_block (tree current_block, tree new_block)
3741 {
3742   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3743   BLOCK_SUBBLOCKS (current_block) = new_block;
3744   BLOCK_SUPERCONTEXT (new_block) = current_block;
3745 }
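
/* For illustration: if CURRENT_BLOCK already has subblocks B1 -> B2,
   the result is NEW_BLOCK -> B1 -> B2, with NEW_BLOCK's supercontext
   pointing back at CURRENT_BLOCK.  */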
3746 
3747 /* Add local variables from CALLEE to CALLER.  */
3748 
3749 static inline void
3750 add_local_variables (struct function *callee, struct function *caller,
3751 		     copy_body_data *id, bool check_var_ann)
3752 {
3753   tree var;
3754   unsigned ix;
3755 
3756   FOR_EACH_LOCAL_DECL (callee, ix, var)
3757     if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3758       {
3759 	if (!check_var_ann
3760 	    || (var_ann (var) && add_referenced_var (var)))
3761 	  add_local_decl (caller, var);
3762       }
3763     else if (!can_be_nonlocal (var, id))
3764       {
3765         tree new_var = remap_decl (var, id);
3766 
3767         /* Remap debug-expressions.  */
3768 	if (TREE_CODE (new_var) == VAR_DECL
3769 	    && DECL_DEBUG_EXPR_IS_FROM (new_var)
3770 	    && new_var != var)
3771 	  {
3772 	    tree tem = DECL_DEBUG_EXPR (var);
3773 	    bool old_regimplify = id->regimplify;
3774 	    id->remapping_type_depth++;
3775 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
3776 	    id->remapping_type_depth--;
3777 	    id->regimplify = old_regimplify;
3778 	    SET_DECL_DEBUG_EXPR (new_var, tem);
3779 	  }
3780 	add_local_decl (caller, new_var);
3781       }
3782 }
3783 
3784 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
3785 
3786 static bool
3787 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3788 {
3789   tree use_retvar;
3790   tree fn;
3791   struct pointer_map_t *st, *dst;
3792   tree return_slot;
3793   tree modify_dest;
3794   location_t saved_location;
3795   struct cgraph_edge *cg_edge;
3796   cgraph_inline_failed_t reason;
3797   basic_block return_block;
3798   edge e;
3799   gimple_stmt_iterator gsi, stmt_gsi;
3800   bool successfully_inlined = FALSE;
3801   bool purge_dead_abnormal_edges;
3802 
3803   /* Set input_location here so we get the right instantiation context
3804      if we call instantiate_decl from inlinable_function_p.  */
3805   saved_location = input_location;
3806   if (gimple_has_location (stmt))
3807     input_location = gimple_location (stmt);
3808 
3809   /* From here on, we're only interested in CALL_EXPRs.  */
3810   if (gimple_code (stmt) != GIMPLE_CALL)
3811     goto egress;
3812 
3813   cg_edge = cgraph_edge (id->dst_node, stmt);
3814   gcc_checking_assert (cg_edge);
3815   /* First, see if we can figure out what function is being called.
3816      If we cannot, then there is no hope of inlining the function.  */
3817   if (cg_edge->indirect_unknown_callee)
3818     goto egress;
3819   fn = cg_edge->callee->decl;
3820   gcc_checking_assert (fn);
3821 
3822   /* If FN is a declaration of a function in a nested scope that was
3823      globally declared inline, we don't set its DECL_INITIAL.
3824      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3825      C++ front-end uses it for cdtors to refer to their internal
3826      declarations, that are not real functions.  Fortunately those
3827      don't have trees to be saved, so we can tell by checking their
3828      gimple_body.  */
3829   if (!DECL_INITIAL (fn)
3830       && DECL_ABSTRACT_ORIGIN (fn)
3831       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3832     fn = DECL_ABSTRACT_ORIGIN (fn);
3833 
3834   /* Don't try to inline functions that are not well-suited to inlining.  */
3835   if (!cgraph_inline_p (cg_edge, &reason))
3836     {
3837       /* If this call was originally indirect, we do not want to emit any
3838 	 inlining related warnings or sorry messages because there are no
3839 	 guarantees regarding those.  */
3840       if (cg_edge->indirect_inlining_edge)
3841 	goto egress;
3842 
3843       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3844           /* For extern inline functions that get redefined we have
3845 	     always silently ignored the always_inline flag.  Better
3846 	     behaviour would be to keep both bodies and use the extern
3847 	     inline body for inlining, but we can't do that because
3848 	     frontends overwrite the body.  */
3849 	  && !cg_edge->callee->local.redefined_extern_inline
3850 	  /* Avoid warnings during early inline pass. */
3851 	  && cgraph_global_info_ready
3852 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
3853 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3854 	{
3855 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
3856 		 cgraph_inline_failed_string (reason));
3857 	  error ("called from here");
3858 	}
3859       else if (warn_inline
3860 	       && DECL_DECLARED_INLINE_P (fn)
3861 	       && !DECL_NO_INLINE_WARNING_P (fn)
3862 	       && !DECL_IN_SYSTEM_HEADER (fn)
3863 	       && reason != CIF_UNSPECIFIED
3864 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3865 	       /* Do not warn about not inlined recursive calls.  */
3866 	       && !cgraph_edge_recursive_p (cg_edge)
3867 	       /* Avoid warnings during early inline pass. */
3868 	       && cgraph_global_info_ready)
3869 	{
3870 	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3871 		   fn, _(cgraph_inline_failed_string (reason)));
3872 	  warning (OPT_Winline, "called from here");
3873 	}
3874       goto egress;
3875     }
3876   fn = cg_edge->callee->decl;
3877 
3878 #ifdef ENABLE_CHECKING
3879   if (cg_edge->callee->decl != id->dst_node->decl)
3880     verify_cgraph_node (cg_edge->callee);
3881 #endif
3882 
3883   /* We will be inlining this callee.  */
3884   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3885 
3886   /* Update the caller's EH personality.  */
3887   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3888     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3889       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3890 
3891   /* Split the block holding the GIMPLE_CALL.  */
3892   e = split_block (bb, stmt);
3893   bb = e->src;
3894   return_block = e->dest;
3895   remove_edge (e);
3896 
3897   /* split_block splits after the statement; work around this by
3898      moving the call into the second block manually.  Not pretty,
3899      but seems easier than doing the CFG manipulation by hand
3900      when the GIMPLE_CALL is in the last statement of BB.  */
3901   stmt_gsi = gsi_last_bb (bb);
3902   gsi_remove (&stmt_gsi, false);
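  /* Schematically (an illustration): if BB was "s1; s2; call; s3",
     split_block yields BB = "s1; s2; call" and RETURN_BLOCK = "s3";
     moving the call by hand gives BB = "s1; s2" and RETURN_BLOCK =
     "call; s3", ready for the inlined body to be spliced in between.  */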
3903 
3904   /* If the GIMPLE_CALL was in the last statement of BB, it may have
3905      been the source of abnormal edges.  In this case, schedule
3906      the removal of dead abnormal edges.  */
3907   gsi = gsi_start_bb (return_block);
3908   if (gsi_end_p (gsi))
3909     {
3910       gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3911       purge_dead_abnormal_edges = true;
3912     }
3913   else
3914     {
3915       gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3916       purge_dead_abnormal_edges = false;
3917     }
3918 
3919   stmt_gsi = gsi_start_bb (return_block);
3920 
3921   /* Build a block containing code to initialize the arguments, the
3922      actual inline expansion of the body, and a label for the return
3923      statements within the function to jump to.  The type of the
3924      statement expression is the return type of the function call.  */
3925   id->block = make_node (BLOCK);
3926   BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3927   BLOCK_SOURCE_LOCATION (id->block) = input_location;
3928   prepend_lexical_block (gimple_block (stmt), id->block);
3929 
3930   /* Local declarations will be replaced by their equivalents in this
3931      map.  */
3932   st = id->decl_map;
3933   id->decl_map = pointer_map_create ();
3934   dst = id->debug_map;
3935   id->debug_map = NULL;
3936 
3937   /* Record the function we are about to inline.  */
3938   id->src_fn = fn;
3939   id->src_node = cg_edge->callee;
3940   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3941   id->gimple_call = stmt;
3942 
3943   gcc_assert (!id->src_cfun->after_inlining);
3944 
3945   id->entry_bb = bb;
3946   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3947     {
3948       gimple_stmt_iterator si = gsi_last_bb (bb);
3949       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3950       						   NOT_TAKEN),
3951 			GSI_NEW_STMT);
3952     }
3953   initialize_inlined_parameters (id, stmt, fn, bb);
3954 
3955   if (DECL_INITIAL (fn))
3956     prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3957 
3958   /* Return statements in the function body will be replaced by jumps
3959      to the RET_LABEL.  */
3960   gcc_assert (DECL_INITIAL (fn));
3961   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3962 
3963   /* Find the LHS to which the result of this call is assigned.  */
3964   return_slot = NULL;
3965   if (gimple_call_lhs (stmt))
3966     {
3967       modify_dest = gimple_call_lhs (stmt);
3968 
3969       /* The function which we are inlining might not return a value,
3970 	 in which case we should issue a warning that the function
3971 	 does not return a value.  In that case the optimizers will
3972 	 see that the variable to which the value is assigned was not
3973 	 initialized.  We do not want to issue a warning about that
3974 	 uninitialized variable.  */
3975       if (DECL_P (modify_dest))
3976 	TREE_NO_WARNING (modify_dest) = 1;
3977 
3978       if (gimple_call_return_slot_opt_p (stmt))
3979 	{
3980 	  return_slot = modify_dest;
3981 	  modify_dest = NULL;
3982 	}
3983     }
3984   else
3985     modify_dest = NULL;
3986 
3987   /* If we are inlining a call to the C++ operator new, we don't want
3988      to use type based alias analysis on the return value.  Otherwise
3989      we may get confused if the compiler sees that the inlined new
3990      function returns a pointer which was just deleted.  See bug
3991      33407.  */
3992   if (DECL_IS_OPERATOR_NEW (fn))
3993     {
3994       return_slot = NULL;
3995       modify_dest = NULL;
3996     }
3997 
3998   /* Declare the return variable for the function.  */
3999   use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4000 
4001   /* Add local vars in this inlined callee to caller.  */
4002   add_local_variables (id->src_cfun, cfun, id, true);
4003 
4004   if (dump_file && (dump_flags & TDF_DETAILS))
4005     {
4006       fprintf (dump_file, "Inlining ");
4007       print_generic_expr (dump_file, id->src_fn, 0);
4008       fprintf (dump_file, " to ");
4009       print_generic_expr (dump_file, id->dst_fn, 0);
4010       fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4011     }
4012 
4013   /* This is it.  Duplicate the callee body.  Assume callee is
4014      pre-gimplified.  Note that we must not alter the caller
4015      function in any way before this point, as this CALL_EXPR may be
4016      a self-referential call; if we're calling ourselves, we need to
4017      duplicate our body before altering anything.  */
4018   copy_body (id, bb->count,
4019   	     cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
4020 	     bb, return_block, NULL, NULL);
4021 
4022   /* Reset the escaped solution.  */
4023   if (cfun->gimple_df)
4024     pt_solution_reset (&cfun->gimple_df->escaped);
4025 
4026   /* Clean up.  */
4027   if (id->debug_map)
4028     {
4029       pointer_map_destroy (id->debug_map);
4030       id->debug_map = dst;
4031     }
4032   pointer_map_destroy (id->decl_map);
4033   id->decl_map = st;
4034 
4035   /* Unlink the call's virtual operands before replacing it.  */
4036   unlink_stmt_vdef (stmt);
4037 
4038   /* If the inlined function returns a result that we care about,
4039      substitute the GIMPLE_CALL with an assignment of the return
4040      variable to the LHS of the call.  That is, if STMT was
4041      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4042   if (use_retvar && gimple_call_lhs (stmt))
4043     {
4044       gimple old_stmt = stmt;
4045       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4046       gsi_replace (&stmt_gsi, stmt, false);
4047       if (gimple_in_ssa_p (cfun))
4048 	mark_symbols_for_renaming (stmt);
4049       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4050     }
4051   else
4052     {
4053       /* Handle the case of inlining a function with no return
4054 	 statement, which causes the return value to become undefined.  */
4055       if (gimple_call_lhs (stmt)
4056 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4057 	{
4058 	  tree name = gimple_call_lhs (stmt);
4059 	  tree var = SSA_NAME_VAR (name);
4060 	  tree def = gimple_default_def (cfun, var);
4061 
4062 	  if (def)
4063 	    {
4064 	      /* If the variable is used undefined, make this name
4065 		 undefined via a move.  */
4066 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4067 	      gsi_replace (&stmt_gsi, stmt, true);
4068 	    }
4069 	  else
4070 	    {
4071 	      /* Otherwise make this variable undefined.  */
4072 	      gsi_remove (&stmt_gsi, true);
4073 	      set_default_def (var, name);
4074 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4075 	    }
4076 	}
4077       else
4078         gsi_remove (&stmt_gsi, true);
4079     }
4080 
4081   if (purge_dead_abnormal_edges)
4082     {
4083       gimple_purge_dead_eh_edges (return_block);
4084       gimple_purge_dead_abnormal_call_edges (return_block);
4085     }
4086 
4087   /* If the value of the new expression is ignored, that's OK.  We
4088      don't warn about this for CALL_EXPRs, so we shouldn't warn about
4089      the equivalent inlined version either.  */
4090   if (is_gimple_assign (stmt))
4091     {
4092       gcc_assert (gimple_assign_single_p (stmt)
4093 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4094       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4095     }
4096 
4097   /* Output the inlining info for this abstract function, since it has been
4098      inlined.  If we don't do this now, we can lose the information about the
4099      variables in the function when the blocks get blown away as soon as we
4100      remove the cgraph node.  */
4101   (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4102 
4103   /* Update callgraph if needed.  */
4104   cgraph_remove_node (cg_edge->callee);
4105 
4106   id->block = NULL_TREE;
4107   successfully_inlined = TRUE;
4108 
4109  egress:
4110   input_location = saved_location;
4111   return successfully_inlined;
4112 }
4113 
4114 /* Expand call statements reachable from STMT_P.
4115    We can only have CALL_EXPRs as the "toplevel" tree code or nested
4116    in a MODIFY_EXPR.  */
4117 
4118 static bool
4119 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4120 {
4121   gimple_stmt_iterator gsi;
4122 
4123   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4124     {
4125       gimple stmt = gsi_stmt (gsi);
4126 
4127       if (is_gimple_call (stmt)
4128 	  && expand_call_inline (bb, stmt, id))
4129 	return true;
4130     }
4131 
4132   return false;
4133 }
4134 
4135 
4136 /* Walk all basic blocks created after FIRST and try to fold every statement
4137    in the STATEMENTS pointer set.  */
4138 
4139 static void
4140 fold_marked_statements (int first, struct pointer_set_t *statements)
4141 {
4142   for (; first < n_basic_blocks; first++)
4143     if (BASIC_BLOCK (first))
4144       {
4145         gimple_stmt_iterator gsi;
4146 
4147 	for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4148 	     !gsi_end_p (gsi);
4149 	     gsi_next (&gsi))
4150 	  if (pointer_set_contains (statements, gsi_stmt (gsi)))
4151 	    {
4152 	      gimple old_stmt = gsi_stmt (gsi);
4153 	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4154 
4155 	      if (old_decl && DECL_BUILT_IN (old_decl))
4156 		{
4157 		  /* Folding builtins can create multiple instructions,
4158 		     we need to look at all of them.  */
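		  /* E.g. (an illustration) a __builtin_memcpy with a
		     small constant size may fold into a single aggregate
		     assignment, while other builtins may expand into
		     several statements.  */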
4159 		  gimple_stmt_iterator i2 = gsi;
4160 		  gsi_prev (&i2);
4161 		  if (fold_stmt (&gsi))
4162 		    {
4163 		      gimple new_stmt;
4164 		      /* If a builtin at the end of a bb folded into nothing,
4165 			 the following loop won't work.  */
4166 		      if (gsi_end_p (gsi))
4167 			{
4168 			  cgraph_update_edges_for_call_stmt (old_stmt,
4169 							     old_decl, NULL);
4170 			  break;
4171 			}
4172 		      if (gsi_end_p (i2))
4173 			i2 = gsi_start_bb (BASIC_BLOCK (first));
4174 		      else
4175 			gsi_next (&i2);
4176 		      while (1)
4177 			{
4178 			  new_stmt = gsi_stmt (i2);
4179 			  update_stmt (new_stmt);
4180 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4181 							     new_stmt);
4182 
4183 			  if (new_stmt == gsi_stmt (gsi))
4184 			    {
4185 			      /* It is okay to check only for the very last
4186 				 of these statements.  If it is a throwing
4187 				 statement nothing will change.  If it isn't,
4188 				 this can remove EH edges.  The only case in
4189 				 which this would be wrong is if some
4190 				 intermediate stmt throws but the last one
4191 				 doesn't; that would mean we'd have to split
4192 				 the block, which we can't do here and we'd
4193 				 lose anyway.  And as builtins probably never
4194 				 throw, this all is moot anyway.  */
4195 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
4196 								  new_stmt))
4197 				gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4198 			      break;
4199 			    }
4200 			  gsi_next (&i2);
4201 			}
4202 		    }
4203 		}
4204 	      else if (fold_stmt (&gsi))
4205 		{
4206 		  /* Re-read the statement from GSI as fold_stmt() may
4207 		     have changed it.  */
4208 		  gimple new_stmt = gsi_stmt (gsi);
4209 		  update_stmt (new_stmt);
4210 
4211 		  if (is_gimple_call (old_stmt)
4212 		      || is_gimple_call (new_stmt))
4213 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4214 						       new_stmt);
4215 
4216 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4217 		    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4218 		}
4219 	    }
4220       }
4221 }
4222 
4223 /* Return true if BB has at least one abnormal outgoing edge.  */
4224 
4225 static inline bool
4226 has_abnormal_outgoing_edge_p (basic_block bb)
4227 {
4228   edge e;
4229   edge_iterator ei;
4230 
4231   FOR_EACH_EDGE (e, ei, bb->succs)
4232     if (e->flags & EDGE_ABNORMAL)
4233       return true;
4234 
4235   return false;
4236 }
4237 
4238 /* Expand calls to inline functions in the body of FN.  */
4239 
4240 unsigned int
4241 optimize_inline_calls (tree fn)
4242 {
4243   copy_body_data id;
4244   basic_block bb;
4245   int last = n_basic_blocks;
4246   struct gimplify_ctx gctx;
4247   bool inlined_p = false;
4248 
4249   /* Clear out ID.  */
4250   memset (&id, 0, sizeof (id));
4251 
4252   id.src_node = id.dst_node = cgraph_get_node (fn);
4253   gcc_assert (id.dst_node->analyzed);
4254   id.dst_fn = fn;
4255   /* Or any functions that aren't finished yet.  */
4256   if (current_function_decl)
4257     id.dst_fn = current_function_decl;
4258 
4259   id.copy_decl = copy_decl_maybe_to_var;
4260   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4261   id.transform_new_cfg = false;
4262   id.transform_return_to_modify = true;
4263   id.transform_lang_insert_block = NULL;
4264   id.statements_to_fold = pointer_set_create ();
4265 
4266   push_gimplify_context (&gctx);
4267 
4268   /* We make no attempts to keep dominance info up-to-date.  */
4269   free_dominance_info (CDI_DOMINATORS);
4270   free_dominance_info (CDI_POST_DOMINATORS);
4271 
4272   /* Register specific gimple functions.  */
4273   gimple_register_cfg_hooks ();
4274 
4275   /* Reach the trees by walking over the CFG, and note the
4276      enclosing basic-blocks in the call edges.  */
4277   /* We walk the blocks going forward, because inlined function bodies
4278      will split id->current_basic_block, and the new blocks will
4279      follow it; we'll trudge through them, processing their CALL_EXPRs
4280      along the way.  */
4281   FOR_EACH_BB (bb)
4282     inlined_p |= gimple_expand_calls_inline (bb, &id);
4283 
4284   pop_gimplify_context (NULL);
4285 
4286 #ifdef ENABLE_CHECKING
4287     {
4288       struct cgraph_edge *e;
4289 
4290       verify_cgraph_node (id.dst_node);
4291 
4292       /* Double check that we inlined everything we are supposed to inline.  */
4293       for (e = id.dst_node->callees; e; e = e->next_callee)
4294 	gcc_assert (e->inline_failed);
4295     }
4296 #endif
4297 
4298   /* Fold queued statements.  */
4299   fold_marked_statements (last, id.statements_to_fold);
4300   pointer_set_destroy (id.statements_to_fold);
4301 
4302   gcc_assert (!id.debug_stmts);
4303 
4304   /* If we didn't inline into the function there is nothing to do.  */
4305   if (!inlined_p)
4306     return 0;
4307 
4308   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
4309   number_blocks (fn);
4310 
4311   delete_unreachable_blocks_update_callgraph (&id);
4312 #ifdef ENABLE_CHECKING
4313   verify_cgraph_node (id.dst_node);
4314 #endif
4315 
4316   /* It would be nice to check SSA/CFG/statement consistency here, but it is
4317      not possible yet - the IPA passes might make various functions
4318      non-throwing without proactively updating local EH info.  This is
4319      done later in the fixup_cfg pass, which also runs the verification.  */
4320   return (TODO_update_ssa
4321 	  | TODO_cleanup_cfg
4322 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4323 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4324 	  | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4325 }
4326 
4327 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
4328 
4329 tree
4330 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4331 {
4332   enum tree_code code = TREE_CODE (*tp);
4333   enum tree_code_class cl = TREE_CODE_CLASS (code);
4334 
4335   /* We make copies of most nodes.  */
4336   if (IS_EXPR_CODE_CLASS (cl)
4337       || code == TREE_LIST
4338       || code == TREE_VEC
4339       || code == TYPE_DECL
4340       || code == OMP_CLAUSE)
4341     {
4342       /* Because the chain gets clobbered when we make a copy, we save it
4343 	 here.  */
4344       tree chain = NULL_TREE, new_tree;
4345 
4346       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4347 	chain = TREE_CHAIN (*tp);
4348 
4349       /* Copy the node.  */
4350       new_tree = copy_node (*tp);
4351 
4352       /* Propagate mudflap marked-ness.  */
4353       if (flag_mudflap && mf_marked_p (*tp))
4354         mf_mark (new_tree);
4355 
4356       *tp = new_tree;
4357 
4358       /* Now, restore the chain, if appropriate.  That will cause
4359 	 walk_tree to walk into the chain as well.  */
4360       if (code == PARM_DECL
4361 	  || code == TREE_LIST
4362 	  || code == OMP_CLAUSE)
4363 	TREE_CHAIN (*tp) = chain;
4364 
4365       /* For now, we don't update BLOCKs when we make copies.  So, we
4366 	 have to nullify all BIND_EXPRs.  */
4367       if (TREE_CODE (*tp) == BIND_EXPR)
4368 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4369     }
4370   else if (code == CONSTRUCTOR)
4371     {
4372       /* CONSTRUCTOR nodes need special handling because
4373          we need to duplicate the vector of elements.  */
4374       tree new_tree;
4375 
4376       new_tree = copy_node (*tp);
4377 
4378       /* Propagate mudflap marked-ness.  */
4379       if (flag_mudflap && mf_marked_p (*tp))
4380         mf_mark (new_tree);
4381 
4382       CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4383 					 CONSTRUCTOR_ELTS (*tp));
4384       *tp = new_tree;
4385     }
4386   else if (code == STATEMENT_LIST)
4387     /* We used to just abort on STATEMENT_LIST, but we can run into them
4388        with statement-expressions (c++/40975).  */
4389     copy_statement_list (tp);
4390   else if (TREE_CODE_CLASS (code) == tcc_type)
4391     *walk_subtrees = 0;
4392   else if (TREE_CODE_CLASS (code) == tcc_declaration)
4393     *walk_subtrees = 0;
4394   else if (TREE_CODE_CLASS (code) == tcc_constant)
4395     *walk_subtrees = 0;
4396   return NULL_TREE;
4397 }
4398 
4399 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
4400    information indicating to what new SAVE_EXPR this one should be mapped,
4401    use that one.  Otherwise, create a new node and enter it in ST.  FN is
4402    the function into which the copy will be placed.  */
4403 
4404 static void
4405 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4406 {
4407   struct pointer_map_t *st = (struct pointer_map_t *) st_;
4408   tree *n;
4409   tree t;
4410 
4411   /* See if we already encountered this SAVE_EXPR.  */
4412   n = (tree *) pointer_map_contains (st, *tp);
4413 
4414   /* If we didn't already remap this SAVE_EXPR, do so now.  */
4415   if (!n)
4416     {
4417       t = copy_node (*tp);
4418 
4419       /* Remember this SAVE_EXPR.  */
4420       *pointer_map_insert (st, *tp) = t;
4421       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
4422       *pointer_map_insert (st, t) = t;
4423     }
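      /* That is, the map now holds both *TP -> T and T -> T, so a
	 later walk into the copy maps it to itself.  */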
4424   else
4425     {
4426       /* We've already walked into this SAVE_EXPR; don't do it again.  */
4427       *walk_subtrees = 0;
4428       t = *n;
4429     }
4430 
4431   /* Replace this SAVE_EXPR with the copy.  */
4432   *tp = t;
4433 }
4434 
4435 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
4436    copies the declaration and enters it in the decl map in DATA (which is
4437    really a `copy_body_data *').  */
4438 
4439 static tree
4440 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4441 			void *data)
4442 {
4443   copy_body_data *id = (copy_body_data *) data;
4444 
4445   /* Don't walk into types.  */
4446   if (TYPE_P (*tp))
4447     *walk_subtrees = 0;
4448 
4449   else if (TREE_CODE (*tp) == LABEL_EXPR)
4450     {
4451       tree decl = TREE_OPERAND (*tp, 0);
4452 
4453       /* Copy the decl and remember the copy.  */
4454       insert_decl_map (id, decl, id->copy_decl (decl, id));
4455     }
4456 
4457   return NULL_TREE;
4458 }
4459 
4460 /* Perform any modifications to EXPR required when it is unsaved.  Does
4461    not recurse into EXPR's subtrees.  */
4462 
4463 static void
4464 unsave_expr_1 (tree expr)
4465 {
4466   switch (TREE_CODE (expr))
4467     {
4468     case TARGET_EXPR:
4469       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4470          It's OK for this to happen if it was part of a subtree that
4471          isn't immediately expanded, such as operand 2 of another
4472          TARGET_EXPR.  */
4473       if (TREE_OPERAND (expr, 1))
4474 	break;
4475 
4476       TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4477       TREE_OPERAND (expr, 3) = NULL_TREE;
4478       break;
4479 
4480     default:
4481       break;
4482     }
4483 }
4484 
4485 /* Called via walk_tree when an expression is unsaved.  Using the
4486    decl map of the `copy_body_data' passed in DATA (a pointer map),
4487    remaps all local declarations to appropriate replacements.  */
4488 
4489 static tree
4490 unsave_r (tree *tp, int *walk_subtrees, void *data)
4491 {
4492   copy_body_data *id = (copy_body_data *) data;
4493   struct pointer_map_t *st = id->decl_map;
4494   tree *n;
4495 
4496   /* Only a local declaration (variable or label).  */
4497   if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4498       || TREE_CODE (*tp) == LABEL_DECL)
4499     {
4500       /* Lookup the declaration.  */
4501       n = (tree *) pointer_map_contains (st, *tp);
4502 
4503       /* If it's there, remap it.  */
4504       if (n)
4505 	*tp = *n;
4506     }
4507 
4508   else if (TREE_CODE (*tp) == STATEMENT_LIST)
4509     gcc_unreachable ();
4510   else if (TREE_CODE (*tp) == BIND_EXPR)
4511     copy_bind_expr (tp, walk_subtrees, id);
4512   else if (TREE_CODE (*tp) == SAVE_EXPR
4513 	   || TREE_CODE (*tp) == TARGET_EXPR)
4514     remap_save_expr (tp, st, walk_subtrees);
4515   else
4516     {
4517       copy_tree_r (tp, walk_subtrees, NULL);
4518 
4519       /* Do whatever unsaving is required.  */
4520       unsave_expr_1 (*tp);
4521     }
4522 
4523   /* Keep iterating.  */
4524   return NULL_TREE;
4525 }
4526 
4527 /* Copies everything in EXPR and replaces variables, labels
4528    and SAVE_EXPRs local to EXPR.  */
4529 
4530 tree
4531 unsave_expr_now (tree expr)
4532 {
4533   copy_body_data id;
4534 
4535   /* There's nothing to do for NULL_TREE.  */
4536   if (expr == 0)
4537     return expr;
4538 
4539   /* Set up ID.  */
4540   memset (&id, 0, sizeof (id));
4541   id.src_fn = current_function_decl;
4542   id.dst_fn = current_function_decl;
4543   id.decl_map = pointer_map_create ();
4544   id.debug_map = NULL;
4545 
4546   id.copy_decl = copy_decl_no_change;
4547   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4548   id.transform_new_cfg = false;
4549   id.transform_return_to_modify = false;
4550   id.transform_lang_insert_block = NULL;
4551 
4552   /* Walk the tree once to find local labels.  */
4553   walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4554 
4555   /* Walk the tree again, copying, remapping, and unsaving.  */
4556   walk_tree (&expr, unsave_r, &id, NULL);
4557 
4558   /* Clean up.  */
4559   pointer_map_destroy (id.decl_map);
4560   if (id.debug_map)
4561     pointer_map_destroy (id.debug_map);
4562 
4563   return expr;
4564 }
4565 
4566 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4567    label, copies the declaration and enters it in the decl map in DATA
4568    (which is really a 'copy_body_data *').  */
4569 
4570 static tree
4571 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4572 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
4573 		        struct walk_stmt_info *wi)
4574 {
4575   copy_body_data *id = (copy_body_data *) wi->info;
4576   gimple stmt = gsi_stmt (*gsip);
4577 
4578   if (gimple_code (stmt) == GIMPLE_LABEL)
4579     {
4580       tree decl = gimple_label_label (stmt);
4581 
4582       /* Copy the decl and remember the copy.  */
4583       insert_decl_map (id, decl, id->copy_decl (decl, id));
4584     }
4585 
4586   return NULL_TREE;
4587 }
4588 
4589 
4590 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4591    Using the decl map of the `copy_body_data' carried in the walk info,
4592    remaps all local declarations to appropriate replacements in gimple
4593    operands. */
4594 
4595 static tree
4596 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4597 {
4598   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4599   copy_body_data *id = (copy_body_data *) wi->info;
4600   struct pointer_map_t *st = id->decl_map;
4601   tree *n;
4602   tree expr = *tp;
4603 
4604   /* Only a local declaration (variable or label).  */
4605   if ((TREE_CODE (expr) == VAR_DECL
4606        && !TREE_STATIC (expr))
4607       || TREE_CODE (expr) == LABEL_DECL)
4608     {
4609       /* Lookup the declaration.  */
4610       n = (tree *) pointer_map_contains (st, expr);
4611 
4612       /* If it's there, remap it.  */
4613       if (n)
4614 	*tp = *n;
4615       *walk_subtrees = 0;
4616     }
4617   else if (TREE_CODE (expr) == STATEMENT_LIST
4618 	   || TREE_CODE (expr) == BIND_EXPR
4619 	   || TREE_CODE (expr) == SAVE_EXPR)
4620     gcc_unreachable ();
4621   else if (TREE_CODE (expr) == TARGET_EXPR)
4622     {
4623       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4624          It's OK for this to happen if it was part of a subtree that
4625          isn't immediately expanded, such as operand 2 of another
4626          TARGET_EXPR.  */
4627       if (!TREE_OPERAND (expr, 1))
4628 	{
4629 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4630 	  TREE_OPERAND (expr, 3) = NULL_TREE;
4631 	}
4632     }
4633 
4634   /* Keep iterating.  */
4635   return NULL_TREE;
4636 }
4637 
4638 
4639 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4640    Using the decl map of the `copy_body_data' carried in the walk info,
4641    remaps all local declarations to appropriate replacements in gimple
4642    statements. */
4643 
4644 static tree
4645 replace_locals_stmt (gimple_stmt_iterator *gsip,
4646 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
4647 		     struct walk_stmt_info *wi)
4648 {
4649   copy_body_data *id = (copy_body_data *) wi->info;
4650   gimple stmt = gsi_stmt (*gsip);
4651 
4652   if (gimple_code (stmt) == GIMPLE_BIND)
4653     {
4654       tree block = gimple_bind_block (stmt);
4655 
4656       if (block)
4657 	{
4658 	  remap_block (&block, id);
4659 	  gimple_bind_set_block (stmt, block);
4660 	}
4661 
4662       /* This will remap a lot of the same decls again, but this should be
4663 	 harmless.  */
4664       if (gimple_bind_vars (stmt))
4665 	gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4666     }
4667 
4668   /* Keep iterating.  */
4669   return NULL_TREE;
4670 }
4671 
4672 
4673 /* Copies everything in SEQ and replaces variables and labels local to
4674    current_function_decl.  */
4675 
4676 gimple_seq
4677 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4678 {
4679   copy_body_data id;
4680   struct walk_stmt_info wi;
4681   struct pointer_set_t *visited;
4682   gimple_seq copy;
4683 
4684   /* There's nothing to do for a NULL sequence.  */
4685   if (seq == NULL)
4686     return seq;
4687 
4688   /* Set up ID.  */
4689   memset (&id, 0, sizeof (id));
4690   id.src_fn = current_function_decl;
4691   id.dst_fn = current_function_decl;
4692   id.decl_map = pointer_map_create ();
4693   id.debug_map = NULL;
4694 
4695   id.copy_decl = copy_decl_no_change;
4696   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4697   id.transform_new_cfg = false;
4698   id.transform_return_to_modify = false;
4699   id.transform_lang_insert_block = NULL;
4700 
4701   /* Walk the tree once to find local labels.  */
4702   memset (&wi, 0, sizeof (wi));
4703   visited = pointer_set_create ();
4704   wi.info = &id;
4705   wi.pset = visited;
4706   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4707   pointer_set_destroy (visited);
4708 
4709   copy = gimple_seq_copy (seq);
4710 
4711   /* Walk the copy, remapping decls.  */
4712   memset (&wi, 0, sizeof (wi));
4713   wi.info = &id;
4714   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4715 
4716   /* Clean up.  */
4717   pointer_map_destroy (id.decl_map);
4718   if (id.debug_map)
4719     pointer_map_destroy (id.debug_map);
4720 
4721   return copy;
4722 }
4723 
4724 
4725 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
4726 
4727 static tree
4728 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4729 {
4730   if (*tp == data)
4731     return (tree) data;
4732   else
4733     return NULL;
4734 }
4735 
4736 DEBUG_FUNCTION bool
4737 debug_find_tree (tree top, tree search)
4738 {
4739   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4740 }
4741 
4742 
4743 /* Declare the variables created by the inliner.  Add all the variables in
4744    VARS to BLOCK.  */
4745 
4746 static void
4747 declare_inline_vars (tree block, tree vars)
4748 {
4749   tree t;
4750   for (t = vars; t; t = DECL_CHAIN (t))
4751     {
4752       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4753       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4754       add_local_decl (cfun, t);
4755     }
4756 
4757   if (block)
4758     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4759 }
4760 
4761 /* Finish copying DECL into COPY.  The DECL originally was in ID->src_fn,
4762    but the copy will live in ID->dst_fn.  Set up the copy's debug info,
4763    abstract origin and context accordingly.  */
4764 
4765 static tree
4766 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4767 {
4768   /* Don't generate debug information for the copy if we wouldn't have
4769      generated it for the original.  */
4770   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4771   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4772 
4773   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4774      declaration inspired this copy.  */
4775   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4776 
4777   /* The new variable/label has no RTL, yet.  */
4778   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4779       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4780     SET_DECL_RTL (copy, 0);
4781 
4782   /* These args would always appear unused, if not for this.  */
4783   TREE_USED (copy) = 1;
4784 
4785   /* Set the context for the new declaration.  */
4786   if (!DECL_CONTEXT (decl))
4787     /* Globals stay global.  */
4788     ;
4789   else if (DECL_CONTEXT (decl) != id->src_fn)
4790     /* Things that weren't in the scope of the function we're inlining
4791        from aren't in the scope we're inlining to, either.  */
4792     ;
4793   else if (TREE_STATIC (decl))
4794     /* Function-scoped static variables should stay in the original
4795        function.  */
4796     ;
4797   else
4798     /* Ordinary automatic local variables are now in the scope of the
4799        new function.  */
4800     DECL_CONTEXT (copy) = id->dst_fn;
4801 
4802   if (TREE_CODE (decl) == VAR_DECL
4803       /* C++ clones functions during parsing, before
4804 	 referenced_vars.  */
4805       && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
4806       && referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
4807 				DECL_UID (decl)))
4808     add_referenced_var (copy);
4809 
4810   return copy;
4811 }
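
/* Concretely (an illustrative source-level case): when inlining

     int g (void) { static int counter; int tmp; ... }

   into some caller f, the copy of TMP gets DECL_CONTEXT == f (the final
   branch above), while COUNTER keeps g as its context (the TREE_STATIC
   branch), so every inline copy of g shares the one COUNTER instance.  */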
4812 
4813 static tree
4814 copy_decl_to_var (tree decl, copy_body_data *id)
4815 {
4816   tree copy, type;
4817 
4818   gcc_assert (TREE_CODE (decl) == PARM_DECL
4819 	      || TREE_CODE (decl) == RESULT_DECL);
4820 
4821   type = TREE_TYPE (decl);
4822 
4823   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4824 		     VAR_DECL, DECL_NAME (decl), type);
4825   if (DECL_PT_UID_SET_P (decl))
4826     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4827   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4828   TREE_READONLY (copy) = TREE_READONLY (decl);
4829   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4830   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4831 
4832   return copy_decl_for_dup_finish (id, decl, copy);
4833 }
4834 
4835 /* Like copy_decl_to_var, but create a return slot object instead of a
4836    pointer variable for return by invisible reference.  */
4837 
4838 static tree
4839 copy_result_decl_to_var (tree decl, copy_body_data *id)
4840 {
4841   tree copy, type;
4842 
4843   gcc_assert (TREE_CODE (decl) == PARM_DECL
4844 	      || TREE_CODE (decl) == RESULT_DECL);
4845 
4846   type = TREE_TYPE (decl);
4847   if (DECL_BY_REFERENCE (decl))
4848     type = TREE_TYPE (type);
4849 
4850   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4851 		     VAR_DECL, DECL_NAME (decl), type);
4852   if (DECL_PT_UID_SET_P (decl))
4853     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4854   TREE_READONLY (copy) = TREE_READONLY (decl);
4855   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4856   if (!DECL_BY_REFERENCE (decl))
4857     {
4858       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4859       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4860     }
4861 
4862   return copy_decl_for_dup_finish (id, decl, copy);
4863 }
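
/* For instance (an illustrative source-level example), given

     struct big { char buf[4096]; };
     struct big f (void);

   targets returning BIG in memory set DECL_BY_REFERENCE on f's
   RESULT_DECL, whose type is then a pointer to "struct big"; the
   TREE_TYPE (type) dereference above recovers "struct big" itself, so
   the copy becomes a real return-slot object rather than a pointer.  */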
4864 
4865 tree
4866 copy_decl_no_change (tree decl, copy_body_data *id)
4867 {
4868   tree copy;
4869 
4870   copy = copy_node (decl);
4871 
4872   /* The COPY is not abstract; it will be generated in DST_FN.  */
4873   DECL_ABSTRACT (copy) = 0;
4874   lang_hooks.dup_lang_specific_decl (copy);
4875 
4876   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4877      been taken; it's for internal bookkeeping in expand_goto_internal.  */
4878   if (TREE_CODE (copy) == LABEL_DECL)
4879     {
4880       TREE_ADDRESSABLE (copy) = 0;
4881       LABEL_DECL_UID (copy) = -1;
4882     }
4883 
4884   return copy_decl_for_dup_finish (id, decl, copy);
4885 }
4886 
4887 static tree
4888 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4889 {
4890   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4891     return copy_decl_to_var (decl, id);
4892   else
4893     return copy_decl_no_change (decl, id);
4894 }
4895 
4896 /* Return a copy of the function's argument tree.  Arguments whose index
        is set in ARGS_TO_SKIP are not copied; they are remapped to equivalent
        VAR_DECLs chained onto *VARS instead.  */
4897 static tree
4898 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4899 			       bitmap args_to_skip, tree *vars)
4900 {
4901   tree arg, *parg;
4902   tree new_parm = NULL;
4903   int i = 0;
4904 
4905   parg = &new_parm;
4906 
4907   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4908     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4909       {
4910         tree new_tree = remap_decl (arg, id);
4911 	if (TREE_CODE (new_tree) != PARM_DECL)
4912 	  new_tree = id->copy_decl (arg, id);
4913         lang_hooks.dup_lang_specific_decl (new_tree);
4914         *parg = new_tree;
4915 	parg = &DECL_CHAIN (new_tree);
4916       }
4917     else if (!pointer_map_contains (id->decl_map, arg))
4918       {
4919 	/* Make an equivalent VAR_DECL.  If the argument was used
4920 	   as a temporary variable later in the function, its uses will
4921 	   be replaced by the local variable.  */
4922 	tree var = copy_decl_to_var (arg, id);
4923 	add_referenced_var (var);
4924 	insert_decl_map (id, arg, var);
4925         /* Declare this new variable.  */
4926         DECL_CHAIN (var) = *vars;
4927         *vars = var;
4928       }
4929   return new_parm;
4930 }
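
/* A caller-side sketch (hypothetical, for illustration): to drop the
   second parameter of "int f (int a, int b, int c)" when versioning,
   set bit 1 in the bitmap passed down to this function:

     bitmap args_to_skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (args_to_skip, 1);

   The loop above then chains copies of A and C as the new
   DECL_ARGUMENTS, while B is remapped to a local VAR_DECL.  */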
4931 
4932 /* Return a copy of the function's static chain.  */
4933 static tree
4934 copy_static_chain (tree static_chain, copy_body_data * id)
4935 {
4936   tree *chain_copy, *pvar;
4937 
4938   chain_copy = &static_chain;
4939   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4940     {
4941       tree new_tree = remap_decl (*pvar, id);
4942       lang_hooks.dup_lang_specific_decl (new_tree);
4943       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4944       *pvar = new_tree;
4945     }
4946   return static_chain;
4947 }
4948 
4949 /* Return true if the function is allowed to be versioned.
4950    This is a guard for the versioning functionality.  */
4951 
4952 bool
4953 tree_versionable_function_p (tree fndecl)
4954 {
4955   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4956 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4957 }
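
/* For example (illustrative), this predicate is false for

     __attribute__ ((noclone)) int
     f (int x)
     {
       return x + 1;
     }

   and likewise for anything copy_forbidden rejects, such as functions
   that receive a non-local goto.  */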
4958 
4959 /* Delete all unreachable basic blocks and update callgraph.
4960    Doing so is somewhat nontrivial because we need to update all clones and
4961    remove inline functions that become unreachable.  */
4962 
4963 static bool
4964 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4965 {
4966   bool changed = false;
4967   basic_block b, next_bb;
4968 
4969   find_unreachable_blocks ();
4970 
4971   /* Delete all unreachable basic blocks.  */
4972 
4973   for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4974     {
4975       next_bb = b->next_bb;
4976 
4977       if (!(b->flags & BB_REACHABLE))
4978 	{
4979           gimple_stmt_iterator bsi;
4980 
4981           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4982 	    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4983 	      {
4984 	        struct cgraph_edge *e;
4985 		struct cgraph_node *node;
4986 
4987 	        if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4988 		  {
4989 		    if (!e->inline_failed)
4990 		      cgraph_remove_node_and_inline_clones (e->callee);
4991 		    else
4992 	              cgraph_remove_edge (e);
4993 		  }
4994 		if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4995 		    && id->dst_node->clones)
4996 		  for (node = id->dst_node->clones; node != id->dst_node;)
4997 		    {
4998 	              if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4999 			{
5000 		          if (!e->inline_failed)
5001 		            cgraph_remove_node_and_inline_clones (e->callee);
5002 			  else
5003 	                    cgraph_remove_edge (e);
5004 			}
5005 
5006 		      if (node->clones)
5007 			node = node->clones;
5008 		      else if (node->next_sibling_clone)
5009 			node = node->next_sibling_clone;
5010 		      else
5011 			{
5012 			  while (node != id->dst_node && !node->next_sibling_clone)
5013 			    node = node->clone_of;
5014 			  if (node != id->dst_node)
5015 			    node = node->next_sibling_clone;
5016 			}
5017 		    }
5018 	      }
5019 	  delete_basic_block (b);
5020 	  changed = true;
5021 	}
5022     }
5023 
5024   return changed;
5025 }
5026 
5027 /* Update clone info after duplication.  */
5028 
5029 static void
5030 update_clone_info (copy_body_data * id)
5031 {
5032   struct cgraph_node *node;
5033   if (!id->dst_node->clones)
5034     return;
5035   for (node = id->dst_node->clones; node != id->dst_node;)
5036     {
5037       /* First update replace maps to match the new body.  */
5038       if (node->clone.tree_map)
5039         {
5040 	  unsigned int i;
5041           for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
5042 	    {
5043 	      struct ipa_replace_map *replace_info;
5044 	      replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
5045 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5046 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5047 	    }
5048 	}
5049       if (node->clones)
5050 	node = node->clones;
5051       else if (node->next_sibling_clone)
5052 	node = node->next_sibling_clone;
5053       else
5054 	{
5055 	  while (node != id->dst_node && !node->next_sibling_clone)
5056 	    node = node->clone_of;
5057 	  if (node != id->dst_node)
5058 	    node = node->next_sibling_clone;
5059 	}
5060     }
5061 }
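
/* Both functions above traverse the clone tree with the same preorder
   walk over the clones / next_sibling_clone / clone_of links, needing
   no explicit stack.  In isolation (a sketch; "visit" stands for the
   per-node work):

     if (root->clones)
       for (node = root->clones; node != root;)
	 {
	   visit (node);
	   if (node->clones)
	     node = node->clones;
	   else if (node->next_sibling_clone)
	     node = node->next_sibling_clone;
	   else
	     {
	       while (node != root && !node->next_sibling_clone)
		 node = node->clone_of;
	       if (node != root)
		 node = node->next_sibling_clone;
	     }
	 }
*/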
5062 
5063 /* Create a copy of a function's tree.
5064    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5065    of the original function and the new copied function
5066    respectively.  In case we want to replace a DECL
5067    tree with another tree while duplicating the function's
5068    body, TREE_MAP represents the mapping between these
5069    trees. If UPDATE_CLONES is set, the call_stmt fields
5070    of edges of clones of the function will be updated.
5071 
5072    If non-NULL, ARGS_TO_SKIP determines which function parameters to
5073    remove from the new version.
5074    If SKIP_RETURN is true, the new version will return void.
5075    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5076    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
5078 void
5079 tree_function_versioning (tree old_decl, tree new_decl,
5080 			  VEC(ipa_replace_map_p,gc)* tree_map,
5081 			  bool update_clones, bitmap args_to_skip,
5082 			  bool skip_return, bitmap blocks_to_copy,
5083 			  basic_block new_entry)
5084 {
5085   struct cgraph_node *old_version_node;
5086   struct cgraph_node *new_version_node;
5087   copy_body_data id;
5088   tree p;
5089   unsigned i;
5090   struct ipa_replace_map *replace_info;
5091   basic_block old_entry_block, bb;
5092   VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
5093 
5094   tree old_current_function_decl = current_function_decl;
5095   tree vars = NULL_TREE;
5096 
5097   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5098 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5099   DECL_POSSIBLY_INLINED (old_decl) = 1;
5100 
5101   old_version_node = cgraph_get_node (old_decl);
5102   gcc_checking_assert (old_version_node);
5103   new_version_node = cgraph_get_node (new_decl);
5104   gcc_checking_assert (new_version_node);
5105 
5106   /* Copy over debug args.  */
5107   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5108     {
5109       VEC(tree, gc) **new_debug_args, **old_debug_args;
5110       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5111       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5112       old_debug_args = decl_debug_args_lookup (old_decl);
5113       if (old_debug_args)
5114 	{
5115 	  new_debug_args = decl_debug_args_insert (new_decl);
5116 	  *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
5117 	}
5118     }
5119 
5120   /* Output the inlining info for this abstract function, since it has been
5121      inlined.  If we don't do this now, we can lose the information about the
5122      variables in the function when the blocks get blown away as soon as we
5123      remove the cgraph node.  */
5124   (*debug_hooks->outlining_inline_function) (old_decl);
5125 
5126   DECL_ARTIFICIAL (new_decl) = 1;
5127   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5128   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5129 
5130   /* Prepare the data structures for the tree copy.  */
5131   memset (&id, 0, sizeof (id));
5132 
5133   /* Record statements that will need folding after the body is copied.  */
5134   id.statements_to_fold = pointer_set_create ();
5135 
5136   id.decl_map = pointer_map_create ();
5137   id.debug_map = NULL;
5138   id.src_fn = old_decl;
5139   id.dst_fn = new_decl;
5140   id.src_node = old_version_node;
5141   id.dst_node = new_version_node;
5142   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5143   if (id.src_node->ipa_transforms_to_apply)
5144     {
5145       VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5146       unsigned int i;
5147 
5148       id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5149 					               id.src_node->ipa_transforms_to_apply);
5150       for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5151         VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5152 		       VEC_index (ipa_opt_pass,
5153 		       		  old_transforms_to_apply,
5154 				  i));
5155     }
5156 
5157   id.copy_decl = copy_decl_no_change;
5158   id.transform_call_graph_edges
5159     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5160   id.transform_new_cfg = true;
5161   id.transform_return_to_modify = false;
5162   id.transform_lang_insert_block = NULL;
5163 
5164   current_function_decl = new_decl;
5165   old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5166     (DECL_STRUCT_FUNCTION (old_decl));
5167   initialize_cfun (new_decl, old_decl,
5168 		   old_entry_block->count);
5169   DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5170     = id.src_cfun->gimple_df->ipa_pta;
5171   push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5172 
5173   /* Copy the function's static chain.  */
5174   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5175   if (p)
5176     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5177       copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5178 			 &id);
5179 
5180   /* If there's a tree_map, prepare for substitution.  */
5181   if (tree_map)
5182     for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5183       {
5184 	gimple init;
5185 	replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5186 	if (replace_info->replace_p)
5187 	  {
5188 	    tree op = replace_info->new_tree;
5189 	    if (!replace_info->old_tree)
5190 	      {
5191 		int i = replace_info->parm_num;
5192 		tree parm;
5193 		for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5194 		  i--;
5195 		replace_info->old_tree = parm;
5196 	      }
5197 
5199 	    STRIP_NOPS (op);
5200 
5201 	    if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5202 	      op = TREE_OPERAND (op, 0);
5203 
5204 	    if (TREE_CODE (op) == ADDR_EXPR)
5205 	      {
5206 		op = TREE_OPERAND (op, 0);
5207 		while (handled_component_p (op))
5208 		  op = TREE_OPERAND (op, 0);
5209 		if (TREE_CODE (op) == VAR_DECL)
5210 		  add_referenced_var (op);
5211 	      }
5212 	    gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5213 	    init = setup_one_parameter (&id, replace_info->old_tree,
5214 	    			        replace_info->new_tree, id.src_fn,
5215 				        NULL,
5216 				        &vars);
5217 	    if (init)
5218 	      VEC_safe_push (gimple, heap, init_stmts, init);
5219 	  }
5220       }
5221   /* Copy the function's arguments.  */
5222   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5223     DECL_ARGUMENTS (new_decl) =
5224       copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5225       				     args_to_skip, &vars);
5226 
5227   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5228   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5229 
5230   declare_inline_vars (DECL_INITIAL (new_decl), vars);
5231 
5232   if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5233     /* Add local vars.  */
5234     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
5235 
5236   if (DECL_RESULT (old_decl) == NULL_TREE)
5237     ;
5238   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5239     {
5240       DECL_RESULT (new_decl)
5241 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5242 		      RESULT_DECL, NULL_TREE, void_type_node);
5243       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5244       cfun->returns_struct = 0;
5245       cfun->returns_pcc_struct = 0;
5246     }
5247   else
5248     {
5249       tree old_name;
5250       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5251       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5252       if (gimple_in_ssa_p (id.src_cfun)
5253 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5254 	  && (old_name
5255 	      = gimple_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5256 	{
5257 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5258 	  insert_decl_map (&id, old_name, new_name);
5259 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5260 	  set_default_def (DECL_RESULT (new_decl), new_name);
5261 	}
5262     }
5263 
5264   /* Copy the Function's body.  */
5265   /* Copy the function's body.  */
5266 	     ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5267 
5268   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5269   number_blocks (new_decl);
5270 
5271   /* We want to create the BB unconditionally, so that the addition of
5272      debug stmts doesn't affect BB count, which may in the end cause
5273      codegen differences.  */
5274   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5275   while (VEC_length (gimple, init_stmts))
5276     insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5277   update_clone_info (&id);
5278 
5279   /* Remap the nonlocal_goto_save_area, if any.  */
5280   if (cfun->nonlocal_goto_save_area)
5281     {
5282       struct walk_stmt_info wi;
5283 
5284       memset (&wi, 0, sizeof (wi));
5285       wi.info = &id;
5286       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5287     }
5288 
5289   /* Clean up.  */
5290   pointer_map_destroy (id.decl_map);
5291   if (id.debug_map)
5292     pointer_map_destroy (id.debug_map);
5293   free_dominance_info (CDI_DOMINATORS);
5294   free_dominance_info (CDI_POST_DOMINATORS);
5295 
5296   fold_marked_statements (0, id.statements_to_fold);
5297   pointer_set_destroy (id.statements_to_fold);
5298   fold_cond_expr_cond ();
5299   delete_unreachable_blocks_update_callgraph (&id);
5300   if (id.dst_node->analyzed)
5301     cgraph_rebuild_references ();
5302   update_ssa (TODO_update_ssa);
5303 
5304   /* After partial cloning we need to rescale frequencies, so they are
5305      within proper range in the cloned function.  */
5306   if (new_entry)
5307     {
5308       struct cgraph_edge *e;
5309       rebuild_frequencies ();
5310 
5311       new_version_node->count = ENTRY_BLOCK_PTR->count;
5312       for (e = new_version_node->callees; e; e = e->next_callee)
5313 	{
5314 	  basic_block bb = gimple_bb (e->call_stmt);
5315 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5316 							 bb);
5317 	  e->count = bb->count;
5318 	}
5319       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5320 	{
5321 	  basic_block bb = gimple_bb (e->call_stmt);
5322 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5323 							 bb);
5324 	  e->count = bb->count;
5325 	}
5326     }
5327 
5328   free_dominance_info (CDI_DOMINATORS);
5329   free_dominance_info (CDI_POST_DOMINATORS);
5330 
5331   gcc_assert (!id.debug_stmts);
5332   VEC_free (gimple, heap, init_stmts);
5333   pop_cfun ();
5334   current_function_decl = old_current_function_decl;
5335   gcc_assert (!current_function_decl
5336 	      || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5337   return;
5338 }
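
/* A minimal caller sketch (hypothetical; real callers such as
   cgraph_materialize_clone pass the clone's recorded tree_map and
   args_to_skip): version OLD_DECL into an already-created NEW_DECL
   without changing its interface:

     tree_function_versioning (old_decl, new_decl, NULL, false,
			       NULL, false, NULL, NULL);

   i.e. no tree_map replacements, no clone-edge moving, no arguments
   skipped, the return value kept, all blocks copied and the entry
   block left alone.  */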
5339 
5340 /* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5341    the callee and return the inlined body on success.  */
5342 
5343 tree
5344 maybe_inline_call_in_expr (tree exp)
5345 {
5346   tree fn = get_callee_fndecl (exp);
5347 
5348   /* We can only try to inline "const" functions.  */
5349   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5350     {
5351       struct pointer_map_t *decl_map = pointer_map_create ();
5352       call_expr_arg_iterator iter;
5353       copy_body_data id;
5354       tree param, arg, t;
5355 
5356       /* Remap the parameters.  */
5357       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5358 	   param;
5359 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5360 	*pointer_map_insert (decl_map, param) = arg;
5361 
5362       memset (&id, 0, sizeof (id));
5363       id.src_fn = fn;
5364       id.dst_fn = current_function_decl;
5365       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5366       id.decl_map = decl_map;
5367 
5368       id.copy_decl = copy_decl_no_change;
5369       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5370       id.transform_new_cfg = false;
5371       id.transform_return_to_modify = true;
5372       id.transform_lang_insert_block = NULL;
5373 
5374       /* Make sure not to unshare trees behind the front-end's back
5375 	 since front-end specific mechanisms may rely on sharing.  */
5376       id.regimplify = false;
5377       id.do_not_unshare = true;
5378 
5379       /* We're not inside any EH region.  */
5380       id.eh_lp_nr = 0;
5381 
5382       t = copy_tree_body (&id);
5383       pointer_map_destroy (decl_map);
5384 
5385       /* We can only return something suitable for use in a GENERIC
5386 	 expression tree.  */
5387       if (TREE_CODE (t) == MODIFY_EXPR)
5388 	return TREE_OPERAND (t, 1);
5389     }
5390 
5391    return NULL_TREE;
5392 }
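
/* E.g. (illustrative): with

     static int sq (int i) __attribute__ ((const));
     static int sq (int i) { return i * i; }

   a GENERIC call "sq (3)" may fold to the copied body "3 * 3", i.e. the
   RHS of the MODIFY_EXPR that would have stored sq's return value, as
   long as the front end still has DECL_SAVED_TREE for sq.  */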
5393 
5394 /* Duplicate a type, fields and all.  */
5395 
5396 tree
5397 build_duplicate_type (tree type)
5398 {
5399   struct copy_body_data id;
5400 
5401   memset (&id, 0, sizeof (id));
5402   id.src_fn = current_function_decl;
5403   id.dst_fn = current_function_decl;
5404   id.src_cfun = cfun;
5405   id.decl_map = pointer_map_create ();
5406   id.debug_map = NULL;
5407   id.copy_decl = copy_decl_no_change;
5408 
5409   type = remap_type_1 (type, &id);
5410 
5411   pointer_map_destroy (id.decl_map);
5412   if (id.debug_map)
5413     pointer_map_destroy (id.debug_map);
5414 
5415   TYPE_CANONICAL (type) = type;
5416 
5417   return type;
5418 }
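
/* Illustrative sketch: the duplicate carries fresh FIELD_DECLs, so

     tree dup = build_duplicate_type (some_record_type);

   ("some_record_type" being a placeholder) yields a type whose
   TYPE_FIELDS are copies distinct from the original's, and which is its
   own TYPE_CANONICAL, i.e. a separate type for canonical-type
   comparisons.  */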
5419