/* Tree inlining.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"

#include "rtl.h"	/* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle GIMPLE and
   non-GIMPLE trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
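
/* For illustration only (a simplified sketch, not code used by this
   file): given

     int inc (int x) { return x + 1; }
     ...
     y = inc (3);

   inlining duplicates the body of inc, remaps the PARM_DECL x to a
   fresh local variable, and turns the non-void RETURN_EXPR into an
   assignment to a returned-value variable:

     x.1 = 3;
     retval.2 = x.1 + 1;
     y = retval.2;

   The names x.1 and retval.2 are made up here; the actual temporaries
   are produced by the remapping machinery below.  */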

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this is used for more than
   that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
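
/* Usage sketch (illustrative only): after

     insert_decl_map (id, old_parm, new_var);

   looking up old_parm in id->decl_map yields new_var, and looking up
   new_var yields new_var itself, so a walker that re-encounters the
   replacement node maps it onto itself instead of copying it again.  */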

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If positive, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = (tree *) pointer_map_contains (id->decl_map, val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (var) == VAR_DECL
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet, as the statement has not been copied yet.
     We do that in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from having to introduce a PHI node
     when the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function with an uninitialized variable, we might
	     extend its lifetime (the variable might get reused).  This causes
	     an ICE if we end up extending the lifetime of an SSA name across
	     an abnormal edge, and it also increases register pressure.

	     We simply initialize all uninitialized vars to 0, except when we
	     are inlining into the very first BB.  We could avoid this for all
	     BBs that are not inside strongly connected regions of the CFG,
	     but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
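
/* Illustrative sketch (hypothetical GIMPLE, not code from this file):
   if the inlined body uses an uninitialized variable that occurs in an
   abnormal PHI, say

     int u;                          <-- u_2(D) is a default definition
     ...
     # u_3 = PHI <u_2(D)(ab), ...>

   then remap_ssa_name gives the copy an explicit definition in the
   entry block of the inlined region,

     u.4 = 0;

   so the copied name is no longer a default def whose lifetime could
   stretch across an abnormal edge.  */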

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have already remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* For fields, do likewise for the offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					    TYPE_MODE (type),
					    TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  We need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    DECL_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
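
/* Example (illustrative only): in

     void f (int n)
     {
       int buf[n];   <-- the type int[0:n-1] depends on the local n
       ...
     }

   the array type of buf is variably modified, so remap_type must build
   a fresh type whose size expressions refer to the remapped copy of n.
   A type like plain int depends on no local entity, so it is mapped to
   itself and never copied.  */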

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple-declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
        {
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein, and hook the new block into the block tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}


/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again, the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;

	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type,
			     ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     consider the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = (tree *) pointer_map_contains (id->decl_map,
					     TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree new_tree;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
	         it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
	         does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE (*n));
	      if (id->do_not_unshare)
		new_tree = *n;
	      else
		new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (! *tp)
	        {
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
	          else
		    {
	              *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree old = *tp;
	      *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
				 unshare_expr (*n), TREE_OPERAND (*tp, 1));
	      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed
	 block.  When inlining we want EXPRs without a block to appear
	 in the block of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular,
	 consider the INDIRECT_REF substitution above.  Make sure that
	 TREE_CONSTANT and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
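
/* Illustrative sketch (made-up names, not code from this file): when a
   call like

     void set (int *p) { *p = 1; }
     ...
     set (&v);

   is inlined, the parameter p is mapped to &v, so the body briefly
   contains *&v.  The INDIRECT_REF and MEM_REF cases above fold such
   references back to plain v rather than leaving *& in the IL.  */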

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
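
/* Example (illustrative region numbers only): if the source function's
   EH region 1 was duplicated into the caller as region 5, then a copied

     __builtin_eh_pointer (1)

   must be rewritten by remap_eh_region_tree_nr to

     __builtin_eh_pointer (5)

   and likewise the region operands of GIMPLE_RESX and
   GIMPLE_EH_DISPATCH statements are renumbered via remap_eh_region_nr
   in remap_gimple_stmt below.  */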

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
	  copy = gimple_build_assign (id->retvar, retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (stmt, id);
	  break;

	case GIMPLE_CATCH:
	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_parallel
	           (s1,
		    gimple_omp_parallel_clauses (stmt),
		    gimple_omp_parallel_child_fn (stmt),
		    gimple_omp_parallel_data_arg (stmt));
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
	           (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
	           (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
	           (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy
	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
	  copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
	  gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return gimple_build_nop ();
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
					  gimple_debug_bind_get_value (stmt),
					  stmt);
	  id->debug_stmts.safe_push (copy);
	  return copy;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  copy = gimple_build_debug_source_bind
		   (gimple_debug_source_bind_get_var (stmt),
		    gimple_debug_source_bind_get_value (stmt), stmt);
	  id->debug_stmts.safe_push (copy);
	  return copy;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (copy);
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      int r = gimple_resx_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (copy, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      int r = gimple_eh_dispatch_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (copy, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
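
/* Sketch of the return transformation above (hypothetical GIMPLE):
   with transform_return_to_modify set and id->retvar being the
   caller-side temporary, a copied

     return x_1;

   becomes

     retvar = x_1;

   while a value-less return, or one whose value is already the result
   decl, is dropped and replaced by a GIMPLE_NOP; the branch back to
   the caller is reconstructed later as a CFG edge.  */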


/* Copy a basic block, scaling its profile accordingly.  Edges will be
   taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for the previously copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     have only minor importance for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
	continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With the return slot optimization we can end up with
	 non-gimple (foo *)&this->m; fix that here.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == NOP_EXPR
	  && !is_gimple_val (gimple_assign_rhs1 (stmt)))
	{
	  tree new_rhs;
	  new_rhs = force_gimple_operand_gsi (&seq_gsi,
					      gimple_assign_rhs1 (stmt),
					      true, NULL, false,
					      GSI_CONTINUE_LINKING);
	  gimple_assign_set_rhs1 (stmt, new_rhs);
	  id->regimplify = false;
	}

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
	gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements; we
	 need to process all of them.  */
      do
	{
	  tree fn;

1584 	  stmt = gsi_stmt (copy_gsi);
1585 	  if (is_gimple_call (stmt)
1586 	      && gimple_call_va_arg_pack_p (stmt)
1587 	      && id->gimple_call)
1588 	    {
1589 	      /* __builtin_va_arg_pack () should be replaced by
1590 		 all arguments corresponding to ... in the caller.  */
1591 	      tree p;
1592 	      gimple new_call;
1593 	      vec<tree> argarray;
1594 	      size_t nargs = gimple_call_num_args (id->gimple_call);
1595 	      size_t n;
1596 
1597 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1598 		nargs--;
1599 
1600 	      /* Create the new array of arguments.  */
1601 	      n = nargs + gimple_call_num_args (stmt);
1602 	      argarray.create (n);
1603 	      argarray.safe_grow_cleared (n);
1604 
1605 	      /* Copy all the arguments before '...'  */
1606 	      memcpy (argarray.address (),
1607 		      gimple_call_arg_ptr (stmt, 0),
1608 		      gimple_call_num_args (stmt) * sizeof (tree));
1609 
1610 	      /* Append the arguments passed in '...'  */
1611 	      memcpy (argarray.address () + gimple_call_num_args (stmt),
1612 		      gimple_call_arg_ptr (id->gimple_call, 0)
1613 			+ (gimple_call_num_args (id->gimple_call) - nargs),
1614 		      nargs * sizeof (tree));
1615 
1616 	      new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1617 						argarray);
1618 
1619 	      argarray.release ();
1620 
1621 	      /* Copy all GIMPLE_CALL flags, location and block, except
1622 		 GF_CALL_VA_ARG_PACK.  */
1623 	      gimple_call_copy_flags (new_call, stmt);
1624 	      gimple_call_set_va_arg_pack (new_call, false);
1625 	      gimple_set_location (new_call, gimple_location (stmt));
1626 	      gimple_set_block (new_call, gimple_block (stmt));
1627 	      gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1628 
1629 	      gsi_replace (&copy_gsi, new_call, false);
1630 	      stmt = new_call;
1631 	    }
1632 	  else if (is_gimple_call (stmt)
1633 		   && id->gimple_call
1634 		   && (decl = gimple_call_fndecl (stmt))
1635 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1636 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1637 	    {
1638 	      /* __builtin_va_arg_pack_len () should be replaced by
1639 		 the number of anonymous arguments.  */
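	      /* For illustration (hypothetical): with f declared as
		 int f (int x, ...), inlining the call f (1, 2, 3)
		 replaces __builtin_va_arg_pack_len () with the
		 constant 2, the number of arguments matching the
		 ellipsis.  */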
1640 	      size_t nargs = gimple_call_num_args (id->gimple_call);
1641 	      tree count, p;
1642 	      gimple new_stmt;
1643 
1644 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1645 		nargs--;
1646 
1647 	      count = build_int_cst (integer_type_node, nargs);
1648 	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1649 	      gsi_replace (&copy_gsi, new_stmt, false);
1650 	      stmt = new_stmt;
1651 	    }
1652 
1653 	  /* Statements produced by inlining can be unfolded, especially
1654 	     when we have constant propagated some operands.  We can't fold
1655 	     them right now for two reasons:
1656 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
1657 	     2) we can't change function calls to builtins.
1658 	     So we just mark statements for later folding.  We mark
1659 	     all new statements, instead of just those that were changed
1660 	     by some nontrivial substitution, so that even statements made
1661 	     foldable indirectly are updated.  If this turns out to be
1662 	     expensive, copy_body can be told to watch for nontrivial
1663 	     changes.  */
1664 	  if (id->statements_to_fold)
1665 	    pointer_set_insert (id->statements_to_fold, stmt);
1666 
1667 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
1668 	     callgraph edges and update or duplicate them.  */
1669 	  if (is_gimple_call (stmt))
1670 	    {
1671 	      struct cgraph_edge *edge;
1672 	      int flags;
1673 
1674 	      switch (id->transform_call_graph_edges)
1675 		{
1676 		case CB_CGE_DUPLICATE:
1677 		  edge = cgraph_edge (id->src_node, orig_stmt);
1678 		  if (edge)
1679 		    {
1680 		      int edge_freq = edge->frequency;
1681 		      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1682 					        gimple_uid (stmt),
1683 					        REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1684 					        true);
1685 		      /* We could also just rescale the frequency, but
1686 		         doing so would introduce roundoff errors and make
1687 			 the verifier unhappy.  */
1688 		      edge->frequency
1689 		        = compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
1690 							  copy_basic_block);
1691 		      if (dump_file
1692 		      	  && profile_status_for_function (cfun) != PROFILE_ABSENT
1693 			  && (edge_freq > edge->frequency + 10
1694 			      || edge_freq < edge->frequency - 10))
1695 			{
1696 			  fprintf (dump_file, "Edge frequency estimated by "
1697 			           "cgraph %i diverges from inliner's estimate %i\n",
1698 			  	   edge_freq,
1699 				   edge->frequency);
1700 			  fprintf (dump_file,
1701 			  	   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1702 				   bb->index,
1703 				   bb->frequency,
1704 				   copy_basic_block->frequency);
1705 			}
1706 		      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1707 		    }
1708 		  break;
1709 
1710 		case CB_CGE_MOVE_CLONES:
1711 		  cgraph_set_call_stmt_including_clones (id->dst_node,
1712 							 orig_stmt, stmt);
1713 		  edge = cgraph_edge (id->dst_node, stmt);
1714 		  break;
1715 
1716 		case CB_CGE_MOVE:
1717 		  edge = cgraph_edge (id->dst_node, orig_stmt);
1718 		  if (edge)
1719 		    cgraph_set_call_stmt (edge, stmt);
1720 		  break;
1721 
1722 		default:
1723 		  gcc_unreachable ();
1724 		}
1725 
1726 	      /* Constant propagation on arguments done during inlining
1727 		 may create new direct calls.  Produce an edge for each.  */
1728 	      if ((!edge
1729 		   || (edge->indirect_inlining_edge
1730 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1731 		  && id->dst_node->analyzed
1732 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
1733 		{
1734 		  struct cgraph_node *dest = cgraph_get_node (fn);
1735 
1736 		  /* We have a missing edge in the callgraph.  This can happen
1737 		     when previous inlining turned an indirect call into a
1738 		     direct call by constant propagating arguments, or when we
1739 		     are producing a dead clone (for further cloning).  In all
1740 		     other cases we hit a bug (incorrect node sharing is the
1741 		     most common reason for missing edges).  */
1742 		  gcc_assert (!dest->analyzed
1743 			      || dest->symbol.address_taken
1744 		  	      || !id->src_node->analyzed
1745 			      || !id->dst_node->analyzed);
1746 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1747 		    cgraph_create_edge_including_clones
1748 		      (id->dst_node, dest, orig_stmt, stmt, bb->count,
1749 		       compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
1750 		       				       copy_basic_block),
1751 		       CIF_ORIGINALLY_INDIRECT_CALL);
1752 		  else
1753 		    cgraph_create_edge (id->dst_node, dest, stmt,
1754 					bb->count,
1755 					compute_call_stmt_bb_frequency
1756 					  (id->dst_node->symbol.decl,
1757 					   copy_basic_block))->inline_failed
1758 		      = CIF_ORIGINALLY_INDIRECT_CALL;
1759 		  if (dump_file)
1760 		    {
1761 		      fprintf (dump_file, "Created new direct edge to %s\n",
1762 			       cgraph_node_name (dest));
1763 		    }
1764 		}
1765 
1766 	      flags = gimple_call_flags (stmt);
1767 	      if (flags & ECF_MAY_BE_ALLOCA)
1768 		cfun->calls_alloca = true;
1769 	      if (flags & ECF_RETURNS_TWICE)
1770 		cfun->calls_setjmp = true;
1771 	    }
1772 
1773 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1774 				      id->eh_map, id->eh_lp_nr);
1775 
1776 	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1777 	    {
1778 	      ssa_op_iter i;
1779 	      tree def;
1780 
1781 	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1782 		if (TREE_CODE (def) == SSA_NAME)
1783 		  SSA_NAME_DEF_STMT (def) = stmt;
1784 	    }
1785 
1786 	  gsi_next (&copy_gsi);
1787 	}
1788       while (!gsi_end_p (copy_gsi));
1789 
1790       copy_gsi = gsi_last_bb (copy_basic_block);
1791     }
1792 
1793   return copy_basic_block;
1794 }
1795 
1796 /* Inserting a Single Entry Multiple Exit region in SSA form into code that
1797    is already in SSA form is quite easy, since the dominator relationship
1798    for the old basic blocks does not change.
1799 
1800    There is, however, an exception: inlining may change the dominator
1801    relation across EH edges leading from basic blocks within the inlined
1802    function to landing pads in the function we inline into.
1803 
1804    This function fills in the PHI_RESULTs of such PHI nodes if they refer
1805    to gimple regs.  Otherwise, it marks the PHI_RESULT of such
1806    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
1807    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1808    set, which means that there will be no overlapping live ranges
1809    for the underlying symbol.
1810 
1811    This might change in the future if we allow redirecting of EH edges;
1812    we might then want to change the way the CFG is built pre-inlining to
1813    include all the possible edges.  */
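/* A minimal sketch of the handled case (hypothetical): a statement
   copied from the callee can throw to a landing pad LP of the caller,
   so LP gains a new EH edge E from the copied block.  Any PHI node in
   LP then needs an argument for E; for gimple regs we copy it from the
   corresponding edge leaving RET_BB, while virtual operands are simply
   queued for renaming.  */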
1814 static void
1815 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1816 				  bool can_throw, bool nonlocal_goto)
1817 {
1818   edge e;
1819   edge_iterator ei;
1820 
1821   FOR_EACH_EDGE (e, ei, bb->succs)
1822     if (!e->dest->aux
1823 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1824       {
1825 	gimple phi;
1826 	gimple_stmt_iterator si;
1827 
1828 	if (!nonlocal_goto)
1829 	  gcc_assert (e->flags & EDGE_EH);
1830 
1831 	if (!can_throw)
1832 	  gcc_assert (!(e->flags & EDGE_EH));
1833 
1834 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1835 	  {
1836 	    edge re;
1837 
1838 	    phi = gsi_stmt (si);
1839 
1840 	    /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
1841 	    gcc_assert (!e->dest->aux);
1842 
1843 	    gcc_assert ((e->flags & EDGE_EH)
1844 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1845 
1846 	    if (virtual_operand_p (PHI_RESULT (phi)))
1847 	      {
1848 		mark_virtual_operands_for_renaming (cfun);
1849 		continue;
1850 	      }
1851 
1852 	    re = find_edge (ret_bb, e->dest);
1853 	    gcc_assert (re);
1854 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1855 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1856 
1857 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1858 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1859 	  }
1860       }
1861 }
1862 
1863 
1864 /* Copy edges from BB into its copy constructed earlier, scale profile
1865    accordingly.  EH edges are taken care of later.  Assume the aux
1866    pointers point to the copies of each BB.  Return true if any
1867    debug stmts are left after a statement that must end the basic block.  */
1868 
1869 static bool
1870 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1871 {
1872   basic_block new_bb = (basic_block) bb->aux;
1873   edge_iterator ei;
1874   edge old_edge;
1875   gimple_stmt_iterator si;
1876   int flags;
1877   bool need_debug_cleanup = false;
1878 
1879   /* Use the indices from the original blocks to create edges for the
1880      new ones.  */
1881   FOR_EACH_EDGE (old_edge, ei, bb->succs)
1882     if (!(old_edge->flags & EDGE_EH))
1883       {
1884 	edge new_edge;
1885 
1886 	flags = old_edge->flags;
1887 
1888 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
1889 	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1890 	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
1891 	  flags |= EDGE_FALLTHRU;
1892 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1893 	new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1894 	new_edge->probability = old_edge->probability;
1895       }
1896 
1897   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1898     return false;
1899 
1900   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1901     {
1902       gimple copy_stmt;
1903       bool can_throw, nonlocal_goto;
1904 
1905       copy_stmt = gsi_stmt (si);
1906       if (!is_gimple_debug (copy_stmt))
1907 	update_stmt (copy_stmt);
1908 
1909       /* Do this before the possible split_block.  */
1910       gsi_next (&si);
1911 
1912       /* If this tree could throw an exception, there are two
1913          cases where we need to add abnormal edge(s): the
1914          tree wasn't in a region and there is a "current
1915          region" in the caller; or the original tree had
1916          EH edges.  In both cases split the block after the tree,
1917          and add abnormal edge(s) as needed; we need both
1918          those from the callee and the caller.
1919          We check whether the copy can throw, because the const
1920          propagation can change an INDIRECT_REF which throws
1921          into a COMPONENT_REF which doesn't.  If the copy
1922          can throw, the original could also throw.  */
1923       can_throw = stmt_can_throw_internal (copy_stmt);
1924       nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1925 
1926       if (can_throw || nonlocal_goto)
1927 	{
1928 	  if (!gsi_end_p (si))
1929 	    {
1930 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1931 		gsi_next (&si);
1932 	      if (gsi_end_p (si))
1933 		need_debug_cleanup = true;
1934 	    }
1935 	  if (!gsi_end_p (si))
1936 	    /* Note that bb's predecessor edges aren't necessarily
1937 	       right at this point; split_block doesn't care.  */
1938 	    {
1939 	      edge e = split_block (new_bb, copy_stmt);
1940 
1941 	      new_bb = e->dest;
1942 	      new_bb->aux = e->src->aux;
1943 	      si = gsi_start_bb (new_bb);
1944 	    }
1945 	}
1946 
1947       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1948 	make_eh_dispatch_edges (copy_stmt);
1949       else if (can_throw)
1950 	make_eh_edges (copy_stmt);
1951 
1952       if (nonlocal_goto)
1953 	make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1954 
1955       if ((can_throw || nonlocal_goto)
1956 	  && gimple_in_ssa_p (cfun))
1957 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1958 					  can_throw, nonlocal_goto);
1959     }
1960   return need_debug_cleanup;
1961 }
1962 
1963 /* Copy the PHIs.  All blocks and edges have been copied; some blocks
1964    were possibly split and new outgoing EH edges inserted.
1965    BB points to the block of the original function and AUX pointers link
1966    the original and newly copied blocks.  */
1967 
1968 static void
1969 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1970 {
1971   basic_block const new_bb = (basic_block) bb->aux;
1972   edge_iterator ei;
1973   gimple phi;
1974   gimple_stmt_iterator si;
1975   edge new_edge;
1976   bool inserted = false;
1977 
1978   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
1979     {
1980       tree res, new_res;
1981       gimple new_phi;
1982 
1983       phi = gsi_stmt (si);
1984       res = PHI_RESULT (phi);
1985       new_res = res;
1986       if (!virtual_operand_p (res))
1987 	{
1988 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
1989 	  new_phi = create_phi_node (new_res, new_bb);
1990 	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1991 	    {
1992 	      edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
1993 	      tree arg;
1994 	      tree new_arg;
1995 	      edge_iterator ei2;
1996 	      location_t locus;
1997 
1998 	      /* When doing partial cloning, we allow PHIs on the entry block
1999 		 as long as all the arguments are the same.  Find any incoming
2000 		 edge to get the argument to copy.  */
2001 	      if (!old_edge)
2002 		FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2003 		  if (!old_edge->src->aux)
2004 		    break;
2005 
2006 	      arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2007 	      new_arg = arg;
2008 	      walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2009 	      gcc_assert (new_arg);
2010 	      /* With return slot optimization we can end up with
2011 	         non-gimple (foo *)&this->m, fix that here.  */
2012 	      if (TREE_CODE (new_arg) != SSA_NAME
2013 		  && TREE_CODE (new_arg) != FUNCTION_DECL
2014 		  && !is_gimple_val (new_arg))
2015 		{
2016 		  gimple_seq stmts = NULL;
2017 		  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2018 		  gsi_insert_seq_on_edge (new_edge, stmts);
2019 		  inserted = true;
2020 		}
2021 	      locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2022 	      if (LOCATION_BLOCK (locus))
2023 		{
2024 		  tree *n;
2025 		  n = (tree *) pointer_map_contains (id->decl_map,
2026 			LOCATION_BLOCK (locus));
2027 		  gcc_assert (n);
2028 		  locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2029 		}
2030 	      else
2031 		locus = LOCATION_LOCUS (locus);
2032 
2033 	      add_phi_arg (new_phi, new_arg, new_edge, locus);
2034 	    }
2035 	}
2036     }
2037 
2038   /* Commit the delayed edge insertions.  */
2039   if (inserted)
2040     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2041       gsi_commit_one_edge_insert (new_edge, NULL);
2042 }
2043 
2044 
2045 /* Wrapper for remap_decl so it can be used as a callback.  */
2046 
2047 static tree
2048 remap_decl_1 (tree decl, void *data)
2049 {
2050   return remap_decl (decl, (copy_body_data *) data);
2051 }
2052 
2053 /* Build struct function and associated data structures for the new clone
2054    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This function
2055    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2056 
2057 static void
2058 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2059 {
2060   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2061   gcov_type count_scale;
2062 
2063   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2064     count_scale = (REG_BR_PROB_BASE * count
2065 		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2066   else
2067     count_scale = REG_BR_PROB_BASE;
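
  /* E.g. (hypothetical numbers): if the callee's entry count is 1000
     and COUNT is 250, count_scale becomes REG_BR_PROB_BASE / 4, so the
     entry and exit block counts copied below are scaled down to a
     quarter of their original values.  */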
2068 
2069   /* Register specific tree functions.  */
2070   gimple_register_cfg_hooks ();
2071 
2072   /* Get clean struct function.  */
2073   push_struct_function (new_fndecl);
2074 
2075   /* We will rebuild these, so just sanity check that they are empty.  */
2076   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2077   gcc_assert (cfun->local_decls == NULL);
2078   gcc_assert (cfun->cfg == NULL);
2079   gcc_assert (cfun->decl == new_fndecl);
2080 
2081   /* Copy items we preserve during cloning.  */
2082   cfun->static_chain_decl = src_cfun->static_chain_decl;
2083   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2084   cfun->function_end_locus = src_cfun->function_end_locus;
2085   cfun->curr_properties = src_cfun->curr_properties & ~PROP_loops;
2086   cfun->last_verified = src_cfun->last_verified;
2087   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2088   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2089   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2090   cfun->stdarg = src_cfun->stdarg;
2091   cfun->after_inlining = src_cfun->after_inlining;
2092   cfun->can_throw_non_call_exceptions
2093     = src_cfun->can_throw_non_call_exceptions;
2094   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2095   cfun->returns_struct = src_cfun->returns_struct;
2096   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2097 
2098   init_empty_tree_cfg ();
2099 
2100   profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2101   ENTRY_BLOCK_PTR->count =
2102     (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2103      REG_BR_PROB_BASE);
2104   ENTRY_BLOCK_PTR->frequency
2105     = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2106   EXIT_BLOCK_PTR->count =
2107     (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2108      REG_BR_PROB_BASE);
2109   EXIT_BLOCK_PTR->frequency =
2110     EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2111   if (src_cfun->eh)
2112     init_eh_for_function ();
2113 
2114   if (src_cfun->gimple_df)
2115     {
2116       init_tree_ssa (cfun);
2117       cfun->gimple_df->in_ssa_p = true;
2118       init_ssa_operands (cfun);
2119     }
2120 }
2121 
2122 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2123    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2124    successor has multiple predecessors, reset the debug stmts' values;
2125    otherwise keep them.  */
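
/* Sketch of the situation handled (hypothetical GIMPLE): if NEW_BB ends
   with

     call that can throw;
     # DEBUG x => a_1

   the debug bind has to follow the call into the successors.  On a
   successor with several predecessors the bind is kept but its value is
   reset (# DEBUG x => NULL), since a_1 need not be correct on every
   incoming path.  */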
2126 
2127 static void
2128 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2129 {
2130   edge e;
2131   edge_iterator ei;
2132   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2133 
2134   if (gsi_end_p (si)
2135       || gsi_one_before_end_p (si)
2136       || !(stmt_can_throw_internal (gsi_stmt (si))
2137 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2138     return;
2139 
2140   FOR_EACH_EDGE (e, ei, new_bb->succs)
2141     {
2142       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2143       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2144       while (is_gimple_debug (gsi_stmt (ssi)))
2145 	{
2146 	  gimple stmt = gsi_stmt (ssi), new_stmt;
2147 	  tree var;
2148 	  tree value;
2149 
2150 	  /* For the last edge move the debug stmts instead of copying
2151 	     them.  */
2152 	  if (ei_one_before_end_p (ei))
2153 	    {
2154 	      si = ssi;
2155 	      gsi_prev (&ssi);
2156 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2157 		gimple_debug_bind_reset_value (stmt);
2158 	      gsi_remove (&si, false);
2159 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2160 	      continue;
2161 	    }
2162 
2163 	  if (gimple_debug_bind_p (stmt))
2164 	    {
2165 	      var = gimple_debug_bind_get_var (stmt);
2166 	      if (single_pred_p (e->dest))
2167 		{
2168 		  value = gimple_debug_bind_get_value (stmt);
2169 		  value = unshare_expr (value);
2170 		}
2171 	      else
2172 		value = NULL_TREE;
2173 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2174 	    }
2175 	  else if (gimple_debug_source_bind_p (stmt))
2176 	    {
2177 	      var = gimple_debug_source_bind_get_var (stmt);
2178 	      value = gimple_debug_source_bind_get_value (stmt);
2179 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2180 	    }
2181 	  else
2182 	    gcc_unreachable ();
2183 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2184 	  id->debug_stmts.safe_push (new_stmt);
2185 	  gsi_prev (&ssi);
2186 	}
2187     }
2188 }
2189 
2190 /* Make a copy of the body of FN so that it can be inserted inline in
2191    another function.  Walks FN via CFG, returns new fndecl.  */
2192 
2193 static tree
2194 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2195 	       basic_block entry_block_map, basic_block exit_block_map,
2196 	       bitmap blocks_to_copy, basic_block new_entry)
2197 {
2198   tree callee_fndecl = id->src_fn;
2199   /* Original cfun for the callee, doesn't change.  */
2200   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2201   struct function *cfun_to_copy;
2202   basic_block bb;
2203   tree new_fndecl = NULL;
2204   bool need_debug_cleanup = false;
2205   gcov_type count_scale;
2206   int last;
2207   int incoming_frequency = 0;
2208   gcov_type incoming_count = 0;
2209 
2210   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2211     count_scale = (REG_BR_PROB_BASE * count
2212 		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2213   else
2214     count_scale = REG_BR_PROB_BASE;
2215 
2216   /* Register specific tree functions.  */
2217   gimple_register_cfg_hooks ();
2218 
2219   /* If we are inlining just a region of the function, make sure to connect
2220      the new entry to ENTRY_BLOCK_PTR.  Since the new entry can be part of a
2221      loop, we must compute the frequency and probability of ENTRY_BLOCK_PTR
2222      based on those of the edges incoming from the nonduplicated region.  */
2223   if (new_entry)
2224     {
2225       edge e;
2226       edge_iterator ei;
2227 
2228       FOR_EACH_EDGE (e, ei, new_entry->preds)
2229 	if (!e->src->aux)
2230 	  {
2231 	    incoming_frequency += EDGE_FREQUENCY (e);
2232 	    incoming_count += e->count;
2233 	  }
2234       incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2235       incoming_frequency
2236 	= incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2237       ENTRY_BLOCK_PTR->count = incoming_count;
2238       ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2239     }
2240 
2241   /* Must have a CFG here at this point.  */
2242   gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2243 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2244 
2245   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2246 
2247   ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2248   EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2249   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2250   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2251 
2252   /* Duplicate any exception-handling regions.  */
2253   if (cfun->eh)
2254     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2255 				       remap_decl_1, id);
2256 
2257   /* Use the aux pointers to map the original blocks to their copies.  */
2258   FOR_EACH_BB_FN (bb, cfun_to_copy)
2259     if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2260       {
2261 	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2262 	bb->aux = new_bb;
2263 	new_bb->aux = bb;
2264       }
2265 
2266   last = last_basic_block;
2267 
2268   /* Now that we've duplicated the blocks, duplicate their edges.  */
2269   FOR_ALL_BB_FN (bb, cfun_to_copy)
2270     if (!blocks_to_copy
2271         || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2272       need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2273 
2274   if (new_entry)
2275     {
2276       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2277       e->probability = REG_BR_PROB_BASE;
2278       e->count = incoming_count;
2279     }
2280 
2281   if (gimple_in_ssa_p (cfun))
2282     FOR_ALL_BB_FN (bb, cfun_to_copy)
2283       if (!blocks_to_copy
2284 	  || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2285 	copy_phis_for_bb (bb, id);
2286 
2287   FOR_ALL_BB_FN (bb, cfun_to_copy)
2288     if (bb->aux)
2289       {
2290 	if (need_debug_cleanup
2291 	    && bb->index != ENTRY_BLOCK
2292 	    && bb->index != EXIT_BLOCK)
2293 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2294 	((basic_block)bb->aux)->aux = NULL;
2295 	bb->aux = NULL;
2296       }
2297 
2298   /* Zero out the AUX fields of blocks newly created during EH edge
2299      insertion.  */
2300   for (; last < last_basic_block; last++)
2301     {
2302       if (need_debug_cleanup)
2303 	maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2304       BASIC_BLOCK (last)->aux = NULL;
2305     }
2306   entry_block_map->aux = NULL;
2307   exit_block_map->aux = NULL;
2308 
2309   if (id->eh_map)
2310     {
2311       pointer_map_destroy (id->eh_map);
2312       id->eh_map = NULL;
2313     }
2314 
2315   return new_fndecl;
2316 }
2317 
2318 /* Copy the debug STMT using ID.  We deal with these statements in a
2319    special way: if any variable in their VALUE expression wasn't
2320    remapped yet, we won't remap it, because that would get decl uids
2321    out of sync, causing codegen differences between -g and -g0.  If
2322    this arises, we drop the VALUE expression altogether.  */
2323 
2324 static void
2325 copy_debug_stmt (gimple stmt, copy_body_data *id)
2326 {
2327   tree t, *n;
2328   struct walk_stmt_info wi;
2329 
2330   if (gimple_block (stmt))
2331     {
2332       n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2333       gimple_set_block (stmt, n ? *n : id->block);
2334     }
2335 
2336   /* Remap all the operands in the copied statement.  */
2337   memset (&wi, 0, sizeof (wi));
2338   wi.info = id;
2339 
2340   processing_debug_stmt = 1;
2341 
2342   if (gimple_debug_source_bind_p (stmt))
2343     t = gimple_debug_source_bind_get_var (stmt);
2344   else
2345     t = gimple_debug_bind_get_var (stmt);
2346 
2347   if (TREE_CODE (t) == PARM_DECL && id->debug_map
2348       && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2349     {
2350       gcc_assert (TREE_CODE (*n) == VAR_DECL);
2351       t = *n;
2352     }
2353   else if (TREE_CODE (t) == VAR_DECL
2354 	   && !is_global_var (t)
2355 	   && !pointer_map_contains (id->decl_map, t))
2356     /* T is a non-localized variable.  */;
2357   else
2358     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2359 
2360   if (gimple_debug_bind_p (stmt))
2361     {
2362       gimple_debug_bind_set_var (stmt, t);
2363 
2364       if (gimple_debug_bind_has_value_p (stmt))
2365 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2366 		   remap_gimple_op_r, &wi, NULL);
2367 
2368       /* Punt if any decl couldn't be remapped.  */
2369       if (processing_debug_stmt < 0)
2370 	gimple_debug_bind_reset_value (stmt);
2371     }
2372   else if (gimple_debug_source_bind_p (stmt))
2373     {
2374       gimple_debug_source_bind_set_var (stmt, t);
2375       walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2376 		 remap_gimple_op_r, &wi, NULL);
2377       /* When inlining, if the source bind refers to one of the optimized-
2378 	 away parameters, change the source bind into a normal debug bind
2379 	 referring to the corresponding DEBUG_EXPR_DECL that should have
2380 	 been bound before the call stmt.  */
2381       t = gimple_debug_source_bind_get_value (stmt);
2382       if (t != NULL_TREE
2383 	  && TREE_CODE (t) == PARM_DECL
2384 	  && id->gimple_call)
2385 	{
2386 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2387 	  unsigned int i;
2388 	  if (debug_args != NULL)
2389 	    {
2390 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2391 		if ((**debug_args)[i] == DECL_ORIGIN (t)
2392 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2393 		  {
2394 		    t = (**debug_args)[i + 1];
2395 		    stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
2396 		    gimple_debug_bind_set_value (stmt, t);
2397 		    break;
2398 		  }
2399 	    }
2400 	}
2401     }
2402 
2403   processing_debug_stmt = 0;
2404 
2405   update_stmt (stmt);
2406 }
2407 
2408 /* Process deferred debug stmts.  In order to give values better odds
2409    of being successfully remapped, we delay the processing of debug
2410    stmts until all other stmts that might require remapping are
2411    processed.  */
2412 
2413 static void
2414 copy_debug_stmts (copy_body_data *id)
2415 {
2416   size_t i;
2417   gimple stmt;
2418 
2419   if (!id->debug_stmts.exists ())
2420     return;
2421 
2422   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2423     copy_debug_stmt (stmt, id);
2424 
2425   id->debug_stmts.release ();
2426 }
2427 
2428 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2429    another function.  */
2430 
2431 static tree
2432 copy_tree_body (copy_body_data *id)
2433 {
2434   tree fndecl = id->src_fn;
2435   tree body = DECL_SAVED_TREE (fndecl);
2436 
2437   walk_tree (&body, copy_tree_body_r, id, NULL);
2438 
2439   return body;
2440 }
2441 
2442 /* Make a copy of the body of FN so that it can be inserted inline in
2443    another function.  */
2444 
2445 static tree
2446 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2447 	   basic_block entry_block_map, basic_block exit_block_map,
2448 	   bitmap blocks_to_copy, basic_block new_entry)
2449 {
2450   tree fndecl = id->src_fn;
2451   tree body;
2452 
2453   /* If this body has a CFG, walk CFG and copy.  */
2454   gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2455   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2456 		        blocks_to_copy, new_entry);
2457   copy_debug_stmts (id);
2458 
2459   return body;
2460 }
2461 
2462 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2463    defined in function FN, or of a data member thereof.  */
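
/* E.g. (hypothetical): given the recursive function

     int f (int *p) { int v = 0; return p ? *p : f (&v); }

   the argument `&v' is such an ADDR_EXPR when FN == f; the caller uses
   this predicate to avoid propagating `&v' into a recursively inlined
   body, where `v' would be remapped to a different variable.  */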
2464 
2465 static bool
2466 self_inlining_addr_expr (tree value, tree fn)
2467 {
2468   tree var;
2469 
2470   if (TREE_CODE (value) != ADDR_EXPR)
2471     return false;
2472 
2473   var = get_base_address (TREE_OPERAND (value, 0));
2474 
2475   return var && auto_var_in_fn_p (var, fn);
2476 }
2477 
2478 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2479    lexical block and line number information from base_stmt, if given,
2480    or from the last stmt of the block otherwise.  */
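
/* E.g. (hypothetical): when an inlined parameter `p' is initialized to
   the constant 5 but never otherwise used, this can emit a statement
   that dumps as

     # DEBUG p => 5

   so the debugger can still show p's value in the inlined frame.  */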
2481 
2482 static gimple
2483 insert_init_debug_bind (copy_body_data *id,
2484 			basic_block bb, tree var, tree value,
2485 			gimple base_stmt)
2486 {
2487   gimple note;
2488   gimple_stmt_iterator gsi;
2489   tree tracked_var;
2490 
2491   if (!gimple_in_ssa_p (id->src_cfun))
2492     return NULL;
2493 
2494   if (!MAY_HAVE_DEBUG_STMTS)
2495     return NULL;
2496 
2497   tracked_var = target_for_debug_bind (var);
2498   if (!tracked_var)
2499     return NULL;
2500 
2501   if (bb)
2502     {
2503       gsi = gsi_last_bb (bb);
2504       if (!base_stmt && !gsi_end_p (gsi))
2505 	base_stmt = gsi_stmt (gsi);
2506     }
2507 
2508   note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2509 
2510   if (bb)
2511     {
2512       if (!gsi_end_p (gsi))
2513 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2514       else
2515 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2516     }
2517 
2518   return note;
2519 }
2520 
2521 static void
2522 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2523 {
2524   /* If VAR represents a zero-sized variable, it's possible that the
2525      assignment statement may result in no gimple statements.  */
2526   if (init_stmt)
2527     {
2528       gimple_stmt_iterator si = gsi_last_bb (bb);
2529 
2530       /* We can end up with init statements that store to a non-register
2531          from a rhs with a conversion.  Handle that here by forcing the
2532 	 rhs into a temporary.  gimple_regimplify_operands is not
2533 	 prepared to do this for us.  */
2534       if (!is_gimple_debug (init_stmt)
2535 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2536 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2537 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2538 	{
2539 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2540 			     gimple_expr_type (init_stmt),
2541 			     gimple_assign_rhs1 (init_stmt));
2542 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2543 					  GSI_NEW_STMT);
2544 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2545 	  gimple_assign_set_rhs1 (init_stmt, rhs);
2546 	}
2547       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2548       gimple_regimplify_operands (init_stmt, &si);
2549 
2550       if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2551 	{
2552 	  tree def = gimple_assign_lhs (init_stmt);
2553 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
2554 	}
2555     }
2556 }
2557 
2558 /* Initialize parameter P with VALUE.  If needed, produce an init statement
2559    at the end of BB.  When BB is NULL, we return the init statement to be
2560    output later.  */
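/* E.g. (hypothetical): when inlining void f (int p) at the call site
   f (a_1 + 1), the parameter is set up roughly as

     p.7 = a_1 + 1;

   whereas for f (5), with P read-only and unaddressed, the constant 5
   is simply propagated into the inlined body and no init statement is
   needed (the name p.7 is made up for this example).  */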
2561 static gimple
2562 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2563 		     basic_block bb, tree *vars)
2564 {
2565   gimple init_stmt = NULL;
2566   tree var;
2567   tree rhs = value;
2568   tree def = (gimple_in_ssa_p (cfun)
2569 	      ? ssa_default_def (id->src_cfun, p) : NULL);
2570 
2571   if (value
2572       && value != error_mark_node
2573       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2574     {
2575       /* If we can match up types by promotion/demotion do so.  */
2576       if (fold_convertible_p (TREE_TYPE (p), value))
2577 	rhs = fold_convert (TREE_TYPE (p), value);
2578       else
2579 	{
2580 	  /* ???  For valid programs we should not end up here.
2581 	     Still if we end up with truly mismatched types here, fall back
2582 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2583 	     GIMPLE to the following passes.  */
2584 	  if (!is_gimple_reg_type (TREE_TYPE (value))
2585 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2586 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2587 	  else
2588 	    rhs = build_zero_cst (TREE_TYPE (p));
2589 	}
2590     }
2591 
2592   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
2593      here since the type of this decl must be visible to the calling
2594      function.  */
2595   var = copy_decl_to_var (p, id);
2596 
2597   /* Declare this new variable.  */
2598   DECL_CHAIN (var) = *vars;
2599   *vars = var;
2600 
2601   /* Make gimplifier happy about this variable.  */
2602   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2603 
2604   /* If the parameter is never assigned to and has no SSA_NAMEs created,
2605      we would not need to create a new variable here at all, if it
2606      weren't for debug info.  Still, we can just use the argument
2607      value.  */
2608   if (TREE_READONLY (p)
2609       && !TREE_ADDRESSABLE (p)
2610       && value && !TREE_SIDE_EFFECTS (value)
2611       && !def)
2612     {
2613       /* We may produce non-gimple trees by adding NOPs or introduce
2614 	 invalid sharing when the operand is not really constant.
2615 	 It is no big deal to prohibit constant propagation here, as
2616 	 we will constant propagate in the DOM1 pass anyway.  */
2617       if (is_gimple_min_invariant (value)
2618 	  && useless_type_conversion_p (TREE_TYPE (p),
2619 						 TREE_TYPE (value))
2620 	  /* We have to be very careful about ADDR_EXPR.  Make sure
2621 	     the base variable isn't a local variable of the inlined
2622 	     function, e.g., when doing recursive inlining, direct or
2623 	     mutually-recursive or whatever, which is why we don't
2624 	     just test whether fn == current_function_decl.  */
2625 	  && ! self_inlining_addr_expr (value, fn))
2626 	{
2627 	  insert_decl_map (id, p, value);
2628 	  insert_debug_decl_map (id, p, var);
2629 	  return insert_init_debug_bind (id, bb, var, value, NULL);
2630 	}
2631     }
2632 
2633   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2634      that way, when the PARM_DECL is encountered, it will be
2635      automatically replaced by the VAR_DECL.  */
2636   insert_decl_map (id, p, var);
2637 
2638   /* Even if P was TREE_READONLY, the new VAR should not be.
2639      In the original code, we would have constructed a
2640      temporary, and then the function body would have never
2641      changed the value of P.  However, now, we will be
2642      constructing VAR directly.  The constructor body may
2643      change its value multiple times as it is being
2644      constructed.  Therefore, it must not be TREE_READONLY;
2645      the back-end assumes that TREE_READONLY variable is
2646      assigned to only once.  */
2647   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2648     TREE_READONLY (var) = 0;
2649 
2650   /* If there is no setup required and we are in SSA, take the easy route
2651      of replacing all SSA names representing the function parameter by the
2652      SSA name passed to the function.
2653 
2654      We need to construct a map for the variable anyway, as it might be used
2655      in different SSA names when the parameter is set in the function.
2656 
2657      Do the replacement at -O0 for const arguments replaced by constants.
2658      This is important for builtin_constant_p and other constructs requiring
2659      a constant argument to be visible in the inlined function body.  */
2660   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2661       && (optimize
2662           || (TREE_READONLY (p)
2663 	      && is_gimple_min_invariant (rhs)))
2664       && (TREE_CODE (rhs) == SSA_NAME
2665 	  || is_gimple_min_invariant (rhs))
2666       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2667     {
2668       insert_decl_map (id, def, rhs);
2669       return insert_init_debug_bind (id, bb, var, rhs, NULL);
2670     }
2671 
2672   /* If the value of the argument is never used, don't bother initializing
2673      it.  */
2674   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2675     {
2676       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2677       return insert_init_debug_bind (id, bb, var, rhs, NULL);
2678     }
2679 
2680   /* Initialize this VAR_DECL from the equivalent argument.  Convert
2681      the argument to the proper type in case it was promoted.  */
2682   if (value)
2683     {
2684       if (rhs == error_mark_node)
2685 	{
2686 	  insert_decl_map (id, p, var);
2687 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
2688 	}
2689 
2690       STRIP_USELESS_TYPE_CONVERSION (rhs);
2691 
2692       /* If we are in SSA form, properly remap the default definition
2693          or assign to a dummy SSA name if the parameter is unused and
2694 	 we are not optimizing.  */
2695       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2696 	{
2697 	  if (def)
2698 	    {
2699 	      def = remap_ssa_name (def, id);
2700 	      init_stmt = gimple_build_assign (def, rhs);
2701 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2702 	      set_ssa_default_def (cfun, var, NULL);
2703 	    }
2704 	  else if (!optimize)
2705 	    {
2706 	      def = make_ssa_name (var, NULL);
2707 	      init_stmt = gimple_build_assign (def, rhs);
2708 	    }
2709 	}
2710       else
2711         init_stmt = gimple_build_assign (var, rhs);
2712 
2713       if (bb && init_stmt)
2714         insert_init_stmt (id, bb, init_stmt);
2715     }
2716   return init_stmt;
2717 }
2718 
2719 /* Generate code to initialize the parameters of the function at the
2720    top of the stack in ID from the GIMPLE_CALL STMT.  */
2721 
2722 static void
2723 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2724 			       tree fn, basic_block bb)
2725 {
2726   tree parms;
2727   size_t i;
2728   tree p;
2729   tree vars = NULL_TREE;
2730   tree static_chain = gimple_call_chain (stmt);
2731 
2732   /* Figure out what the parameters are.  */
2733   parms = DECL_ARGUMENTS (fn);
2734 
2735   /* Loop through the parameter declarations, replacing each with an
2736      equivalent VAR_DECL, appropriately initialized.  */
2737   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2738     {
2739       tree val;
2740       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2741       setup_one_parameter (id, p, val, fn, bb, &vars);
2742     }
2743   /* After remapping the parameters, remap their types.  This has to be done
2744      in a second loop over all parameters to appropriately remap
2745      variable-sized arrays whose size is specified in a
2746      parameter following the array.  */
2747   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2748     {
2749       tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2750       if (varp
2751 	  && TREE_CODE (*varp) == VAR_DECL)
2752 	{
2753 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2754 		      ? ssa_default_def (id->src_cfun, p) : NULL);
2755 	  tree var = *varp;
2756 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2757 	  /* Also remap the default definition if it was remapped
2758 	     to the default definition of the parameter replacement
2759 	     by the parameter setup.  */
2760 	  if (def)
2761 	    {
2762 	      tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2763 	      if (defp
2764 		  && TREE_CODE (*defp) == SSA_NAME
2765 		  && SSA_NAME_VAR (*defp) == var)
2766 		TREE_TYPE (*defp) = TREE_TYPE (var);
2767 	    }
2768 	}
2769     }
2770 
2771   /* Initialize the static chain.  */
2772   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2773   gcc_assert (fn != current_function_decl);
2774   if (p)
2775     {
2776       /* No static chain?  Seems like a bug in tree-nested.c.  */
2777       gcc_assert (static_chain);
2778 
2779       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2780     }
2781 
2782   declare_inline_vars (id->block, vars);
2783 }
2784 
2785 
2786 /* Declare a return variable to replace the RESULT_DECL for the
2787    function we are calling.  An appropriate DECL_STMT is returned.
2788    The USE_STMT is filled to contain a use of the declaration to
2789    indicate the return value of the function.
2790 
2791    RETURN_SLOT, if non-null, is the place where to store the result.  It
2792    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
2793    was the LHS of the MODIFY_EXPR to which this call is the RHS.
2794 
2795    The return value is a (possibly null) value that holds the result
2796    as seen by the caller.  */
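
/* For illustration (hypothetical): inlining `x = f ();' where f ends
   with `return y_1;' introduces a return variable, so the inlined body
   ends in roughly

     retval.9 = y_1;

   and the caller's statement becomes  x = retval.9;  (the name retval.9
   is made up for this example).  */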
2797 
2798 static tree
2799 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2800 			 basic_block entry_bb)
2801 {
2802   tree callee = id->src_fn;
2803   tree result = DECL_RESULT (callee);
2804   tree callee_type = TREE_TYPE (result);
2805   tree caller_type;
2806   tree var, use;
2807 
2808   /* Handle type-mismatches in the function declaration return type
2809      vs. the call expression.  */
2810   if (modify_dest)
2811     caller_type = TREE_TYPE (modify_dest);
2812   else
2813     caller_type = TREE_TYPE (TREE_TYPE (callee));
2814 
2815   /* We don't need to do anything for functions that don't return anything.  */
2816   if (VOID_TYPE_P (callee_type))
2817     return NULL_TREE;
2818 
2819   /* If there was a return slot, then the return value is the
2820      dereferenced address of that object.  */
2821   if (return_slot)
2822     {
2823       /* The front end shouldn't have used both return_slot and
2824 	 a modify expression.  */
2825       gcc_assert (!modify_dest);
2826       if (DECL_BY_REFERENCE (result))
2827 	{
2828 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
2829 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2830 
2831 	  /* We are going to construct *&return_slot and we can't do that
2832 	     for variables believed to be not addressable.
2833 
2834 	     FIXME: This check can possibly trigger, because values returned
2835 	     via return slot optimization are not believed to have their
2836 	     address taken by alias analysis.  */
2837 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2838 	  var = return_slot_addr;
2839 	}
2840       else
2841 	{
2842 	  var = return_slot;
2843 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
2844 	  if (TREE_ADDRESSABLE (result))
2845 	    mark_addressable (var);
2846 	}
2847       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2848            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2849 	  && !DECL_GIMPLE_REG_P (result)
2850 	  && DECL_P (var))
2851 	DECL_GIMPLE_REG_P (var) = 0;
2852       use = NULL;
2853       goto done;
2854     }
2855 
2856   /* All types requiring non-trivial constructors should have been handled.  */
2857   gcc_assert (!TREE_ADDRESSABLE (callee_type));
2858 
2859   /* Attempt to avoid creating a new temporary variable.  */
2860   if (modify_dest
2861       && TREE_CODE (modify_dest) != SSA_NAME)
2862     {
2863       bool use_it = false;
2864 
2865       /* We can't use MODIFY_DEST if there's type promotion involved.  */
2866       if (!useless_type_conversion_p (callee_type, caller_type))
2867 	use_it = false;
2868 
2869       /* ??? If we're assigning to a variable sized type, then we must
2870 	 reuse the destination variable, because we've no good way to
2871 	 create variable sized temporaries at this point.  */
2872       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2873 	use_it = true;
2874 
2875       /* If the callee cannot possibly modify MODIFY_DEST, then we can
2876 	 reuse it as the result of the call directly.  Don't do this if
2877 	 it would promote MODIFY_DEST to addressable.  */
2878       else if (TREE_ADDRESSABLE (result))
2879 	use_it = false;
2880       else
2881 	{
2882 	  tree base_m = get_base_address (modify_dest);
2883 
2884 	  /* If the base isn't a decl, then it's a pointer, and we don't
2885 	     know where that's going to go.  */
2886 	  if (!DECL_P (base_m))
2887 	    use_it = false;
2888 	  else if (is_global_var (base_m))
2889 	    use_it = false;
2890 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2891 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2892 		   && !DECL_GIMPLE_REG_P (result)
2893 		   && DECL_GIMPLE_REG_P (base_m))
2894 	    use_it = false;
2895 	  else if (!TREE_ADDRESSABLE (base_m))
2896 	    use_it = true;
2897 	}
2898 
2899       if (use_it)
2900 	{
2901 	  var = modify_dest;
2902 	  use = NULL;
2903 	  goto done;
2904 	}
2905     }
2906 
2907   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2908 
2909   var = copy_result_decl_to_var (result, id);
2910   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2911 
2912   /* Do not have the rest of GCC warn about this variable as it should
2913      not be visible to the user.  */
2914   TREE_NO_WARNING (var) = 1;
2915 
2916   declare_inline_vars (id->block, var);
2917 
2918   /* Build the use expr.  If the return type of the function was
2919      promoted, convert it back to the expected type.  */
2920   use = var;
2921   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2922     {
2923       /* If we can match up types by promotion/demotion do so.  */
2924       if (fold_convertible_p (caller_type, var))
2925 	use = fold_convert (caller_type, var);
2926       else
2927 	{
2928 	  /* ???  For valid programs we should not end up here.
2929 	     Still if we end up with truly mismatched types here, fall back
2930 	     to using a MEM_REF to not leak invalid GIMPLE to the following
2931 	     passes.  */
2932 	  /* Prevent var from being written into SSA form.  */
2933 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
2934 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
2935 	    DECL_GIMPLE_REG_P (var) = false;
2936 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
2937 	    TREE_ADDRESSABLE (var) = true;
2938 	  use = fold_build2 (MEM_REF, caller_type,
2939 			     build_fold_addr_expr (var),
2940 			     build_int_cst (ptr_type_node, 0));
2941 	}
2942     }
2943 
2944   STRIP_USELESS_TYPE_CONVERSION (use);
2945 
2946   if (DECL_BY_REFERENCE (result))
2947     {
2948       TREE_ADDRESSABLE (var) = 1;
2949       var = build_fold_addr_expr (var);
2950     }
2951 
2952  done:
2953   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2954      way, when the RESULT_DECL is encountered, it will be
2955      automatically replaced by the VAR_DECL.
2956 
2957      When returning by reference, ensure that RESULT_DECL remaps to
2958      gimple_val.  */
2959   if (DECL_BY_REFERENCE (result)
2960       && !is_gimple_val (var))
2961     {
2962       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2963       insert_decl_map (id, result, temp);
2964       /* When RESULT_DECL is in SSA form, we need to remap and initialize
2965 	 its default_def SSA_NAME.  */
2966       if (gimple_in_ssa_p (id->src_cfun)
2967 	  && is_gimple_reg (result))
2968 	{
2969 	  temp = make_ssa_name (temp, NULL);
2970 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
2971 	}
2972       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2973     }
2974   else
2975     insert_decl_map (id, result, var);
2976 
2977   /* Remember this so we can ignore it in remap_decls.  */
2978   id->retvar = var;
2979 
2980   return use;
2981 }
2982 
2983 /* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
2984    to a local label.  */
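
/* E.g. (hypothetical): it matches the DECL_INITIAL in

     void f (void) { lab: ; static void *p = &&lab; ... }

   which stores the address of a local label via the GNU
   address-of-label extension.  */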
2985 
2986 static tree
2987 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2988 {
2989   tree node = *nodep;
2990   tree fn = (tree) fnp;
2991 
2992   if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2993     return node;
2994 
2995   if (TYPE_P (node))
2996     *walk_subtrees = 0;
2997 
2998   return NULL_TREE;
2999 }
3000 
3001 /* Determine if the function can be copied.  If so return NULL.  If
3002    not, return a string describing the reason for failure.  */
3003 
3004 static const char *
3005 copy_forbidden (struct function *fun, tree fndecl)
3006 {
3007   const char *reason = fun->cannot_be_copied_reason;
3008   tree decl;
3009   unsigned ix;
3010 
3011   /* Only examine the function once.  */
3012   if (fun->cannot_be_copied_set)
3013     return reason;
3014 
3015   /* We cannot copy a function that receives a non-local goto
3016      because we cannot remap the destination label used in the
3017      function that is performing the non-local goto.  */
3018   /* ??? Actually, this should be possible, if we work at it.
3019      No doubt there's just a handful of places that simply
3020      assume it doesn't happen and don't substitute properly.  */
3021   if (fun->has_nonlocal_label)
3022     {
3023       reason = G_("function %q+F can never be copied "
3024 		  "because it receives a non-local goto");
3025       goto fail;
3026     }
3027 
3028   FOR_EACH_LOCAL_DECL (fun, ix, decl)
3029     if (TREE_CODE (decl) == VAR_DECL
3030 	&& TREE_STATIC (decl)
3031 	&& !DECL_EXTERNAL (decl)
3032 	&& DECL_INITIAL (decl)
3033 	&& walk_tree_without_duplicates (&DECL_INITIAL (decl),
3034 					 has_label_address_in_static_1,
3035 					 fndecl))
3036       {
3037 	reason = G_("function %q+F can never be copied because it saves "
3038 		    "address of local label in a static variable");
3039 	goto fail;
3040       }
3041 
3042  fail:
3043   fun->cannot_be_copied_reason = reason;
3044   fun->cannot_be_copied_set = true;
3045   return reason;
3046 }
3047 
3048 
3049 static const char *inline_forbidden_reason;
3050 
3051 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3052    iff a function cannot be inlined.  Also sets the reason why.  */
3053 
3054 static tree
3055 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3056 			 struct walk_stmt_info *wip)
3057 {
3058   tree fn = (tree) wip->info;
3059   tree t;
3060   gimple stmt = gsi_stmt (*gsi);
3061 
3062   switch (gimple_code (stmt))
3063     {
3064     case GIMPLE_CALL:
3065       /* Refuse to inline an alloca call unless the user explicitly forced
3066 	 it, as this may change the program's memory overhead drastically
3067 	 when the function using alloca is called in a loop.  In the GCC
3068 	 present in SPEC2000, inlining into schedule_block caused it to
3069 	 require 2GB of RAM instead of 256MB.  Don't do so for alloca calls
3070 	 emitted for VLA objects, as those can't cause unbounded growth
3071 	 (they're always wrapped inside stack_save/stack_restore regions).  */
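      /* E.g. (hypothetical): void f (int n) { use (alloca (n)); }
	 releases its allocation when f returns, but once f is inlined
	 into a loop body, each iteration's alloca stays live until the
	 caller returns.  */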
3072       if (gimple_alloca_call_p (stmt)
3073 	  && !gimple_call_alloca_for_var_p (stmt)
3074 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3075 	{
3076 	  inline_forbidden_reason
3077 	    = G_("function %q+F can never be inlined because it uses "
3078 		 "alloca (override using the always_inline attribute)");
3079 	  *handled_ops_p = true;
3080 	  return fn;
3081 	}
3082 
3083       t = gimple_call_fndecl (stmt);
3084       if (t == NULL_TREE)
3085 	break;
3086 
3087       /* We cannot inline functions that call setjmp.  */
3088       if (setjmp_call_p (t))
3089 	{
3090 	  inline_forbidden_reason
3091 	    = G_("function %q+F can never be inlined because it uses setjmp");
3092 	  *handled_ops_p = true;
3093 	  return t;
3094 	}
3095 
3096       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3097 	switch (DECL_FUNCTION_CODE (t))
3098 	  {
3099 	    /* We cannot inline functions that take a variable number of
3100 	       arguments.  */
3101 	  case BUILT_IN_VA_START:
3102 	  case BUILT_IN_NEXT_ARG:
3103 	  case BUILT_IN_VA_END:
3104 	    inline_forbidden_reason
3105 	      = G_("function %q+F can never be inlined because it "
3106 		   "uses variable argument lists");
3107 	    *handled_ops_p = true;
3108 	    return t;
3109 
3110 	  case BUILT_IN_LONGJMP:
3111 	    /* We can't inline functions that call __builtin_longjmp at
3112 	       all.  The non-local goto machinery really requires the
3113 	       destination be in a different function.  If we allow the
3114 	       function calling __builtin_longjmp to be inlined into the
3115 	       function calling __builtin_setjmp, Things will Go Awry.  */
3116 	    inline_forbidden_reason
3117 	      = G_("function %q+F can never be inlined because "
3118 		   "it uses setjmp-longjmp exception handling");
3119 	    *handled_ops_p = true;
3120 	    return t;
3121 
3122 	  case BUILT_IN_NONLOCAL_GOTO:
3123 	    /* Similarly.  */
3124 	    inline_forbidden_reason
3125 	      = G_("function %q+F can never be inlined because "
3126 		   "it uses non-local goto");
3127 	    *handled_ops_p = true;
3128 	    return t;
3129 
3130 	  case BUILT_IN_RETURN:
3131 	  case BUILT_IN_APPLY_ARGS:
3132 	    /* If a __builtin_apply_args caller were inlined,
3133 	       it would be saving arguments of the function it has
3134 	       been inlined into.  Similarly, __builtin_return would
3135 	       return from the function it has been inlined into.  */
3136 	    inline_forbidden_reason
3137 	      = G_("function %q+F can never be inlined because "
3138 		   "it uses __builtin_return or __builtin_apply_args");
3139 	    *handled_ops_p = true;
3140 	    return t;
3141 
3142 	  default:
3143 	    break;
3144 	  }
3145       break;
3146 
3147     case GIMPLE_GOTO:
3148       t = gimple_goto_dest (stmt);
3149 
3150       /* We will not inline a function which uses computed goto.  The
3151 	 addresses of its local labels, which may be tucked into
3152 	 global storage, are of course not constant across
3153 	 instantiations, which causes unexpected behavior.  */
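      /* E.g. (hypothetical):

	   static void *tab[] = { &&a, &&b };
	   ...
	   goto *tab[i];

	 is such a computed goto; tab would keep pointing at the
	 original function's labels even in an inlined copy.  */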
3154       if (TREE_CODE (t) != LABEL_DECL)
3155 	{
3156 	  inline_forbidden_reason
3157 	    = G_("function %q+F can never be inlined "
3158 		 "because it contains a computed goto");
3159 	  *handled_ops_p = true;
3160 	  return t;
3161 	}
3162       break;
3163 
3164     default:
3165       break;
3166     }
3167 
3168   *handled_ops_p = false;
3169   return NULL_TREE;
3170 }
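
/* For illustration only - hypothetical user code, not part of GCC (and
   use () is a made-up helper).  Bodies that trip the checks above include:

     void f (size_t n) { char *p = alloca (n); use (p); }   // uses alloca
     int g (jmp_buf env) { return setjmp (env); }           // calls setjmp
     void h (void *l) { goto *l; }                           // computed goto

   Each sets inline_forbidden_reason accordingly; of these, only the alloca
   case can be overridden with the always_inline attribute.  */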
3171 
3172 /* Return true if FNDECL is a function that cannot be inlined into
3173    another one.  */
3174 
3175 static bool
3176 inline_forbidden_p (tree fndecl)
3177 {
3178   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3179   struct walk_stmt_info wi;
3180   struct pointer_set_t *visited_nodes;
3181   basic_block bb;
3182   bool forbidden_p = false;
3183 
3184   /* First check for shared reasons not to copy the code.  */
3185   inline_forbidden_reason = copy_forbidden (fun, fndecl);
3186   if (inline_forbidden_reason != NULL)
3187     return true;
3188 
3189   /* Next, walk the statements of the function looking for
3190      constructs we can't handle, or that are non-optimal for inlining.  */
3191   visited_nodes = pointer_set_create ();
3192   memset (&wi, 0, sizeof (wi));
3193   wi.info = (void *) fndecl;
3194   wi.pset = visited_nodes;
3195 
3196   FOR_EACH_BB_FN (bb, fun)
3197     {
3198       gimple ret;
3199       gimple_seq seq = bb_seq (bb);
3200       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3201       forbidden_p = (ret != NULL);
3202       if (forbidden_p)
3203 	break;
3204     }
3205 
3206   pointer_set_destroy (visited_nodes);
3207   return forbidden_p;
3208 }
3209 
3210 /* Return false if the function FNDECL cannot be inlined on account of its
3211    attributes, true otherwise.  */
3212 static bool
3213 function_attribute_inlinable_p (const_tree fndecl)
3214 {
3215   if (targetm.attribute_table)
3216     {
3217       const_tree a;
3218 
3219       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3220 	{
3221 	  const_tree name = TREE_PURPOSE (a);
3222 	  int i;
3223 
3224 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3225 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3226 	      return targetm.function_attribute_inlinable_p (fndecl);
3227 	}
3228     }
3229 
3230   return true;
3231 }
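
/* For illustration: if FNDECL carries an attribute listed in the target's
   attribute table - say an interrupt-handler attribute on a target that
   defines one - the decision above is delegated to the backend through
   targetm.function_attribute_inlinable_p; with no matching target
   attribute the function is treated as inlinable by default.  */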
3232 
3233 /* Returns nonzero if FN is a function that does not have any
3234    fundamental inline blocking properties.  */
3235 
3236 bool
3237 tree_inlinable_function_p (tree fn)
3238 {
3239   bool inlinable = true;
3240   bool do_warning;
3241   tree always_inline;
3242 
3243   /* If we've already decided this function shouldn't be inlined,
3244      there's no need to check again.  */
3245   if (DECL_UNINLINABLE (fn))
3246     return false;
3247 
3248   /* We only warn for functions declared `inline' by the user.  */
3249   do_warning = (warn_inline
3250 		&& DECL_DECLARED_INLINE_P (fn)
3251 		&& !DECL_NO_INLINE_WARNING_P (fn)
3252 		&& !DECL_IN_SYSTEM_HEADER (fn));
3253 
3254   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3255 
3256   if (flag_no_inline
3257       && always_inline == NULL)
3258     {
3259       if (do_warning)
3260         warning (OPT_Winline, "function %q+F can never be inlined because it "
3261                  "is suppressed using -fno-inline", fn);
3262       inlinable = false;
3263     }
3264 
3265   else if (!function_attribute_inlinable_p (fn))
3266     {
3267       if (do_warning)
3268         warning (OPT_Winline, "function %q+F can never be inlined because it "
3269                  "uses attributes conflicting with inlining", fn);
3270       inlinable = false;
3271     }
3272 
3273   else if (inline_forbidden_p (fn))
3274     {
3275       /* See if we should warn about uninlinable functions.  Previously,
3276 	 some of these warnings would be issued while trying to expand
3277 	 the function inline, but that would cause multiple warnings
3278 	 about functions that would for example call alloca.  But since
3279 	 this a property of the function, just one warning is enough.
3280 	 this is a property of the function, just one warning is enough.
3281 	 function is not inlinable.  */
3282       if (always_inline)
3283 	error (inline_forbidden_reason, fn);
3284       else if (do_warning)
3285 	warning (OPT_Winline, inline_forbidden_reason, fn);
3286 
3287       inlinable = false;
3288     }
3289 
3290   /* Squirrel away the result so that we don't have to check again.  */
3291   DECL_UNINLINABLE (fn) = !inlinable;
3292 
3293   return inlinable;
3294 }
3295 
3296 /* Estimate the cost of a memory move.  Use the machine-dependent
3297    word size and take a possible memcpy call into account.  */
3298 
3299 int
3300 estimate_move_cost (tree type)
3301 {
3302   HOST_WIDE_INT size;
3303 
3304   gcc_assert (!VOID_TYPE_P (type));
3305 
3306   if (TREE_CODE (type) == VECTOR_TYPE)
3307     {
3308       enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3309       enum machine_mode simd
3310 	= targetm.vectorize.preferred_simd_mode (inner);
3311       int simd_mode_size = GET_MODE_SIZE (simd);
3312       return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3313 	      / simd_mode_size);
3314     }
3315 
3316   size = int_size_in_bytes (type);
3317 
3318   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3319     /* Cost of a memcpy call, 3 arguments and the call.  */
3320     return 4;
3321   else
3322     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3323 }
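
/* For illustration, with made-up target parameters: if MOVE_MAX_PIECES == 8
   and MOVE_RATIO (!optimize_size) == 4, moving a 24-byte struct is charged
   (24 + 8 - 1) / 8 == 3 units, while a 40-byte struct exceeds the
   8 * 4 == 32 byte piecewise-move limit and gets the flat memcpy cost
   of 4.  */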
3324 
3325 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3326 
3327 static int
3328 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3329 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3330 {
3331   switch (code)
3332     {
3333     /* These are "free" conversions, or their presumed cost
3334        is folded into other operations.  */
3335     case RANGE_EXPR:
3336     CASE_CONVERT:
3337     case COMPLEX_EXPR:
3338     case PAREN_EXPR:
3339     case VIEW_CONVERT_EXPR:
3340       return 0;
3341 
3342     /* Assign a cost of 1 to the usual operations.
3343        ??? We may consider mapping RTL costs to this.  */
3344     case COND_EXPR:
3345     case VEC_COND_EXPR:
3346     case VEC_PERM_EXPR:
3347 
3348     case PLUS_EXPR:
3349     case POINTER_PLUS_EXPR:
3350     case MINUS_EXPR:
3351     case MULT_EXPR:
3352     case MULT_HIGHPART_EXPR:
3353     case FMA_EXPR:
3354 
3355     case ADDR_SPACE_CONVERT_EXPR:
3356     case FIXED_CONVERT_EXPR:
3357     case FIX_TRUNC_EXPR:
3358 
3359     case NEGATE_EXPR:
3360     case FLOAT_EXPR:
3361     case MIN_EXPR:
3362     case MAX_EXPR:
3363     case ABS_EXPR:
3364 
3365     case LSHIFT_EXPR:
3366     case RSHIFT_EXPR:
3367     case LROTATE_EXPR:
3368     case RROTATE_EXPR:
3369     case VEC_LSHIFT_EXPR:
3370     case VEC_RSHIFT_EXPR:
3371 
3372     case BIT_IOR_EXPR:
3373     case BIT_XOR_EXPR:
3374     case BIT_AND_EXPR:
3375     case BIT_NOT_EXPR:
3376 
3377     case TRUTH_ANDIF_EXPR:
3378     case TRUTH_ORIF_EXPR:
3379     case TRUTH_AND_EXPR:
3380     case TRUTH_OR_EXPR:
3381     case TRUTH_XOR_EXPR:
3382     case TRUTH_NOT_EXPR:
3383 
3384     case LT_EXPR:
3385     case LE_EXPR:
3386     case GT_EXPR:
3387     case GE_EXPR:
3388     case EQ_EXPR:
3389     case NE_EXPR:
3390     case ORDERED_EXPR:
3391     case UNORDERED_EXPR:
3392 
3393     case UNLT_EXPR:
3394     case UNLE_EXPR:
3395     case UNGT_EXPR:
3396     case UNGE_EXPR:
3397     case UNEQ_EXPR:
3398     case LTGT_EXPR:
3399 
3400     case CONJ_EXPR:
3401 
3402     case PREDECREMENT_EXPR:
3403     case PREINCREMENT_EXPR:
3404     case POSTDECREMENT_EXPR:
3405     case POSTINCREMENT_EXPR:
3406 
3407     case REALIGN_LOAD_EXPR:
3408 
3409     case REDUC_MAX_EXPR:
3410     case REDUC_MIN_EXPR:
3411     case REDUC_PLUS_EXPR:
3412     case WIDEN_SUM_EXPR:
3413     case WIDEN_MULT_EXPR:
3414     case DOT_PROD_EXPR:
3415     case WIDEN_MULT_PLUS_EXPR:
3416     case WIDEN_MULT_MINUS_EXPR:
3417     case WIDEN_LSHIFT_EXPR:
3418 
3419     case VEC_WIDEN_MULT_HI_EXPR:
3420     case VEC_WIDEN_MULT_LO_EXPR:
3421     case VEC_WIDEN_MULT_EVEN_EXPR:
3422     case VEC_WIDEN_MULT_ODD_EXPR:
3423     case VEC_UNPACK_HI_EXPR:
3424     case VEC_UNPACK_LO_EXPR:
3425     case VEC_UNPACK_FLOAT_HI_EXPR:
3426     case VEC_UNPACK_FLOAT_LO_EXPR:
3427     case VEC_PACK_TRUNC_EXPR:
3428     case VEC_PACK_SAT_EXPR:
3429     case VEC_PACK_FIX_TRUNC_EXPR:
3430     case VEC_WIDEN_LSHIFT_HI_EXPR:
3431     case VEC_WIDEN_LSHIFT_LO_EXPR:
3432 
3433       return 1;
3434 
3435     /* A few special cases of expensive operations.  This is useful
3436        to avoid inlining functions that have too many of these.  */
3437     case TRUNC_DIV_EXPR:
3438     case CEIL_DIV_EXPR:
3439     case FLOOR_DIV_EXPR:
3440     case ROUND_DIV_EXPR:
3441     case EXACT_DIV_EXPR:
3442     case TRUNC_MOD_EXPR:
3443     case CEIL_MOD_EXPR:
3444     case FLOOR_MOD_EXPR:
3445     case ROUND_MOD_EXPR:
3446     case RDIV_EXPR:
3447       if (TREE_CODE (op2) != INTEGER_CST)
3448         return weights->div_mod_cost;
3449       return 1;
3450 
3451     default:
3452       /* We expect a copy assignment with no operator.  */
3453       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3454       return 0;
3455     }
3456 }
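
/* For illustration: "x / y" with a non-constant divisor is charged
   weights->div_mod_cost (e.g. 10 under the time-based weights set up in
   init_inline_once below), while "x / 8" costs 1 like ordinary arithmetic,
   on the assumption that division by an integer constant gets strength-
   reduced to cheaper operations.  */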
3457 
3458 
3459 /* Estimate number of instructions that will be created by expanding
3460    the statements in the statement sequence STMTS.
3461    WEIGHTS contains weights attributed to various constructs.  */
3462 
3463 static int
3464 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3465 {
3466   int cost;
3467   gimple_stmt_iterator gsi;
3468 
3469   cost = 0;
3470   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3471     cost += estimate_num_insns (gsi_stmt (gsi), weights);
3472 
3473   return cost;
3474 }
3475 
3476 
3477 /* Estimate number of instructions that will be created by expanding STMT.
3478    WEIGHTS contains weights attributed to various constructs.  */
3479 
3480 int
3481 estimate_num_insns (gimple stmt, eni_weights *weights)
3482 {
3483   unsigned cost, i;
3484   enum gimple_code code = gimple_code (stmt);
3485   tree lhs;
3486   tree rhs;
3487 
3488   switch (code)
3489     {
3490     case GIMPLE_ASSIGN:
3491       /* Try to estimate the cost of assignments.  We have two cases to
3492 	 deal with:
3493 	 1) Simple assignments to registers;
3494 	 2) Stores to things that must live in memory.  This includes
3495 	    "normal" stores to scalars, but also assignments of large
3496 	    structures, or constructors of big arrays;
3497 
3498 	 Let us look at both cases, assuming we have "a = b + C":
3499 	 <GIMPLE_ASSIGN <var_decl "a">
3500 	        <plus_expr <var_decl "b"> <constant C>>
3501 	 If "a" is a GIMPLE register, the assignment to it is free on almost
3502 	 any target, because "a" usually ends up in a real register.  Hence
3503 	 the only cost of this expression comes from the PLUS_EXPR, and we
3504 	 can ignore the GIMPLE_ASSIGN.
3505 	 If "a" is not a GIMPLE register, the assignment to "a" will most
3506 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3507 	 of moving something into "a", which we compute using the function
3508 	 estimate_move_cost.  */
3509       if (gimple_clobber_p (stmt))
3510 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
3511 
3512       lhs = gimple_assign_lhs (stmt);
3513       rhs = gimple_assign_rhs1 (stmt);
3514 
3515       cost = 0;
3516 
3517       /* Account for the cost of moving to / from memory.  */
3518       if (gimple_store_p (stmt))
3519 	cost += estimate_move_cost (TREE_TYPE (lhs));
3520       if (gimple_assign_load_p (stmt))
3521 	cost += estimate_move_cost (TREE_TYPE (rhs));
3522 
3523       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3524       				      gimple_assign_rhs1 (stmt),
3525 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3526 				      == GIMPLE_BINARY_RHS
3527 				      ? gimple_assign_rhs2 (stmt) : NULL);
3528       break;
3529 
3530     case GIMPLE_COND:
3531       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3532       				         gimple_op (stmt, 0),
3533 				         gimple_op (stmt, 1));
3534       break;
3535 
3536     case GIMPLE_SWITCH:
3537       /* Take into account the cost of the switch + guess 2 conditional jumps for
3538          each case label.
3539 
3540 	 TODO: once the switch expansion logic is sufficiently separated, we can
3541 	 do a better job of estimating the cost of the switch.  */
3542       if (weights->time_based)
3543         cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3544       else
3545         cost = gimple_switch_num_labels (stmt) * 2;
3546       break;
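
      /* For illustration: a switch with 16 case labels is charged
	 floor_log2 (16) * 2 == 8 when estimating time (roughly a balanced
	 decision tree), but 16 * 2 == 32 when estimating size.  */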
3547 
3548     case GIMPLE_CALL:
3549       {
3550 	tree decl = gimple_call_fndecl (stmt);
3551 	struct cgraph_node *node = NULL;
3552 
3553 	/* Do not special-case builtins where we see the body.
3554 	   This just confuses the inliner.  */
3555 	if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3556 	  ;
3557 	/* For builtins that are likely expanded to nothing or
3558 	   inlined, do not account for operand costs.  */
3559 	else if (is_simple_builtin (decl))
3560 	  return 0;
3561 	else if (is_inexpensive_builtin (decl))
3562 	  return weights->target_builtin_call_cost;
3563 	else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3564 	  {
3565 	    /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3566 	       specialize the cheap expansion we do here.
3567 	       ???  This asks for a more general solution.  */
3568 	    switch (DECL_FUNCTION_CODE (decl))
3569 	      {
3570 		case BUILT_IN_POW:
3571 		case BUILT_IN_POWF:
3572 		case BUILT_IN_POWL:
3573 		  if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3574 		      && REAL_VALUES_EQUAL
3575 			   (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3576 		    return estimate_operator_cost (MULT_EXPR, weights,
3577 						   gimple_call_arg (stmt, 0),
3578 						   gimple_call_arg (stmt, 0));
3579 		  break;
3580 
3581 		default:
3582 		  break;
3583 	      }
3584 	  }
3585 
3586 	cost = node ? weights->call_cost : weights->indirect_call_cost;
3587 	if (gimple_call_lhs (stmt))
3588 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3589 	for (i = 0; i < gimple_call_num_args (stmt); i++)
3590 	  {
3591 	    tree arg = gimple_call_arg (stmt, i);
3592 	    cost += estimate_move_cost (TREE_TYPE (arg));
3593 	  }
3594 	break;
3595       }
3596 
3597     case GIMPLE_RETURN:
3598       return weights->return_cost;
3599 
3600     case GIMPLE_GOTO:
3601     case GIMPLE_LABEL:
3602     case GIMPLE_NOP:
3603     case GIMPLE_PHI:
3604     case GIMPLE_PREDICT:
3605     case GIMPLE_DEBUG:
3606       return 0;
3607 
3608     case GIMPLE_ASM:
3609       return asm_str_count (gimple_asm_string (stmt));
3610 
3611     case GIMPLE_RESX:
3612       /* This is either going to be an external function call with one
3613 	 argument, or two register copy statements plus a goto.  */
3614       return 2;
3615 
3616     case GIMPLE_EH_DISPATCH:
3617       /* ??? This is going to turn into a switch statement.  Ideally
3618 	 we'd have a look at the eh region and estimate the number of
3619 	 edges involved.  */
3620       return 10;
3621 
3622     case GIMPLE_BIND:
3623       return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3624 
3625     case GIMPLE_EH_FILTER:
3626       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3627 
3628     case GIMPLE_CATCH:
3629       return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3630 
3631     case GIMPLE_TRY:
3632       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3633               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3634 
3635     /* OpenMP directives are generally very expensive.  */
3636 
3637     case GIMPLE_OMP_RETURN:
3638     case GIMPLE_OMP_SECTIONS_SWITCH:
3639     case GIMPLE_OMP_ATOMIC_STORE:
3640     case GIMPLE_OMP_CONTINUE:
3641       /* ...except these, which are cheap.  */
3642       return 0;
3643 
3644     case GIMPLE_OMP_ATOMIC_LOAD:
3645       return weights->omp_cost;
3646 
3647     case GIMPLE_OMP_FOR:
3648       return (weights->omp_cost
3649               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3650               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3651 
3652     case GIMPLE_OMP_PARALLEL:
3653     case GIMPLE_OMP_TASK:
3654     case GIMPLE_OMP_CRITICAL:
3655     case GIMPLE_OMP_MASTER:
3656     case GIMPLE_OMP_ORDERED:
3657     case GIMPLE_OMP_SECTION:
3658     case GIMPLE_OMP_SECTIONS:
3659     case GIMPLE_OMP_SINGLE:
3660       return (weights->omp_cost
3661               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3662 
3663     case GIMPLE_TRANSACTION:
3664       return (weights->tm_cost
3665 	      + estimate_num_insns_seq (gimple_transaction_body (stmt),
3666 					weights));
3667 
3668     default:
3669       gcc_unreachable ();
3670     }
3671 
3672   return cost;
3673 }
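
/* For illustration: "x_1 = y_2 / z_3" with register operands costs just
   the operator cost (weights->div_mod_cost, since z_3 is not a constant),
   while a store such as "mem = y_2 + 1" additionally pays the move cost
   of MEM's type on top of the cost of 1 for the PLUS_EXPR.  */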
3674 
3675 /* Estimate number of instructions that will be created by expanding
3676    function FNDECL.  WEIGHTS contains weights attributed to various
3677    constructs.  */
3678 
3679 int
3680 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3681 {
3682   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3683   gimple_stmt_iterator bsi;
3684   basic_block bb;
3685   int n = 0;
3686 
3687   gcc_assert (my_function && my_function->cfg);
3688   FOR_EACH_BB_FN (bb, my_function)
3689     {
3690       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3691 	n += estimate_num_insns (gsi_stmt (bsi), weights);
3692     }
3693 
3694   return n;
3695 }
3696 
3697 
3698 /* Initializes weights used by estimate_num_insns.  */
3699 
3700 void
3701 init_inline_once (void)
3702 {
3703   eni_size_weights.call_cost = 1;
3704   eni_size_weights.indirect_call_cost = 3;
3705   eni_size_weights.target_builtin_call_cost = 1;
3706   eni_size_weights.div_mod_cost = 1;
3707   eni_size_weights.omp_cost = 40;
3708   eni_size_weights.tm_cost = 10;
3709   eni_size_weights.time_based = false;
3710   eni_size_weights.return_cost = 1;
3711 
3712   /* Estimating the time for a call is difficult, since we have no idea what the
3713      called function does.  In the current uses of eni_time_weights,
3714      underestimating the cost does less harm than overestimating it, so
3715      we choose a rather small value here.  */
3716   eni_time_weights.call_cost = 10;
3717   eni_time_weights.indirect_call_cost = 15;
3718   eni_time_weights.target_builtin_call_cost = 1;
3719   eni_time_weights.div_mod_cost = 10;
3720   eni_time_weights.omp_cost = 40;
3721   eni_time_weights.tm_cost = 40;
3722   eni_time_weights.time_based = true;
3723   eni_time_weights.return_cost = 2;
3724 }
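
/* For illustration: under eni_time_weights, a direct call "a = f (b)" with
   a word-sized argument and result is charged call_cost (10) plus,
   typically, one move-cost unit each for the result and the argument,
   i.e. 12 in total; the same call through a function pointer starts from
   indirect_call_cost (15) instead.  */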
3725 
3726 /* Estimate the number of instructions in a gimple_seq. */
3727 
3728 int
3729 count_insns_seq (gimple_seq seq, eni_weights *weights)
3730 {
3731   gimple_stmt_iterator gsi;
3732   int n = 0;
3733   for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3734     n += estimate_num_insns (gsi_stmt (gsi), weights);
3735 
3736   return n;
3737 }
3738 
3739 
3740 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
3741 
3742 static void
3743 prepend_lexical_block (tree current_block, tree new_block)
3744 {
3745   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3746   BLOCK_SUBBLOCKS (current_block) = new_block;
3747   BLOCK_SUPERCONTEXT (new_block) = current_block;
3748 }
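
/* For illustration: if CURRENT_BLOCK already has subblocks B1 -> B2,
   prepending NEW_BLOCK yields the subblock chain NEW_BLOCK -> B1 -> B2,
   with NEW_BLOCK's BLOCK_SUPERCONTEXT pointing back at CURRENT_BLOCK.  */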
3749 
3750 /* Add local variables from CALLEE to CALLER.  */
3751 
3752 static inline void
3753 add_local_variables (struct function *callee, struct function *caller,
3754 		     copy_body_data *id)
3755 {
3756   tree var;
3757   unsigned ix;
3758 
3759   FOR_EACH_LOCAL_DECL (callee, ix, var)
3760     if (!can_be_nonlocal (var, id))
3761       {
3762         tree new_var = remap_decl (var, id);
3763 
3764         /* Remap debug-expressions.  */
3765 	if (TREE_CODE (new_var) == VAR_DECL
3766 	    && DECL_DEBUG_EXPR_IS_FROM (new_var)
3767 	    && new_var != var)
3768 	  {
3769 	    tree tem = DECL_DEBUG_EXPR (var);
3770 	    bool old_regimplify = id->regimplify;
3771 	    id->remapping_type_depth++;
3772 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
3773 	    id->remapping_type_depth--;
3774 	    id->regimplify = old_regimplify;
3775 	    SET_DECL_DEBUG_EXPR (new_var, tem);
3776 	  }
3777 	add_local_decl (caller, new_var);
3778       }
3779 }
3780 
3781 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
3782 
3783 static bool
3784 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3785 {
3786   tree use_retvar;
3787   tree fn;
3788   struct pointer_map_t *st, *dst;
3789   tree return_slot;
3790   tree modify_dest;
3791   location_t saved_location;
3792   struct cgraph_edge *cg_edge;
3793   cgraph_inline_failed_t reason;
3794   basic_block return_block;
3795   edge e;
3796   gimple_stmt_iterator gsi, stmt_gsi;
3797   bool successfully_inlined = FALSE;
3798   bool purge_dead_abnormal_edges;
3799 
3800   /* Set input_location here so we get the right instantiation context
3801      if we call instantiate_decl from inlinable_function_p.  */
3802   /* FIXME: instantiate_decl isn't called by inlinable_function_p.  */
3803   saved_location = input_location;
3804   input_location = gimple_location (stmt);
3805 
3806   /* From here on, we're only interested in CALL_EXPRs.  */
3807   if (gimple_code (stmt) != GIMPLE_CALL)
3808     goto egress;
3809 
3810   cg_edge = cgraph_edge (id->dst_node, stmt);
3811   gcc_checking_assert (cg_edge);
3812   /* First, see if we can figure out what function is being called.
3813      If we cannot, then there is no hope of inlining the function.  */
3814   if (cg_edge->indirect_unknown_callee)
3815     goto egress;
3816   fn = cg_edge->callee->symbol.decl;
3817   gcc_checking_assert (fn);
3818 
3819   /* If FN is a declaration of a function in a nested scope that was
3820      globally declared inline, we don't set its DECL_INITIAL.
3821      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3822      C++ front-end uses it for cdtors to refer to their internal
3823      declarations, that are not real functions.  Fortunately those
3824      don't have trees to be saved, so we can tell by checking their
3825      gimple_body.  */
3826   if (!DECL_INITIAL (fn)
3827       && DECL_ABSTRACT_ORIGIN (fn)
3828       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3829     fn = DECL_ABSTRACT_ORIGIN (fn);
3830 
3831   /* Don't try to inline functions that are not well-suited to inlining.  */
3832   if (cg_edge->inline_failed)
3833     {
3834       reason = cg_edge->inline_failed;
3835       /* If this call was originally indirect, we do not want to emit any
3836 	 inlining related warnings or sorry messages because there are no
3837 	 guarantees regarding those.  */
3838       if (cg_edge->indirect_inlining_edge)
3839 	goto egress;
3840 
3841       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3842           /* For extern inline functions that get redefined we have always
3843 	     silently ignored the always_inline flag.  Better behaviour
3844 	     would be to keep both bodies and use the extern inline body
3845 	     for inlining, but we can't do that because frontends overwrite
3846 	     the body.  */
3847 	  && !cg_edge->callee->local.redefined_extern_inline
3848 	  /* Avoid warnings during early inline pass. */
3849 	  && cgraph_global_info_ready
3850 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
3851 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3852 	{
3853 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
3854 		 cgraph_inline_failed_string (reason));
3855 	  error ("called from here");
3856 	}
3857       else if (warn_inline
3858 	       && DECL_DECLARED_INLINE_P (fn)
3859 	       && !DECL_NO_INLINE_WARNING_P (fn)
3860 	       && !DECL_IN_SYSTEM_HEADER (fn)
3861 	       && reason != CIF_UNSPECIFIED
3862 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3863 	       /* Do not warn about not inlined recursive calls.  */
3864 	       && !cgraph_edge_recursive_p (cg_edge)
3865 	       /* Avoid warnings during early inline pass. */
3866 	       && cgraph_global_info_ready)
3867 	{
3868 	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3869 		   fn, _(cgraph_inline_failed_string (reason)));
3870 	  warning (OPT_Winline, "called from here");
3871 	}
3872       goto egress;
3873     }
3874   fn = cg_edge->callee->symbol.decl;
3875 
3876 #ifdef ENABLE_CHECKING
3877   if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
3878     verify_cgraph_node (cg_edge->callee);
3879 #endif
3880 
3881   /* We will be inlining this callee.  */
3882   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3883 
3884   /* Update the caller's EH personality.  */
3885   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
3886     DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
3887       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
3888 
3889   /* Split the block holding the GIMPLE_CALL.  */
3890   e = split_block (bb, stmt);
3891   bb = e->src;
3892   return_block = e->dest;
3893   remove_edge (e);
3894 
3895   /* split_block splits after the statement; work around this by
3896      moving the call into the second block manually.  Not pretty,
3897      but seems easier than doing the CFG manipulation by hand
3898      when the GIMPLE_CALL is the last statement of BB.  */
3899   stmt_gsi = gsi_last_bb (bb);
3900   gsi_remove (&stmt_gsi, false);
3901 
3902   /* If the GIMPLE_CALL was the last statement of BB, it may have
3903      been the source of abnormal edges.  In this case, schedule
3904      the removal of dead abnormal edges.  */
3905   gsi = gsi_start_bb (return_block);
3906   if (gsi_end_p (gsi))
3907     {
3908       gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3909       purge_dead_abnormal_edges = true;
3910     }
3911   else
3912     {
3913       gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3914       purge_dead_abnormal_edges = false;
3915     }
3916 
3917   stmt_gsi = gsi_start_bb (return_block);
3918 
3919   /* Build a block containing code to initialize the arguments, the
3920      actual inline expansion of the body, and a label for the return
3921      statements within the function to jump to.  The type of the
3922      statement expression is the return type of the function call.
3923      ???  If the call does not have an associated block then we will
3924      remap all callee blocks to NULL, effectively dropping most of
3925      its debug information.  This should only happen for calls to
3926      artificial decls inserted by the compiler itself.  We need to
3927      either link the inlined blocks into the caller block tree or
3928      not refer to them in any way to not break GC for locations.  */
3929   if (gimple_block (stmt))
3930     {
3931       id->block = make_node (BLOCK);
3932       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3933       BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
3934       prepend_lexical_block (gimple_block (stmt), id->block);
3935     }
3936 
3937   /* Local declarations will be replaced by their equivalents in this
3938      map.  */
3939   st = id->decl_map;
3940   id->decl_map = pointer_map_create ();
3941   dst = id->debug_map;
3942   id->debug_map = NULL;
3943 
3944   /* Record the function we are about to inline.  */
3945   id->src_fn = fn;
3946   id->src_node = cg_edge->callee;
3947   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3948   id->gimple_call = stmt;
3949 
3950   gcc_assert (!id->src_cfun->after_inlining);
3951 
3952   id->entry_bb = bb;
3953   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3954     {
3955       gimple_stmt_iterator si = gsi_last_bb (bb);
3956       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3957       						   NOT_TAKEN),
3958 			GSI_NEW_STMT);
3959     }
3960   initialize_inlined_parameters (id, stmt, fn, bb);
3961 
3962   if (DECL_INITIAL (fn))
3963     {
3964       if (gimple_block (stmt))
3965 	{
3966 	  tree *var;
3967 
3968 	  prepend_lexical_block (id->block,
3969 				 remap_blocks (DECL_INITIAL (fn), id));
3970 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
3971 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
3972 				   == NULL_TREE));
3973 	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
3974 	     otherwise in DWARF the DW_TAG_formal_parameter DIEs will not be
3975 	     children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
3976 	     under it.  The parameters can then be evaluated in the debugger,
3977 	     but don't show up in backtraces.  */
3978 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
3979 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
3980 	      {
3981 		tree v = *var;
3982 		*var = TREE_CHAIN (v);
3983 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
3984 		BLOCK_VARS (id->block) = v;
3985 	      }
3986 	    else
3987 	      var = &TREE_CHAIN (*var);
3988 	}
3989       else
3990 	remap_blocks_to_null (DECL_INITIAL (fn), id);
3991     }
3992 
3993   /* Return statements in the function body will be replaced by jumps
3994      to the RET_LABEL.  */
3995   gcc_assert (DECL_INITIAL (fn));
3996   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3997 
3998   /* Find the LHS to which the result of this call is assigned.  */
3999   return_slot = NULL;
4000   if (gimple_call_lhs (stmt))
4001     {
4002       modify_dest = gimple_call_lhs (stmt);
4003 
4004       /* The function which we are inlining might not return a value,
4005 	 in which case we should issue a warning that the function
4006 	 does not return a value.  In that case the optimizers will
4007 	 see that the variable to which the value is assigned was not
4008 	 initialized.  We do not want to issue a warning about that
4009 	 uninitialized variable.  */
4010       if (DECL_P (modify_dest))
4011 	TREE_NO_WARNING (modify_dest) = 1;
4012 
4013       if (gimple_call_return_slot_opt_p (stmt))
4014 	{
4015 	  return_slot = modify_dest;
4016 	  modify_dest = NULL;
4017 	}
4018     }
4019   else
4020     modify_dest = NULL;
4021 
4022   /* If we are inlining a call to the C++ operator new, we don't want
4023      to use type based alias analysis on the return value.  Otherwise
4024      we may get confused if the compiler sees that the inlined new
4025      function returns a pointer which was just deleted.  See bug
4026      33407.  */
4027   if (DECL_IS_OPERATOR_NEW (fn))
4028     {
4029       return_slot = NULL;
4030       modify_dest = NULL;
4031     }
4032 
4033   /* Declare the return variable for the function.  */
4034   use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4035 
4036   /* Add local vars in this inlined callee to caller.  */
4037   add_local_variables (id->src_cfun, cfun, id);
4038 
4039   if (dump_file && (dump_flags & TDF_DETAILS))
4040     {
4041       fprintf (dump_file, "Inlining ");
4042       print_generic_expr (dump_file, id->src_fn, 0);
4043       fprintf (dump_file, " to ");
4044       print_generic_expr (dump_file, id->dst_fn, 0);
4045       fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4046     }
4047 
4048   /* This is it.  Duplicate the callee body.  Assume callee is
4049      pre-gimplified.  Note that we must not alter the caller
4050      function in any way before this point, as this CALL_EXPR may be
4051      a self-referential call; if we're calling ourselves, we need to
4052      duplicate our body before altering anything.  */
4053   copy_body (id, bb->count,
4054   	     cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
4055 	     bb, return_block, NULL, NULL);
4056 
4057   /* Reset the escaped solution.  */
4058   if (cfun->gimple_df)
4059     pt_solution_reset (&cfun->gimple_df->escaped);
4060 
4061   /* Clean up.  */
4062   if (id->debug_map)
4063     {
4064       pointer_map_destroy (id->debug_map);
4065       id->debug_map = dst;
4066     }
4067   pointer_map_destroy (id->decl_map);
4068   id->decl_map = st;
4069 
4070   /* Unlink the call's virtual operands before replacing it.  */
4071   unlink_stmt_vdef (stmt);
4072 
4073   /* If the inlined function returns a result that we care about,
4074      substitute the GIMPLE_CALL with an assignment of the return
4075      variable to the LHS of the call.  That is, if STMT was
4076      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4077   if (use_retvar && gimple_call_lhs (stmt))
4078     {
4079       gimple old_stmt = stmt;
4080       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4081       gsi_replace (&stmt_gsi, stmt, false);
4082       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4083     }
4084   else
4085     {
4086       /* Handle the case of inlining a function with no return
4087 	 statement, which causes the return value to become undefined.  */
4088       if (gimple_call_lhs (stmt)
4089 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4090 	{
4091 	  tree name = gimple_call_lhs (stmt);
4092 	  tree var = SSA_NAME_VAR (name);
4093 	  tree def = ssa_default_def (cfun, var);
4094 
4095 	  if (def)
4096 	    {
4097 	      /* If the variable is used undefined, make this name
4098 		 undefined via a move.  */
4099 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4100 	      gsi_replace (&stmt_gsi, stmt, true);
4101 	    }
4102 	  else
4103 	    {
4104 	      /* Otherwise make this variable undefined.  */
4105 	      gsi_remove (&stmt_gsi, true);
4106 	      set_ssa_default_def (cfun, var, name);
4107 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4108 	    }
4109 	}
4110       else
4111         gsi_remove (&stmt_gsi, true);
4112     }
4113 
4114   if (purge_dead_abnormal_edges)
4115     {
4116       gimple_purge_dead_eh_edges (return_block);
4117       gimple_purge_dead_abnormal_call_edges (return_block);
4118     }
4119 
4120   /* If the value of the new expression is ignored, that's OK.  We
4121      don't warn about this for CALL_EXPRs, so we shouldn't warn about
4122      the equivalent inlined version either.  */
4123   if (is_gimple_assign (stmt))
4124     {
4125       gcc_assert (gimple_assign_single_p (stmt)
4126 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4127       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4128     }
4129 
4130   /* Output the inlining info for this abstract function, since it has been
4131      inlined.  If we don't do this now, we can lose the information about the
4132      variables in the function when the blocks get blown away as soon as we
4133      remove the cgraph node.  */
4134   if (gimple_block (stmt))
4135     (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
4136 
4137   /* Update callgraph if needed.  */
4138   cgraph_remove_node (cg_edge->callee);
4139 
4140   id->block = NULL_TREE;
4141   successfully_inlined = TRUE;
4142 
4143  egress:
4144   input_location = saved_location;
4145   return successfully_inlined;
4146 }
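
/* For illustration, the net effect of expand_call_inline on "a = foo (x)",
   where foo is "int foo (int p) { return p + 1; }", is roughly (temporary
   names made up):

     p.0 = x;             <- initialize_inlined_parameters
     retval.1 = p.0 + 1;  <- copied body; the return feeds use_retvar
     a = retval.1;        <- the GIMPLE_CALL replaced by an assignment  */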
4147 
4148 /* Expand call statements in basic block BB.
4149    We can only have CALL_EXPRs as the "toplevel" tree code or nested
4150    in a MODIFY_EXPR.  */
4151 
4152 static bool
4153 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4154 {
4155   gimple_stmt_iterator gsi;
4156 
4157   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4158     {
4159       gimple stmt = gsi_stmt (gsi);
4160 
4161       if (is_gimple_call (stmt)
4162 	  && expand_call_inline (bb, stmt, id))
4163 	return true;
4164     }
4165 
4166   return false;
4167 }
4168 
4169 
4170 /* Walk all basic blocks created after FIRST and try to fold every statement
4171    in the STATEMENTS pointer set.  */
4172 
4173 static void
4174 fold_marked_statements (int first, struct pointer_set_t *statements)
4175 {
4176   for (; first < n_basic_blocks; first++)
4177     if (BASIC_BLOCK (first))
4178       {
4179         gimple_stmt_iterator gsi;
4180 
4181 	for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4182 	     !gsi_end_p (gsi);
4183 	     gsi_next (&gsi))
4184 	  if (pointer_set_contains (statements, gsi_stmt (gsi)))
4185 	    {
4186 	      gimple old_stmt = gsi_stmt (gsi);
4187 	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4188 
4189 	      if (old_decl && DECL_BUILT_IN (old_decl))
4190 		{
4191 		  /* Folding builtins can create multiple instructions;
4192 		     we need to look at all of them.  */
4193 		  gimple_stmt_iterator i2 = gsi;
4194 		  gsi_prev (&i2);
4195 		  if (fold_stmt (&gsi))
4196 		    {
4197 		      gimple new_stmt;
4198 		      /* If a builtin at the end of a bb folded into nothing,
4199 			 the following loop won't work.  */
4200 		      if (gsi_end_p (gsi))
4201 			{
4202 			  cgraph_update_edges_for_call_stmt (old_stmt,
4203 							     old_decl, NULL);
4204 			  break;
4205 			}
4206 		      if (gsi_end_p (i2))
4207 			i2 = gsi_start_bb (BASIC_BLOCK (first));
4208 		      else
4209 			gsi_next (&i2);
4210 		      while (1)
4211 			{
4212 			  new_stmt = gsi_stmt (i2);
4213 			  update_stmt (new_stmt);
4214 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4215 							     new_stmt);
4216 
4217 			  if (new_stmt == gsi_stmt (gsi))
4218 			    {
4219 	      /* It is okay to check only for the very last
4220 		 of these statements.  If it is a throwing
4221 		 statement, nothing will change.  If it isn't,
4222 		 this can remove EH edges.  The only way that
4223 		 could be incorrect is if some intermediate
4224 		 stmts threw but the last one didn't; that
4225 		 would mean we'd have to split the block,
4226 		 which we can't do here, and we'd lose the EH
4227 		 edges anyway.  And as builtins probably never
4228 		 throw, this is all moot anyway.  */
4229 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
4230 								  new_stmt))
4231 				gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4232 			      break;
4233 			    }
4234 			  gsi_next (&i2);
4235 			}
4236 		    }
4237 		}
4238 	      else if (fold_stmt (&gsi))
4239 		{
4240 		  /* Re-read the statement from GSI as fold_stmt() may
4241 		     have changed it.  */
4242 		  gimple new_stmt = gsi_stmt (gsi);
4243 		  update_stmt (new_stmt);
4244 
4245 		  if (is_gimple_call (old_stmt)
4246 		      || is_gimple_call (new_stmt))
4247 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4248 						       new_stmt);
4249 
4250 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4251 		    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4252 		}
4253 	    }
4254       }
4255 }
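
/* For illustration: once inlining has exposed constant arguments, folding
   a marked "tmp = strlen ("abc")" can yield "tmp = 3", and folding a small
   constant-size __builtin_memcpy may expand into one or more assignments;
   the inner loop above exists precisely because such builtin folds can
   produce multiple statements.  */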
4256 
4257 /* Return true if BB has at least one abnormal outgoing edge.  */
4258 
4259 static inline bool
4260 has_abnormal_outgoing_edge_p (basic_block bb)
4261 {
4262   edge e;
4263   edge_iterator ei;
4264 
4265   FOR_EACH_EDGE (e, ei, bb->succs)
4266     if (e->flags & EDGE_ABNORMAL)
4267       return true;
4268 
4269   return false;
4270 }
4271 
4272 /* Expand calls to inline functions in the body of FN.  */
4273 
4274 unsigned int
4275 optimize_inline_calls (tree fn)
4276 {
4277   copy_body_data id;
4278   basic_block bb;
4279   int last = n_basic_blocks;
4280   struct gimplify_ctx gctx;
4281   bool inlined_p = false;
4282 
4283   /* Clear out ID.  */
4284   memset (&id, 0, sizeof (id));
4285 
4286   id.src_node = id.dst_node = cgraph_get_node (fn);
4287   gcc_assert (id.dst_node->analyzed);
4288   id.dst_fn = fn;
4289   /* Or any functions that aren't finished yet.  */
4290   if (current_function_decl)
4291     id.dst_fn = current_function_decl;
4292 
4293   id.copy_decl = copy_decl_maybe_to_var;
4294   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4295   id.transform_new_cfg = false;
4296   id.transform_return_to_modify = true;
4297   id.transform_lang_insert_block = NULL;
4298   id.statements_to_fold = pointer_set_create ();
4299 
4300   push_gimplify_context (&gctx);
4301 
4302   /* We make no attempts to keep dominance info up-to-date.  */
4303   free_dominance_info (CDI_DOMINATORS);
4304   free_dominance_info (CDI_POST_DOMINATORS);
4305 
4306   /* Register specific gimple functions.  */
4307   gimple_register_cfg_hooks ();
4308 
4309   /* Reach the trees by walking over the CFG, and note the
4310      enclosing basic-blocks in the call edges.  */
4311   /* We walk the blocks going forward, because inlined function bodies
4312      will split id->current_basic_block, and the new blocks will
4313      follow it; we'll trudge through them, processing their CALL_EXPRs
4314      along the way.  */
4315   FOR_EACH_BB (bb)
4316     inlined_p |= gimple_expand_calls_inline (bb, &id);
4317 
4318   pop_gimplify_context (NULL);
4319 
4320 #ifdef ENABLE_CHECKING
4321     {
4322       struct cgraph_edge *e;
4323 
4324       verify_cgraph_node (id.dst_node);
4325 
4326       /* Double check that we inlined everything we are supposed to inline.  */
4327       for (e = id.dst_node->callees; e; e = e->next_callee)
4328 	gcc_assert (e->inline_failed);
4329     }
4330 #endif
4331 
4332   /* Fold queued statements.  */
4333   fold_marked_statements (last, id.statements_to_fold);
4334   pointer_set_destroy (id.statements_to_fold);
4335 
4336   gcc_assert (!id.debug_stmts.exists ());
4337 
4338   /* If we didn't inline into the function there is nothing to do.  */
4339   if (!inlined_p)
4340     return 0;
4341 
4342   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
4343   number_blocks (fn);
4344 
4345   delete_unreachable_blocks_update_callgraph (&id);
4346 #ifdef ENABLE_CHECKING
4347   verify_cgraph_node (id.dst_node);
4348 #endif
4349 
4350   /* It would be nice to check SSA/CFG/statement consistency here, but it is
4351      not possible yet - the IPA passes might make various functions not
4352      throw, and they don't care to proactively update local EH info.  This is
4353      done later in the fixup_cfg pass, which also executes the verification.  */
4354   return (TODO_update_ssa
4355 	  | TODO_cleanup_cfg
4356 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4357 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4358 	  | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4359 }
4360 
4361 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
4362 
4363 tree
4364 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4365 {
4366   enum tree_code code = TREE_CODE (*tp);
4367   enum tree_code_class cl = TREE_CODE_CLASS (code);
4368 
4369   /* We make copies of most nodes.  */
4370   if (IS_EXPR_CODE_CLASS (cl)
4371       || code == TREE_LIST
4372       || code == TREE_VEC
4373       || code == TYPE_DECL
4374       || code == OMP_CLAUSE)
4375     {
4376       /* Because the chain gets clobbered when we make a copy, we save it
4377 	 here.  */
4378       tree chain = NULL_TREE, new_tree;
4379 
4380       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4381 	chain = TREE_CHAIN (*tp);
4382 
4383       /* Copy the node.  */
4384       new_tree = copy_node (*tp);
4385 
4386       /* Propagate mudflap marked-ness.  */
4387       if (flag_mudflap && mf_marked_p (*tp))
4388         mf_mark (new_tree);
4389 
4390       *tp = new_tree;
4391 
4392       /* Now, restore the chain, if appropriate.  That will cause
4393 	 walk_tree to walk into the chain as well.  */
4394       if (code == PARM_DECL
4395 	  || code == TREE_LIST
4396 	  || code == OMP_CLAUSE)
4397 	TREE_CHAIN (*tp) = chain;
4398 
4399       /* For now, we don't update BLOCKs when we make copies.  So, we
4400 	 have to nullify all BIND_EXPRs.  */
4401       if (TREE_CODE (*tp) == BIND_EXPR)
4402 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4403     }
4404   else if (code == CONSTRUCTOR)
4405     {
4406       /* CONSTRUCTOR nodes need special handling because
4407          we need to duplicate the vector of elements.  */
4408       tree new_tree;
4409 
4410       new_tree = copy_node (*tp);
4411 
4412       /* Propagate mudflap marked-ness.  */
4413       if (flag_mudflap && mf_marked_p (*tp))
4414         mf_mark (new_tree);
4415 
4416       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4417       *tp = new_tree;
4418     }
4419   else if (code == STATEMENT_LIST)
4420     /* We used to just abort on STATEMENT_LIST, but we can run into them
4421        with statement-expressions (c++/40975).  */
4422     copy_statement_list (tp);
4423   else if (TREE_CODE_CLASS (code) == tcc_type)
4424     *walk_subtrees = 0;
4425   else if (TREE_CODE_CLASS (code) == tcc_declaration)
4426     *walk_subtrees = 0;
4427   else if (TREE_CODE_CLASS (code) == tcc_constant)
4428     *walk_subtrees = 0;
4429   return NULL_TREE;
4430 }
4431 
4432 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
4433    information indicating to what new SAVE_EXPR this one should be mapped,
4434    use that one.  Otherwise, create a new node and enter it in ST.  FN is
4435    the function into which the copy will be placed.  */
4436 
4437 static void
4438 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4439 {
4440   struct pointer_map_t *st = (struct pointer_map_t *) st_;
4441   tree *n;
4442   tree t;
4443 
4444   /* See if we already encountered this SAVE_EXPR.  */
4445   n = (tree *) pointer_map_contains (st, *tp);
4446 
4447   /* If we didn't already remap this SAVE_EXPR, do so now.  */
4448   if (!n)
4449     {
4450       t = copy_node (*tp);
4451 
4452       /* Remember this SAVE_EXPR.  */
4453       *pointer_map_insert (st, *tp) = t;
4454       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
4455       *pointer_map_insert (st, t) = t;
4456     }
4457   else
4458     {
4459       /* We've already walked into this SAVE_EXPR; don't do it again.  */
4460       *walk_subtrees = 0;
4461       t = *n;
4462     }
4463 
4464   /* Replace this SAVE_EXPR with the copy.  */
4465   *tp = t;
4466 }
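
/* For illustration: a SAVE_EXPR is typically reached several times during
   the walk (that is its purpose), so the first visit copies it and maps
   both the original and the copy to the copy; any later visit, including
   one that reaches the copy itself, then reuses it instead of duplicating
   it again.  */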
4467 
4468 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
4469    copies the declaration and enters it in the decl map in DATA (which is
4470    really a `copy_body_data *').  */
4471 
4472 static tree
4473 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4474 			void *data)
4475 {
4476   copy_body_data *id = (copy_body_data *) data;
4477 
4478   /* Don't walk into types.  */
4479   if (TYPE_P (*tp))
4480     *walk_subtrees = 0;
4481 
4482   else if (TREE_CODE (*tp) == LABEL_EXPR)
4483     {
4484       tree decl = TREE_OPERAND (*tp, 0);
4485 
4486       /* Copy the decl and remember the copy.  */
4487       insert_decl_map (id, decl, id->copy_decl (decl, id));
4488     }
4489 
4490   return NULL_TREE;
4491 }
4492 
4493 /* Perform any modifications to EXPR required when it is unsaved.  Does
4494    not recurse into EXPR's subtrees.  */
4495 
4496 static void
4497 unsave_expr_1 (tree expr)
4498 {
4499   switch (TREE_CODE (expr))
4500     {
4501     case TARGET_EXPR:
4502       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4503          It's OK for this to happen if it was part of a subtree that
4504          isn't immediately expanded, such as operand 2 of another
4505          TARGET_EXPR.  */
4506       if (TREE_OPERAND (expr, 1))
4507 	break;
4508 
4509       TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4510       TREE_OPERAND (expr, 3) = NULL_TREE;
4511       break;
4512 
4513     default:
4514       break;
4515     }
4516 }
4517 
4518 /* Called via walk_tree when an expression is unsaved.  Using the
4519    decl map pointed to by ST (which is really a `pointer_map_t *'),
4520    remaps all local declarations to appropriate replacements.  */
4521 
4522 static tree
4523 unsave_r (tree *tp, int *walk_subtrees, void *data)
4524 {
4525   copy_body_data *id = (copy_body_data *) data;
4526   struct pointer_map_t *st = id->decl_map;
4527   tree *n;
4528 
4529   /* Only a local declaration (variable or label).  */
4530   if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4531       || TREE_CODE (*tp) == LABEL_DECL)
4532     {
4533       /* Lookup the declaration.  */
4534       n = (tree *) pointer_map_contains (st, *tp);
4535 
4536       /* If it's there, remap it.  */
4537       if (n)
4538 	*tp = *n;
4539     }
4540 
4541   else if (TREE_CODE (*tp) == STATEMENT_LIST)
4542     gcc_unreachable ();
4543   else if (TREE_CODE (*tp) == BIND_EXPR)
4544     copy_bind_expr (tp, walk_subtrees, id);
4545   else if (TREE_CODE (*tp) == SAVE_EXPR
4546 	   || TREE_CODE (*tp) == TARGET_EXPR)
4547     remap_save_expr (tp, st, walk_subtrees);
4548   else
4549     {
4550       copy_tree_r (tp, walk_subtrees, NULL);
4551 
4552       /* Do whatever unsaving is required.  */
4553       unsave_expr_1 (*tp);
4554     }
4555 
4556   /* Keep iterating.  */
4557   return NULL_TREE;
4558 }
4559 
4560 /* Copies everything in EXPR and replaces variables, labels
4561    and SAVE_EXPRs local to EXPR.  */
4562 
4563 tree
4564 unsave_expr_now (tree expr)
4565 {
4566   copy_body_data id;
4567 
4568   /* There's nothing to do for NULL_TREE.  */
4569   if (expr == 0)
4570     return expr;
4571 
4572   /* Set up ID.  */
4573   memset (&id, 0, sizeof (id));
4574   id.src_fn = current_function_decl;
4575   id.dst_fn = current_function_decl;
4576   id.decl_map = pointer_map_create ();
4577   id.debug_map = NULL;
4578 
4579   id.copy_decl = copy_decl_no_change;
4580   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4581   id.transform_new_cfg = false;
4582   id.transform_return_to_modify = false;
4583   id.transform_lang_insert_block = NULL;
4584 
4585   /* Walk the tree once to find local labels.  */
4586   walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4587 
4588   /* Walk the tree again, copying, remapping, and unsaving.  */
4589   walk_tree (&expr, unsave_r, &id, NULL);
4590 
4591   /* Clean up.  */
4592   pointer_map_destroy (id.decl_map);
4593   if (id.debug_map)
4594     pointer_map_destroy (id.debug_map);
4595 
4596   return expr;
4597 }
4598 
4599 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4600    label, copies the declaration and enters it in the decl map in DATA (which
4601    is really a 'copy_body_data *').  */
4602 
4603 static tree
4604 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4605 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
4606 		        struct walk_stmt_info *wi)
4607 {
4608   copy_body_data *id = (copy_body_data *) wi->info;
4609   gimple stmt = gsi_stmt (*gsip);
4610 
4611   if (gimple_code (stmt) == GIMPLE_LABEL)
4612     {
4613       tree decl = gimple_label_label (stmt);
4614 
4615       /* Copy the decl and remember the copy.  */
4616       insert_decl_map (id, decl, id->copy_decl (decl, id));
4617     }
4618 
4619   return NULL_TREE;
4620 }
4621 
4622 
4623 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4624    Using the decl map pointed to by ST (which is really a `pointer_map_t *'),
4625    remaps all local declarations to appropriate replacements in gimple
4626    operands. */
4627 
4628 static tree
4629 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4630 {
4631   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4632   copy_body_data *id = (copy_body_data *) wi->info;
4633   struct pointer_map_t *st = id->decl_map;
4634   tree *n;
4635   tree expr = *tp;
4636 
4637   /* Only a local declaration (variable or label).  */
4638   if ((TREE_CODE (expr) == VAR_DECL
4639        && !TREE_STATIC (expr))
4640       || TREE_CODE (expr) == LABEL_DECL)
4641     {
4642       /* Lookup the declaration.  */
4643       n = (tree *) pointer_map_contains (st, expr);
4644 
4645       /* If it's there, remap it.  */
4646       if (n)
4647 	*tp = *n;
4648       *walk_subtrees = 0;
4649     }
4650   else if (TREE_CODE (expr) == STATEMENT_LIST
4651 	   || TREE_CODE (expr) == BIND_EXPR
4652 	   || TREE_CODE (expr) == SAVE_EXPR)
4653     gcc_unreachable ();
4654   else if (TREE_CODE (expr) == TARGET_EXPR)
4655     {
4656       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4657          It's OK for this to happen if it was part of a subtree that
4658          isn't immediately expanded, such as operand 2 of another
4659          TARGET_EXPR.  */
4660       if (!TREE_OPERAND (expr, 1))
4661 	{
4662 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4663 	  TREE_OPERAND (expr, 3) = NULL_TREE;
4664 	}
4665     }
4666 
4667   /* Keep iterating.  */
4668   return NULL_TREE;
4669 }
4670 
4671 
4672 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4673    Using the decl map in the `copy_body_data *' passed in WI->info,
4674    remaps all local declarations to appropriate replacements in gimple
4675    statements. */
4676 
4677 static tree
4678 replace_locals_stmt (gimple_stmt_iterator *gsip,
4679 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
4680 		     struct walk_stmt_info *wi)
4681 {
4682   copy_body_data *id = (copy_body_data *) wi->info;
4683   gimple stmt = gsi_stmt (*gsip);
4684 
4685   if (gimple_code (stmt) == GIMPLE_BIND)
4686     {
4687       tree block = gimple_bind_block (stmt);
4688 
4689       if (block)
4690 	{
4691 	  remap_block (&block, id);
4692 	  gimple_bind_set_block (stmt, block);
4693 	}
4694 
4695       /* This will remap a lot of the same decls again, but this should be
4696 	 harmless.  */
4697       if (gimple_bind_vars (stmt))
4698 	gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4699 						 NULL, id));
4700     }
4701 
4702   /* Keep iterating.  */
4703   return NULL_TREE;
4704 }
4705 
4706 
4707 /* Copies everything in SEQ and replaces variables and labels local to
4708    current_function_decl.  */
4709 
4710 gimple_seq
4711 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4712 {
4713   copy_body_data id;
4714   struct walk_stmt_info wi;
4715   struct pointer_set_t *visited;
4716   gimple_seq copy;
4717 
4718   /* There's nothing to do for an empty sequence.  */
4719   if (seq == NULL)
4720     return seq;
4721 
4722   /* Set up ID.  */
4723   memset (&id, 0, sizeof (id));
4724   id.src_fn = current_function_decl;
4725   id.dst_fn = current_function_decl;
4726   id.decl_map = pointer_map_create ();
4727   id.debug_map = NULL;
4728 
4729   id.copy_decl = copy_decl_no_change;
4730   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4731   id.transform_new_cfg = false;
4732   id.transform_return_to_modify = false;
4733   id.transform_lang_insert_block = NULL;
4734 
4735   /* Walk the tree once to find local labels.  */
4736   memset (&wi, 0, sizeof (wi));
4737   visited = pointer_set_create ();
4738   wi.info = &id;
4739   wi.pset = visited;
4740   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4741   pointer_set_destroy (visited);
4742 
4743   copy = gimple_seq_copy (seq);
4744 
4745   /* Walk the copy, remapping decls.  */
4746   memset (&wi, 0, sizeof (wi));
4747   wi.info = &id;
4748   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4749 
4750   /* Clean up.  */
4751   pointer_map_destroy (id.decl_map);
4752   if (id.debug_map)
4753     pointer_map_destroy (id.debug_map);
4754 
4755   return copy;
4756 }
4757 
4758 
4759 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
4760 
4761 static tree
4762 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4763 {
4764   if (*tp == data)
4765     return (tree) data;
4766   else
4767     return NULL;
4768 }
4769 
4770 DEBUG_FUNCTION bool
4771 debug_find_tree (tree top, tree search)
4772 {
4773   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4774 }
4775 
4776 
4777 /* Declare the variables created by the inliner.  Add all the variables in
4778    VARS to BLOCK.  */
4779 
4780 static void
4781 declare_inline_vars (tree block, tree vars)
4782 {
4783   tree t;
4784   for (t = vars; t; t = DECL_CHAIN (t))
4785     {
4786       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4787       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4788       add_local_decl (cfun, t);
4789     }
4790 
4791   if (block)
4792     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4793 }
4794 
4795 /* Finish up COPY, a duplicate of DECL.  The DECL originally was in
4796    ID->src_fn, but the copy will be in ID->dst_fn.  Shared tail of the
4797    copy_decl_* routines below.  */
4798 
4799 static tree
4800 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4801 {
4802   /* Don't generate debug information for the copy if we wouldn't have
4803      generated it for the original either.  */
4804   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4805   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4806 
4807   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4808      declaration inspired this copy.  */
4809   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4810 
4811   /* The new variable/label has no RTL, yet.  */
4812   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4813       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4814     SET_DECL_RTL (copy, 0);
4815 
4816   /* These args would always appear unused, if not for this.  */
4817   TREE_USED (copy) = 1;
4818 
4819   /* Set the context for the new declaration.  */
4820   if (!DECL_CONTEXT (decl))
4821     /* Globals stay global.  */
4822     ;
4823   else if (DECL_CONTEXT (decl) != id->src_fn)
4824     /* Things that weren't in the scope of the function we're inlining
4825        from aren't in the scope we're inlining to, either.  */
4826     ;
4827   else if (TREE_STATIC (decl))
4828     /* Function-scoped static variables should stay in the original
4829        function.  */
4830     ;
4831   else
4832     /* Ordinary automatic local variables are now in the scope of the
4833        new function.  */
4834     DECL_CONTEXT (copy) = id->dst_fn;
4835 
4836   return copy;
4837 }
4838 
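/* Illustration of the DECL_CONTEXT rules above (a sketch, assuming we
   are copying decls out of a function g into a function f):

     int global;		   no context: stays global
     static int counter;	   function-scoped static: stays in g
     int local;			   automatic local of g: moves to f

   Only the last case reaches DECL_CONTEXT (copy) = id->dst_fn.  */
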
4839 static tree
4840 copy_decl_to_var (tree decl, copy_body_data *id)
4841 {
4842   tree copy, type;
4843 
4844   gcc_assert (TREE_CODE (decl) == PARM_DECL
4845 	      || TREE_CODE (decl) == RESULT_DECL);
4846 
4847   type = TREE_TYPE (decl);
4848 
4849   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4850 		     VAR_DECL, DECL_NAME (decl), type);
4851   if (DECL_PT_UID_SET_P (decl))
4852     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4853   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4854   TREE_READONLY (copy) = TREE_READONLY (decl);
4855   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4856   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4857 
4858   return copy_decl_for_dup_finish (id, decl, copy);
4859 }
4860 
4861 /* Like copy_decl_to_var, but create a return slot object instead of a
4862    pointer variable for return by invisible reference.  */
4863 
4864 static tree
4865 copy_result_decl_to_var (tree decl, copy_body_data *id)
4866 {
4867   tree copy, type;
4868 
4869   gcc_assert (TREE_CODE (decl) == PARM_DECL
4870 	      || TREE_CODE (decl) == RESULT_DECL);
4871 
4872   type = TREE_TYPE (decl);
4873   if (DECL_BY_REFERENCE (decl))
4874     type = TREE_TYPE (type);
4875 
4876   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4877 		     VAR_DECL, DECL_NAME (decl), type);
4878   if (DECL_PT_UID_SET_P (decl))
4879     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4880   TREE_READONLY (copy) = TREE_READONLY (decl);
4881   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4882   if (!DECL_BY_REFERENCE (decl))
4883     {
4884       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4885       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4886     }
4887 
4888   return copy_decl_for_dup_finish (id, decl, copy);
4889 }
4890 
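/* Example of the invisible-reference case handled above (a sketch):
   for

     struct big f (void);

   returned in memory, DECL_RESULT (f) has type "struct big *" with
   DECL_BY_REFERENCE set.  Stripping the pointer type yields a
   VAR_DECL of type "struct big" that can serve as the return slot.  */
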
4891 tree
4892 copy_decl_no_change (tree decl, copy_body_data *id)
4893 {
4894   tree copy;
4895 
4896   copy = copy_node (decl);
4897 
4898   /* The COPY is not abstract; it will be generated in DST_FN.  */
4899   DECL_ABSTRACT (copy) = 0;
4900   lang_hooks.dup_lang_specific_decl (copy);
4901 
4902   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4903      been taken; it's for internal bookkeeping in expand_goto_internal.  */
4904   if (TREE_CODE (copy) == LABEL_DECL)
4905     {
4906       TREE_ADDRESSABLE (copy) = 0;
4907       LABEL_DECL_UID (copy) = -1;
4908     }
4909 
4910   return copy_decl_for_dup_finish (id, decl, copy);
4911 }
4912 
4913 static tree
4914 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4915 {
4916   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4917     return copy_decl_to_var (decl, id);
4918   else
4919     return copy_decl_no_change (decl, id);
4920 }
4921 
4922 /* Return a copy of the function's argument tree.  */
4923 static tree
4924 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4925 			       bitmap args_to_skip, tree *vars)
4926 {
4927   tree arg, *parg;
4928   tree new_parm = NULL;
4929   int i = 0;
4930 
4931   parg = &new_parm;
4932 
4933   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4934     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4935       {
4936         tree new_tree = remap_decl (arg, id);
4937 	if (TREE_CODE (new_tree) != PARM_DECL)
4938 	  new_tree = id->copy_decl (arg, id);
4939         lang_hooks.dup_lang_specific_decl (new_tree);
4940         *parg = new_tree;
4941 	parg = &DECL_CHAIN (new_tree);
4942       }
4943     else if (!pointer_map_contains (id->decl_map, arg))
4944       {
4945 	/* Make an equivalent VAR_DECL.  If the argument was used
4946 	   as a temporary variable later in the function, its uses
4947 	   will be replaced by the local variable.  */
4948 	tree var = copy_decl_to_var (arg, id);
4949 	insert_decl_map (id, arg, var);
4950         /* Declare this new variable.  */
4951         DECL_CHAIN (var) = *vars;
4952         *vars = var;
4953       }
4954   return new_parm;
4955 }
4956 
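/* Example (a sketch): versioning

     int f (int a, int b, int c);

   with ARGS_TO_SKIP = { 1 } yields the argument list (a', c') for the
   new version, while b is mapped to a fresh VAR_DECL chained onto
   *VARS so that any remaining uses of b in the copied body still
   resolve.  */
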
4957 /* Return a copy of the function's static chain.  */
4958 static tree
4959 copy_static_chain (tree static_chain, copy_body_data * id)
4960 {
4961   tree *chain_copy, *pvar;
4962 
4963   chain_copy = &static_chain;
4964   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4965     {
4966       tree new_tree = remap_decl (*pvar, id);
4967       lang_hooks.dup_lang_specific_decl (new_tree);
4968       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4969       *pvar = new_tree;
4970     }
4971   return static_chain;
4972 }
4973 
4974 /* Return true if the function is allowed to be versioned.
4975    This is a guard for the versioning functionality.  */
4976 
4977 bool
4978 tree_versionable_function_p (tree fndecl)
4979 {
4980   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4981 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4982 }
4983 
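/* For instance, a function declared

     void f (void) __attribute__ ((noclone));

   is not versionable, nor is one for which copy_forbidden reports a
   reason, e.g. a function that receives a non-local goto.  */
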
4984 /* Delete all unreachable basic blocks and update callgraph.
4985    Doing so is somewhat nontrivial because we need to update all clones and
4986    remove inline functions that become unreachable.  */
4987 
4988 static bool
4989 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4990 {
4991   bool changed = false;
4992   basic_block b, next_bb;
4993 
4994   find_unreachable_blocks ();
4995 
4996   /* Delete all unreachable basic blocks.  */
4997 
4998   for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4999     {
5000       next_bb = b->next_bb;
5001 
5002       if (!(b->flags & BB_REACHABLE))
5003 	{
5004           gimple_stmt_iterator bsi;
5005 
5006           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5007 	    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
5008 	      {
5009 	        struct cgraph_edge *e;
5010 		struct cgraph_node *node;
5011 
5012 	        if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5013 		  {
5014 		    if (!e->inline_failed)
5015 		      cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5016 		    else
5017 	              cgraph_remove_edge (e);
5018 		  }
5019 		if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5020 		    && id->dst_node->clones)
5021 		  for (node = id->dst_node->clones; node != id->dst_node;)
5022 		    {
5023 	              if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5024 			{
5025 		          if (!e->inline_failed)
5026 		            cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5027 			  else
5028 	                    cgraph_remove_edge (e);
5029 			}
5030 
5031 		      if (node->clones)
5032 			node = node->clones;
5033 		      else if (node->next_sibling_clone)
5034 			node = node->next_sibling_clone;
5035 		      else
5036 			{
5037 			  while (node != id->dst_node && !node->next_sibling_clone)
5038 			    node = node->clone_of;
5039 			  if (node != id->dst_node)
5040 			    node = node->next_sibling_clone;
5041 			}
5042 		    }
5043 	      }
5044 	  delete_basic_block (b);
5045 	  changed = true;
5046 	}
5047     }
5048 
5049   return changed;
5050 }
5051 
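/* A sketch of the effect (hypothetical CFG): if inlining folded a
   condition to a constant so that a block ends in

     if (0) goto bb3; else goto bb4;

   then bb3 is never marked BB_REACHABLE by find_unreachable_blocks,
   so it is deleted; call edges originating in it are removed from
   this node and from every clone, and already-inlined callees are
   torn out with cgraph_remove_node_and_inline_clones.  */
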
5052 /* Update clone info after duplication.  */
5053 
5054 static void
5055 update_clone_info (copy_body_data * id)
5056 {
5057   struct cgraph_node *node;
5058   if (!id->dst_node->clones)
5059     return;
5060   for (node = id->dst_node->clones; node != id->dst_node;)
5061     {
5062       /* First update replace maps to match the new body.  */
5063       if (node->clone.tree_map)
5064         {
5065 	  unsigned int i;
5066           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5067 	    {
5068 	      struct ipa_replace_map *replace_info;
5069 	      replace_info = (*node->clone.tree_map)[i];
5070 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5071 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5072 	    }
5073 	}
5074       if (node->clones)
5075 	node = node->clones;
5076       else if (node->next_sibling_clone)
5077 	node = node->next_sibling_clone;
5078       else
5079 	{
5080 	  while (node != id->dst_node && !node->next_sibling_clone)
5081 	    node = node->clone_of;
5082 	  if (node != id->dst_node)
5083 	    node = node->next_sibling_clone;
5084 	}
5085     }
5086 }
5087 
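/* Both update_clone_info above and
   delete_unreachable_blocks_update_callgraph use the same iterative
   preorder walk over the clone tree.  In isolation the walk looks
   like this (a sketch; "visit" stands for the per-node work):

     if (!root->clones)
       return;
     for (node = root->clones; node != root;)
       {
	 visit (node);
	 if (node->clones)
	   node = node->clones;
	 else if (node->next_sibling_clone)
	   node = node->next_sibling_clone;
	 else
	   {
	     while (node != root && !node->next_sibling_clone)
	       node = node->clone_of;
	     if (node != root)
	       node = node->next_sibling_clone;
	   }
       }
*/
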
5088 /* Create a copy of a function's tree.
5089    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5090    of the original function and the new copied function
5091    respectively.  If we want to replace a DECL
5092    tree with another tree while duplicating the function's
5093    body, TREE_MAP represents the mapping between these
5094    trees. If UPDATE_CLONES is set, the call_stmt fields
5095    of edges of clones of the function will be updated.
5096 
5097    If non-NULL, ARGS_TO_SKIP determines which function parameters
5098    to remove from the new version.
5099    If SKIP_RETURN is true, the new version will return void.
5100    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5101    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5102 */
5103 void
5104 tree_function_versioning (tree old_decl, tree new_decl,
5105 			  vec<ipa_replace_map_p, va_gc> *tree_map,
5106 			  bool update_clones, bitmap args_to_skip,
5107 			  bool skip_return, bitmap blocks_to_copy,
5108 			  basic_block new_entry)
5109 {
5110   struct cgraph_node *old_version_node;
5111   struct cgraph_node *new_version_node;
5112   copy_body_data id;
5113   tree p;
5114   unsigned i;
5115   struct ipa_replace_map *replace_info;
5116   basic_block old_entry_block, bb;
5117   vec<gimple> init_stmts;
5118   init_stmts.create (10);
5119   tree vars = NULL_TREE;
5120 
5121   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5122 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5123   DECL_POSSIBLY_INLINED (old_decl) = 1;
5124 
5125   old_version_node = cgraph_get_node (old_decl);
5126   gcc_checking_assert (old_version_node);
5127   new_version_node = cgraph_get_node (new_decl);
5128   gcc_checking_assert (new_version_node);
5129 
5130   /* Copy over debug args.  */
5131   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5132     {
5133       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5134       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5135       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5136       old_debug_args = decl_debug_args_lookup (old_decl);
5137       if (old_debug_args)
5138 	{
5139 	  new_debug_args = decl_debug_args_insert (new_decl);
5140 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5141 	}
5142     }
5143 
5144   /* Output the inlining info for this abstract function, since it has been
5145      inlined.  If we don't do this now, we can lose the information about the
5146      variables in the function when the blocks get blown away as soon as we
5147      remove the cgraph node.  */
5148   (*debug_hooks->outlining_inline_function) (old_decl);
5149 
5150   DECL_ARTIFICIAL (new_decl) = 1;
5151   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5152   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5153 
5154   /* Prepare the data structures for the tree copy.  */
5155   memset (&id, 0, sizeof (id));
5156 
5157   /* Record the statements that may need to be folded after copying.  */
5158   id.statements_to_fold = pointer_set_create ();
5159 
5160   id.decl_map = pointer_map_create ();
5161   id.debug_map = NULL;
5162   id.src_fn = old_decl;
5163   id.dst_fn = new_decl;
5164   id.src_node = old_version_node;
5165   id.dst_node = new_version_node;
5166   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5167   if (id.src_node->ipa_transforms_to_apply.exists ())
5168     {
5169       vec<ipa_opt_pass> old_transforms_to_apply
5170 	    = id.dst_node->ipa_transforms_to_apply;
5171       unsigned int i;
5172 
5173       id.dst_node->ipa_transforms_to_apply
5174 	    = id.src_node->ipa_transforms_to_apply.copy ();
5175       for (i = 0; i < old_transforms_to_apply.length (); i++)
5176         id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5177       old_transforms_to_apply.release ();
5178     }
5179 
5180   id.copy_decl = copy_decl_no_change;
5181   id.transform_call_graph_edges
5182     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5183   id.transform_new_cfg = true;
5184   id.transform_return_to_modify = false;
5185   id.transform_lang_insert_block = NULL;
5186 
5187   old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5188     (DECL_STRUCT_FUNCTION (old_decl));
5189   initialize_cfun (new_decl, old_decl,
5190 		   old_entry_block->count);
5191   DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5192     = id.src_cfun->gimple_df->ipa_pta;
5193 
5194   /* Copy the function's static chain.  */
5195   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5196   if (p)
5197     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5198       copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5199 			 &id);
5200 
5201   /* If there's a tree_map, prepare for substitution.  */
5202   if (tree_map)
5203     for (i = 0; i < tree_map->length (); i++)
5204       {
5205 	gimple init;
5206 	replace_info = (*tree_map)[i];
5207 	if (replace_info->replace_p)
5208 	  {
5209 	    if (!replace_info->old_tree)
5210 	      {
5211 		int i = replace_info->parm_num;
5212 		tree parm;
5213 		for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5214 		  i--;
5215 		replace_info->old_tree = parm;
5216 	      }
5217 	    gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5218 	    init = setup_one_parameter (&id, replace_info->old_tree,
5219 	    			        replace_info->new_tree, id.src_fn,
5220 				        NULL,
5221 				        &vars);
5222 	    if (init)
5223 	      init_stmts.safe_push (init);
5224 	  }
5225       }
5226   /* Copy the function's arguments.  */
5227   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5228     DECL_ARGUMENTS (new_decl) =
5229       copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5230       				     args_to_skip, &vars);
5231 
5232   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5233   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5234 
5235   declare_inline_vars (DECL_INITIAL (new_decl), vars);
5236 
5237   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5238     /* Add local vars.  */
5239     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5240 
5241   if (DECL_RESULT (old_decl) == NULL_TREE)
5242     ;
5243   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5244     {
5245       DECL_RESULT (new_decl)
5246 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5247 		      RESULT_DECL, NULL_TREE, void_type_node);
5248       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5249       cfun->returns_struct = 0;
5250       cfun->returns_pcc_struct = 0;
5251     }
5252   else
5253     {
5254       tree old_name;
5255       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5256       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5257       if (gimple_in_ssa_p (id.src_cfun)
5258 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5259 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5260 	{
5261 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5262 	  insert_decl_map (&id, old_name, new_name);
5263 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5264 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5265 	}
5266     }
5267 
5268   /* Copy the function's body.  */
5269   copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5270 	     ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5271 
5272   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5273   number_blocks (new_decl);
5274 
5275   /* We want to create the BB unconditionally, so that the addition of
5276      debug stmts doesn't affect BB count, which may in the end cause
5277      codegen differences.  */
5278   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5279   while (init_stmts.length ())
5280     insert_init_stmt (&id, bb, init_stmts.pop ());
5281   update_clone_info (&id);
5282 
5283   /* Remap the nonlocal_goto_save_area, if any.  */
5284   if (cfun->nonlocal_goto_save_area)
5285     {
5286       struct walk_stmt_info wi;
5287 
5288       memset (&wi, 0, sizeof (wi));
5289       wi.info = &id;
5290       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5291     }
5292 
5293   /* Clean up.  */
5294   pointer_map_destroy (id.decl_map);
5295   if (id.debug_map)
5296     pointer_map_destroy (id.debug_map);
5297   free_dominance_info (CDI_DOMINATORS);
5298   free_dominance_info (CDI_POST_DOMINATORS);
5299 
5300   fold_marked_statements (0, id.statements_to_fold);
5301   pointer_set_destroy (id.statements_to_fold);
5302   fold_cond_expr_cond ();
5303   delete_unreachable_blocks_update_callgraph (&id);
5304   if (id.dst_node->analyzed)
5305     cgraph_rebuild_references ();
5306   update_ssa (TODO_update_ssa);
5307 
5308   /* After partial cloning we need to rescale frequencies, so that they
5309      are within the proper range in the cloned function.  */
5310   if (new_entry)
5311     {
5312       struct cgraph_edge *e;
5313       rebuild_frequencies ();
5314 
5315       new_version_node->count = ENTRY_BLOCK_PTR->count;
5316       for (e = new_version_node->callees; e; e = e->next_callee)
5317 	{
5318 	  basic_block bb = gimple_bb (e->call_stmt);
5319 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5320 							 bb);
5321 	  e->count = bb->count;
5322 	}
5323       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5324 	{
5325 	  basic_block bb = gimple_bb (e->call_stmt);
5326 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5327 							 bb);
5328 	  e->count = bb->count;
5329 	}
5330     }
5331 
5332   free_dominance_info (CDI_DOMINATORS);
5333   free_dominance_info (CDI_POST_DOMINATORS);
5334 
5335   gcc_assert (!id.debug_stmts.exists ());
5336   init_stmts.release ();
5337   pop_cfun ();
5338   return;
5339 }
5340 
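/* A minimal caller sketch (hypothetical; real callers such as clone
   materialization in cgraphclones.c must first set up NEW_DECL as a
   proper FUNCTION_DECL copy of OLD_DECL): duplicate the whole body,
   keeping every parameter, block and return value:

     tree_function_versioning (old_decl, new_decl,
			       NULL /* tree_map */,
			       false /* update_clones */,
			       NULL /* args_to_skip */,
			       false /* skip_return */,
			       NULL /* blocks_to_copy */,
			       NULL /* new_entry */);
*/
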
5341 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5342    the callee and return the inlined body on success.  */
5343 
5344 tree
5345 maybe_inline_call_in_expr (tree exp)
5346 {
5347   tree fn = get_callee_fndecl (exp);
5348 
5349   /* We can only try to inline "const" functions.  */
5350   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5351     {
5352       struct pointer_map_t *decl_map = pointer_map_create ();
5353       call_expr_arg_iterator iter;
5354       copy_body_data id;
5355       tree param, arg, t;
5356 
5357       /* Remap the parameters.  */
5358       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5359 	   param;
5360 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5361 	*pointer_map_insert (decl_map, param) = arg;
5362 
5363       memset (&id, 0, sizeof (id));
5364       id.src_fn = fn;
5365       id.dst_fn = current_function_decl;
5366       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5367       id.decl_map = decl_map;
5368 
5369       id.copy_decl = copy_decl_no_change;
5370       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5371       id.transform_new_cfg = false;
5372       id.transform_return_to_modify = true;
5373       id.transform_lang_insert_block = NULL;
5374 
5375       /* Make sure not to unshare trees behind the front-end's back
5376 	 since front-end specific mechanisms may rely on sharing.  */
5377       id.regimplify = false;
5378       id.do_not_unshare = true;
5379 
5380       /* We're not inside any EH region.  */
5381       id.eh_lp_nr = 0;
5382 
5383       t = copy_tree_body (&id);
5384       pointer_map_destroy (decl_map);
5385 
5386       /* We can only return something suitable for use in a GENERIC
5387 	 expression tree.  */
5388       if (TREE_CODE (t) == MODIFY_EXPR)
5389 	return TREE_OPERAND (t, 1);
5390     }
5391 
5392   return NULL_TREE;
5393 }
5394 
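/* Example (a sketch): with a "const" function whose GENERIC body is
   still available,

     static int sq (int x) __attribute__ ((const));
     static int sq (int x) { return x * x; }

   calling this on the CALL_EXPR sq (3) copies the body with x
   remapped to 3 and returns the RHS of the resulting MODIFY_EXPR,
   i.e. an expression equivalent to 3 * 3.  */
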
5395 /* Duplicate a type, fields and all.  */
5396 
5397 tree
5398 build_duplicate_type (tree type)
5399 {
5400   struct copy_body_data id;
5401 
5402   memset (&id, 0, sizeof (id));
5403   id.src_fn = current_function_decl;
5404   id.dst_fn = current_function_decl;
5405   id.src_cfun = cfun;
5406   id.decl_map = pointer_map_create ();
5407   id.debug_map = NULL;
5408   id.copy_decl = copy_decl_no_change;
5409 
5410   type = remap_type_1 (type, &id);
5411 
5412   pointer_map_destroy (id.decl_map);
5413   if (id.debug_map)
5414     pointer_map_destroy (id.debug_map);
5415 
5416   TYPE_CANONICAL (type) = type;
5417 
5418   return type;
5419 }
5420
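/* Example (a sketch): duplicating

     struct s { int a; int b; };

   yields a fresh RECORD_TYPE whose FIELD_DECLs are new nodes remapped
   into the current function and which is its own TYPE_CANONICAL, so
   canonical-type comparisons treat it as distinct from the original.  */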