xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-inline.c (revision 53b02e147d4ed531c0d2a5ca9b3e8026ba3e99b5)
1 /* Tree inlining.
2    Copyright (C) 2001-2019 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 
65 /* I'm not real happy about this, but we need to handle gimple and
66    non-gimple trees.  */
67 
68 /* Inlining, Cloning, Versioning, Parallelization
69 
70    Inlining: a function body is duplicated, but the PARM_DECLs are
71    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72    MODIFY_EXPRs that store to a dedicated returned-value variable.
73    The duplicated eh_region info of the copy will later be appended
74    to the info for the caller; the eh_region info in copied throwing
75    statements and RESX statements is adjusted accordingly.
76 
77    Cloning: (only in C++) We have one body for a con/de/structor, and
78    multiple function decls, each with a unique parameter list.
79    Duplicate the body, using the given splay tree; some parameters
80    will become constants (like 0 or 1).
81 
82    Versioning: a function body is duplicated and the result is a new
83    function, rather than being merged into the blocks of an existing
84    function as with inlining.  Some parameters will become constants.
85 
86    Parallelization: a region of a function is duplicated resulting in
87    a new function.  Variables may be replaced with complex expressions
88    to enable shared variable semantics.
89 
90    All of these will simultaneously look up any callgraph edges.  If
91    we're going to inline the duplicated function body, and the given
92    function has some cloned callgraph nodes (one for each place this
93    function will be inlined), those callgraph edges will be duplicated.
94    If we're cloning the body, those callgraph edges will be
95    updated to point into the new body.  (Note that the original
96    callgraph node and edge list will not be altered.)
97 
98    See the CALL_EXPR handling case in copy_tree_body_r ().  */
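
/* A purely illustrative sketch (not taken from any particular dump):
   inlining a callee such as

       int callee (int p) { return p + 1; }

   conceptually rewrites its body at the call site into something like

       p.1 = <actual argument>;     <- the PARM_DECL remapped to a VAR_DECL
       retval.2 = p.1 + 1;          <- the RETURN_EXPR turned into a
                                       MODIFY_EXPR storing to the
                                       returned-value variable

   and uses of the call's result then read retval.2.  The names p.1 and
   retval.2 are made up for the example.  */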
99 
100 /* To Do:
101 
102    o In order to make inlining-on-trees work, we pessimized
103      function-local static constants.  In particular, they are now
104      always output, even when not addressed.  Fix this by treating
105      function-local static constants just like global static
106      constants; the back-end already knows not to output them if they
107      are not needed.
108 
109    o Provide heuristics to clamp inlining of recursive template
110      calls?  */
111 
112 
113 /* Weights that estimate_num_insns uses to estimate the size of the
114    produced code.  */
115 
116 eni_weights eni_size_weights;
117 
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119    to execute the produced code.  */
120 
121 eni_weights eni_time_weights;
122 
123 /* Prototypes.  */
124 
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 				     basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_decl_to_var (tree, copy_body_data *);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137 
138 /* Insert a tree->tree mapping for ID.  Although the name suggests
139    that the trees should be variables, it is used for more than that.  */
140 
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144   id->decl_map->put (key, value);
145 
146   /* Always insert an identity map as well.  If we see this same new
147      node again, we won't want to duplicate it a second time.  */
148   if (key != value)
149     id->decl_map->put (value, value);
150 }
151 
152 /* Insert a tree->tree mapping for ID.  This is only used for
153    variables.  */
154 
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158   if (!gimple_in_ssa_p (id->src_cfun))
159     return;
160 
161   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162     return;
163 
164   if (!target_for_debug_bind (key))
165     return;
166 
167   gcc_assert (TREE_CODE (key) == PARM_DECL);
168   gcc_assert (VAR_P (value));
169 
170   if (!id->debug_map)
171     id->debug_map = new hash_map<tree, tree>;
172 
173   id->debug_map->put (key, value);
174 }
175 
176 /* If nonzero, we're remapping the contents of inlined debug
177    statements.  If negative, an error has occurred, such as a
178    reference to a variable that isn't available in the inlined
179    context.  */
180 static int processing_debug_stmt = 0;
181 
182 /* Construct new SSA name for old NAME. ID is the inline context.  */
183 
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187   tree new_tree, var;
188   tree *n;
189 
190   gcc_assert (TREE_CODE (name) == SSA_NAME);
191 
192   n = id->decl_map->get (name);
193   if (n)
194     return unshare_expr (*n);
195 
196   if (processing_debug_stmt)
197     {
198       if (SSA_NAME_IS_DEFAULT_DEF (name)
199 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 	  && id->entry_bb == NULL
201 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 	{
203 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
204 	  gimple *def_temp;
205 	  gimple_stmt_iterator gsi;
206 	  tree val = SSA_NAME_VAR (name);
207 
208 	  n = id->decl_map->get (val);
209 	  if (n != NULL)
210 	    val = *n;
211 	  if (TREE_CODE (val) != PARM_DECL
212 	      && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
213 	    {
214 	      processing_debug_stmt = -1;
215 	      return name;
216 	    }
217 	  n = id->decl_map->get (val);
218 	  if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
219 	    return *n;
220 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
221 	  DECL_ARTIFICIAL (vexpr) = 1;
222 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
223 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
224 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
225 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
226 	  insert_decl_map (id, val, vexpr);
227 	  return vexpr;
228 	}
229 
230       processing_debug_stmt = -1;
231       return name;
232     }
233 
234   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
235   var = SSA_NAME_VAR (name);
236   if (!var
237       || (!SSA_NAME_IS_DEFAULT_DEF (name)
238 	  && VAR_P (var)
239 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
240 	  && DECL_ARTIFICIAL (var)
241 	  && DECL_IGNORED_P (var)
242 	  && !DECL_NAME (var)))
243     {
244       struct ptr_info_def *pi;
245       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
246       if (!var && SSA_NAME_IDENTIFIER (name))
247 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
248       insert_decl_map (id, name, new_tree);
249       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
250 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
251       /* At least IPA points-to info can be directly transferred.  */
252       if (id->src_cfun->gimple_df
253 	  && id->src_cfun->gimple_df->ipa_pta
254 	  && POINTER_TYPE_P (TREE_TYPE (name))
255 	  && (pi = SSA_NAME_PTR_INFO (name))
256 	  && !pi->pt.anything)
257 	{
258 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
259 	  new_pi->pt = pi->pt;
260 	}
261       /* So can range-info.  */
262       if (!POINTER_TYPE_P (TREE_TYPE (name))
263 	  && SSA_NAME_RANGE_INFO (name))
264 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
265 				       SSA_NAME_RANGE_INFO (name));
266       return new_tree;
267     }
268 
269   /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
270      that in copy_bb.  */
271   new_tree = remap_decl (var, id);
272 
273   /* We might have substituted a constant or another SSA_NAME for
274      the variable.
275 
276      Replace the SSA name representing the RESULT_DECL by the variable
277      during inlining: this saves us from having to introduce a PHI node
278      in the case the return value is only partly initialized.  */
279   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
280       && (!SSA_NAME_VAR (name)
281 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
282 	  || !id->transform_return_to_modify))
283     {
284       struct ptr_info_def *pi;
285       new_tree = make_ssa_name (new_tree);
286       insert_decl_map (id, name, new_tree);
287       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
288 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
289       /* At least IPA points-to info can be directly transferred.  */
290       if (id->src_cfun->gimple_df
291 	  && id->src_cfun->gimple_df->ipa_pta
292 	  && POINTER_TYPE_P (TREE_TYPE (name))
293 	  && (pi = SSA_NAME_PTR_INFO (name))
294 	  && !pi->pt.anything)
295 	{
296 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
297 	  new_pi->pt = pi->pt;
298 	}
299       /* So can range-info.  */
300       if (!POINTER_TYPE_P (TREE_TYPE (name))
301 	  && SSA_NAME_RANGE_INFO (name))
302 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
303 				       SSA_NAME_RANGE_INFO (name));
304       if (SSA_NAME_IS_DEFAULT_DEF (name))
305 	{
306 	  /* By inlining a function with an uninitialized variable, we might
307 	     extend its lifetime (the variable might get reused).  This causes
308 	     an ICE if we end up extending the lifetime of an SSA name across
309 	     an abnormal edge, and it also increases register pressure.
310 
311 	     We simply initialize all uninitialized vars to 0, except when we
312 	     are inlining into the very first BB.  We could avoid this for all
313 	     BBs that are not inside strongly connected regions of the CFG,
314 	     but that is expensive to test.  */
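	  /* Purely as an illustration: if the inlined callee read an
	     uninitialized local, say "int y; ... use (y);", and the
	     conditions below hold (in particular the name occurs in an
	     abnormal PHI), the copied name gets an explicit "= 0"
	     assignment at the end of id->entry_bb instead of remaining
	     a default definition.  */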
315 	  if (id->entry_bb
316 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
317 	      && (!SSA_NAME_VAR (name)
318 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
319 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
320 					     0)->dest
321 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
322 	    {
323 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
324 	      gimple *init_stmt;
325 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
326 
327 	      init_stmt = gimple_build_assign (new_tree, zero);
328 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
329 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
330 	    }
331 	  else
332 	    {
333 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
334 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
335 	    }
336 	}
337     }
338   else
339     insert_decl_map (id, name, new_tree);
340   return new_tree;
341 }
342 
343 /* Remap DECL during the copying of the BLOCK tree for the function.  */
344 
345 tree
346 remap_decl (tree decl, copy_body_data *id)
347 {
348   tree *n;
349 
350   /* We only remap local variables in the current function.  */
351 
352   /* See if we have remapped this declaration.  */
353 
354   n = id->decl_map->get (decl);
355 
356   if (!n && processing_debug_stmt)
357     {
358       processing_debug_stmt = -1;
359       return decl;
360     }
361 
362   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
363      necessary DECLs have already been remapped and we do not want to duplicate
364      a decl coming from outside of the sequence we are copying.  */
365   if (!n
366       && id->prevent_decl_creation_for_types
367       && id->remapping_type_depth > 0
368       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
369     return decl;
370 
371   /* If we didn't already have an equivalent for this declaration, create one
372      now.  */
373   if (!n)
374     {
375       /* Make a copy of the variable or label.  */
376       tree t = id->copy_decl (decl, id);
377 
378       /* Remember it, so that if we encounter this local entity again
379 	 we can reuse this copy.  Do this early because remap_type may
380 	 need this decl for TYPE_STUB_DECL.  */
381       insert_decl_map (id, decl, t);
382 
383       if (!DECL_P (t))
384 	return t;
385 
386       /* Remap types, if necessary.  */
387       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
388       if (TREE_CODE (t) == TYPE_DECL)
389 	{
390 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
391 
392 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
393 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
394 	     is not set on the TYPE_DECL, for example in LTO mode.  */
395 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
396 	    {
397 	      tree x = build_variant_type_copy (TREE_TYPE (t));
398 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
399 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
400 	      DECL_ORIGINAL_TYPE (t) = x;
401 	    }
402 	}
403 
404       /* Remap sizes as necessary.  */
405       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
406       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
407 
408       /* If fields, do likewise for offset and qualifier.  */
409       if (TREE_CODE (t) == FIELD_DECL)
410 	{
411 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
412 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
413 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
414 	}
415 
416       return t;
417     }
418 
419   if (id->do_not_unshare)
420     return *n;
421   else
422     return unshare_expr (*n);
423 }
424 
425 static tree
426 remap_type_1 (tree type, copy_body_data *id)
427 {
428   tree new_tree, t;
429 
430   /* We do need a copy.  Build and register it now.  If this is a pointer or
431      reference type, remap the designated type and make a new pointer or
432      reference type.  */
433   if (TREE_CODE (type) == POINTER_TYPE)
434     {
435       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
436 					 TYPE_MODE (type),
437 					 TYPE_REF_CAN_ALIAS_ALL (type));
438       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
439 	new_tree = build_type_attribute_qual_variant (new_tree,
440 						      TYPE_ATTRIBUTES (type),
441 						      TYPE_QUALS (type));
442       insert_decl_map (id, type, new_tree);
443       return new_tree;
444     }
445   else if (TREE_CODE (type) == REFERENCE_TYPE)
446     {
447       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
448 					    TYPE_MODE (type),
449 					    TYPE_REF_CAN_ALIAS_ALL (type));
450       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
451 	new_tree = build_type_attribute_qual_variant (new_tree,
452 						      TYPE_ATTRIBUTES (type),
453 						      TYPE_QUALS (type));
454       insert_decl_map (id, type, new_tree);
455       return new_tree;
456     }
457   else
458     new_tree = copy_node (type);
459 
460   insert_decl_map (id, type, new_tree);
461 
462   /* This is a new type, not a copy of an old type.  Need to reassociate
463      variants.  We can handle everything except the main variant lazily.  */
464   t = TYPE_MAIN_VARIANT (type);
465   if (type != t)
466     {
467       t = remap_type (t, id);
468       TYPE_MAIN_VARIANT (new_tree) = t;
469       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
470       TYPE_NEXT_VARIANT (t) = new_tree;
471     }
472   else
473     {
474       TYPE_MAIN_VARIANT (new_tree) = new_tree;
475       TYPE_NEXT_VARIANT (new_tree) = NULL;
476     }
477 
478   if (TYPE_STUB_DECL (type))
479     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
480 
481   /* Lazily create pointer and reference types.  */
482   TYPE_POINTER_TO (new_tree) = NULL;
483   TYPE_REFERENCE_TO (new_tree) = NULL;
484 
485   /* Copy all types that may contain references to local variables; be sure
486      to preserve sharing between the type and its main variant when possible.  */
487   switch (TREE_CODE (new_tree))
488     {
489     case INTEGER_TYPE:
490     case REAL_TYPE:
491     case FIXED_POINT_TYPE:
492     case ENUMERAL_TYPE:
493     case BOOLEAN_TYPE:
494       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
495 	{
496 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
497 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
498 
499 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
500 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
501 	}
502       else
503 	{
504 	  t = TYPE_MIN_VALUE (new_tree);
505 	  if (t && TREE_CODE (t) != INTEGER_CST)
506 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
507 
508 	  t = TYPE_MAX_VALUE (new_tree);
509 	  if (t && TREE_CODE (t) != INTEGER_CST)
510 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
511 	}
512       return new_tree;
513 
514     case FUNCTION_TYPE:
515       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
516 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
517 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
518       else
519         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
520       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
521 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
522 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
523       else
524         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
525       return new_tree;
526 
527     case ARRAY_TYPE:
528       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
529 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
530 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
531       else
532 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
533 
534       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
535 	{
536 	  gcc_checking_assert (TYPE_DOMAIN (type)
537 			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
538 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
539 	}
540       else
541         {
542 	  TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
543 	  /* For array bounds where we have decided not to copy over the bounds
544 	     variable because it isn't used in the OpenMP/OpenACC region, change
545 	     them to an uninitialized VAR_DECL temporary.  */
546 	  if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
547 	      && id->adjust_array_error_bounds
548 	      && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
549 	    {
550 	      tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
551 	      DECL_ATTRIBUTES (v)
552 		= tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
553 			     DECL_ATTRIBUTES (v));
554 	      TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
555 	    }
556         }
557       break;
558 
559     case RECORD_TYPE:
560     case UNION_TYPE:
561     case QUAL_UNION_TYPE:
562       if (TYPE_MAIN_VARIANT (type) != type
563 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
564 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
565       else
566 	{
567 	  tree f, nf = NULL;
568 
569 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
570 	    {
571 	      t = remap_decl (f, id);
572 	      DECL_CONTEXT (t) = new_tree;
573 	      DECL_CHAIN (t) = nf;
574 	      nf = t;
575 	    }
576 	  TYPE_FIELDS (new_tree) = nreverse (nf);
577 	}
578       break;
579 
580     case OFFSET_TYPE:
581     default:
582       /* Shouldn't have been thought variable sized.  */
583       gcc_unreachable ();
584     }
585 
586   /* All variants of the type share the same size, so use the already remapped data.  */
587   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
588     {
589       tree s = TYPE_SIZE (type);
590       tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
591       tree su = TYPE_SIZE_UNIT (type);
592       tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
593       gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
594 			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
595 			   || s == mvs);
596       gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
597 			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
598 			   || su == mvsu);
599       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
600       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
601     }
602   else
603     {
604       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
605       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
606     }
607 
608   return new_tree;
609 }
610 
611 /* Helper function for remap_type_2, called through walk_tree.  */
612 
613 static tree
614 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
615 {
616   copy_body_data *id = (copy_body_data *) data;
617 
618   if (TYPE_P (*tp))
619     *walk_subtrees = 0;
620 
621   else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
622     return *tp;
623 
624   return NULL_TREE;
625 }
626 
627 /* Return true if TYPE needs to be remapped because remap_decl on any
628    needed embedded decl returns something other than that decl.  */
629 
630 static bool
631 remap_type_2 (tree type, copy_body_data *id)
632 {
633   tree t;
634 
635 #define RETURN_TRUE_IF_VAR(T) \
636   do								\
637     {								\
638       tree _t = (T);						\
639       if (_t)							\
640 	{							\
641 	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
642 	    return true;					\
643 	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
644 	      && walk_tree (&_t, remap_type_3, id, NULL))	\
645 	    return true;					\
646 	}							\
647     }								\
648   while (0)
649 
650   switch (TREE_CODE (type))
651     {
652     case POINTER_TYPE:
653     case REFERENCE_TYPE:
654     case FUNCTION_TYPE:
655     case METHOD_TYPE:
656       return remap_type_2 (TREE_TYPE (type), id);
657 
658     case INTEGER_TYPE:
659     case REAL_TYPE:
660     case FIXED_POINT_TYPE:
661     case ENUMERAL_TYPE:
662     case BOOLEAN_TYPE:
663       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
664       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
665       return false;
666 
667     case ARRAY_TYPE:
668       if (remap_type_2 (TREE_TYPE (type), id)
669 	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
670 	return true;
671       break;
672 
673     case RECORD_TYPE:
674     case UNION_TYPE:
675     case QUAL_UNION_TYPE:
676       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
677 	if (TREE_CODE (t) == FIELD_DECL)
678 	  {
679 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
680 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
681 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
682 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
683 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
684 	  }
685       break;
686 
687     default:
688       return false;
689     }
690 
691   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
692   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
693   return false;
694 #undef RETURN_TRUE_IF_VAR
695 }
696 
697 tree
698 remap_type (tree type, copy_body_data *id)
699 {
700   tree *node;
701   tree tmp;
702 
703   if (type == NULL)
704     return type;
705 
706   /* See if we have remapped this type.  */
707   node = id->decl_map->get (type);
708   if (node)
709     return *node;
710 
711   /* The type only needs remapping if it's variably modified.  */
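  /* (A variably modified type is one whose size or bounds refer to decls
     that are local to the source function, e.g. the type of "int a[n]"
     for a parameter or local n.  Such types need a fresh copy so that the
     embedded decls can be remapped; any other type can safely be mapped
     to itself.)  */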
712   if (! variably_modified_type_p (type, id->src_fn)
713       /* Don't remap if the copy_decl method doesn't always return a new
714 	 decl and for all embedded decls returns the passed-in decl.  */
715       || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
716     {
717       insert_decl_map (id, type, type);
718       return type;
719     }
720 
721   id->remapping_type_depth++;
722   tmp = remap_type_1 (type, id);
723   id->remapping_type_depth--;
724 
725   return tmp;
726 }
727 
728 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
729 
730 static bool
731 can_be_nonlocal (tree decl, copy_body_data *id)
732 {
733   /* We cannot duplicate function decls.  */
734   if (TREE_CODE (decl) == FUNCTION_DECL)
735     return true;
736 
737   /* Local static vars must be non-local or we get multiple declaration
738      problems.  */
739   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
740     return true;
741 
742   return false;
743 }
744 
745 static tree
746 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
747 	     copy_body_data *id)
748 {
749   tree old_var;
750   tree new_decls = NULL_TREE;
751 
752   /* Remap its variables.  */
753   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
754     {
755       tree new_var;
756 
757       if (can_be_nonlocal (old_var, id))
758 	{
759 	  /* We need to add this variable to the local decls as otherwise
760 	     nothing else will do so.  */
761 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
762 	    add_local_decl (cfun, old_var);
763 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
764 	      && !DECL_IGNORED_P (old_var)
765 	      && nonlocalized_list)
766 	    vec_safe_push (*nonlocalized_list, old_var);
767 	  continue;
768 	}
769 
770       /* Remap the variable.  */
771       new_var = remap_decl (old_var, id);
772 
773       /* If we didn't remap this variable, we can't mess with its
774 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
775 	 already declared somewhere else, so don't declare it here.  */
776 
777       if (new_var == id->retvar)
778 	;
779       else if (!new_var)
780         {
781 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
782 	      && !DECL_IGNORED_P (old_var)
783 	      && nonlocalized_list)
784 	    vec_safe_push (*nonlocalized_list, old_var);
785 	}
786       else
787 	{
788 	  gcc_assert (DECL_P (new_var));
789 	  DECL_CHAIN (new_var) = new_decls;
790 	  new_decls = new_var;
791 
792 	  /* Also copy value-expressions.  */
793 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
794 	    {
795 	      tree tem = DECL_VALUE_EXPR (new_var);
796 	      bool old_regimplify = id->regimplify;
797 	      id->remapping_type_depth++;
798 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
799 	      id->remapping_type_depth--;
800 	      id->regimplify = old_regimplify;
801 	      SET_DECL_VALUE_EXPR (new_var, tem);
802 	    }
803 	}
804     }
805 
806   return nreverse (new_decls);
807 }
808 
809 /* Copy the BLOCK to contain remapped versions of the variables
810    therein.  And hook the new block into the block-tree.  */
811 
812 static void
813 remap_block (tree *block, copy_body_data *id)
814 {
815   tree old_block;
816   tree new_block;
817 
818   /* Make the new block.  */
819   old_block = *block;
820   new_block = make_node (BLOCK);
821   TREE_USED (new_block) = TREE_USED (old_block);
822   BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
823   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
824   BLOCK_NONLOCALIZED_VARS (new_block)
825     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
826   *block = new_block;
827 
828   /* Remap its variables.  */
829   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
830   					&BLOCK_NONLOCALIZED_VARS (new_block),
831 					id);
832 
833   if (id->transform_lang_insert_block)
834     id->transform_lang_insert_block (new_block);
835 
836   /* Remember the remapped block.  */
837   insert_decl_map (id, old_block, new_block);
838 }
839 
840 /* Copy the whole block tree and root it in id->block.  */
841 
842 static tree
843 remap_blocks (tree block, copy_body_data *id)
844 {
845   tree t;
846   tree new_tree = block;
847 
848   if (!block)
849     return NULL;
850 
851   remap_block (&new_tree, id);
852   gcc_assert (new_tree != block);
853   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
854     prepend_lexical_block (new_tree, remap_blocks (t, id));
855   /* Blocks are in arbitrary order, but to make things slightly prettier,
856      do not swap the order when producing a copy.  */
857   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
858   return new_tree;
859 }
860 
861 /* Remap the block tree rooted at BLOCK to nothing.  */
862 
863 static void
864 remap_blocks_to_null (tree block, copy_body_data *id)
865 {
866   tree t;
867   insert_decl_map (id, block, NULL_TREE);
868   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
869     remap_blocks_to_null (t, id);
870 }
871 
872 /* Remap the location info pointed to by LOCUS.  */
873 
874 static location_t
875 remap_location (location_t locus, copy_body_data *id)
876 {
877   if (LOCATION_BLOCK (locus))
878     {
879       tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
880       gcc_assert (n);
881       if (*n)
882 	return set_block (locus, *n);
883     }
884 
885   locus = LOCATION_LOCUS (locus);
886 
887   if (locus != UNKNOWN_LOCATION && id->block)
888     return set_block (locus, id->block);
889 
890   return locus;
891 }
892 
893 static void
894 copy_statement_list (tree *tp)
895 {
896   tree_stmt_iterator oi, ni;
897   tree new_tree;
898 
899   new_tree = alloc_stmt_list ();
900   ni = tsi_start (new_tree);
901   oi = tsi_start (*tp);
902   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
903   *tp = new_tree;
904 
905   for (; !tsi_end_p (oi); tsi_next (&oi))
906     {
907       tree stmt = tsi_stmt (oi);
908       if (TREE_CODE (stmt) == STATEMENT_LIST)
909 	/* This copy is not redundant; tsi_link_after will smash this
910 	   STATEMENT_LIST into the end of the one we're building, and we
911 	   don't want to do that with the original.  */
912 	copy_statement_list (&stmt);
913       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
914     }
915 }
916 
917 static void
918 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
919 {
920   tree block = BIND_EXPR_BLOCK (*tp);
921   /* Copy (and replace) the statement.  */
922   copy_tree_r (tp, walk_subtrees, NULL);
923   if (block)
924     {
925       remap_block (&block, id);
926       BIND_EXPR_BLOCK (*tp) = block;
927     }
928 
929   if (BIND_EXPR_VARS (*tp))
930     /* This will remap a lot of the same decls again, but this should be
931        harmless.  */
932     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
933 }
934 
935 
936 /* Create a new gimple_seq by remapping all the statements in BODY
937    using the inlining information in ID.  */
938 
939 static gimple_seq
940 remap_gimple_seq (gimple_seq body, copy_body_data *id)
941 {
942   gimple_stmt_iterator si;
943   gimple_seq new_body = NULL;
944 
945   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
946     {
947       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
948       gimple_seq_add_seq (&new_body, new_stmts);
949     }
950 
951   return new_body;
952 }
953 
954 
955 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
956    block using the mapping information in ID.  */
957 
958 static gimple *
959 copy_gimple_bind (gbind *stmt, copy_body_data *id)
960 {
961   gimple *new_bind;
962   tree new_block, new_vars;
963   gimple_seq body, new_body;
964 
965   /* Copy the statement.  Note that we purposely don't use copy_stmt
966      here because we need to remap statements as we copy.  */
967   body = gimple_bind_body (stmt);
968   new_body = remap_gimple_seq (body, id);
969 
970   new_block = gimple_bind_block (stmt);
971   if (new_block)
972     remap_block (&new_block, id);
973 
974   /* This will remap a lot of the same decls again, but this should be
975      harmless.  */
976   new_vars = gimple_bind_vars (stmt);
977   if (new_vars)
978     new_vars = remap_decls (new_vars, NULL, id);
979 
980   new_bind = gimple_build_bind (new_vars, new_body, new_block);
981 
982   return new_bind;
983 }
984 
985 /* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */
986 
987 static bool
988 is_parm (tree decl)
989 {
990   if (TREE_CODE (decl) == SSA_NAME)
991     {
992       decl = SSA_NAME_VAR (decl);
993       if (!decl)
994 	return false;
995     }
996 
997   return (TREE_CODE (decl) == PARM_DECL);
998 }
999 
1000 /* Remap the dependence CLIQUE from the source to the destination function
1001    as specified in ID.  */
1002 
1003 static unsigned short
1004 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1005 {
1006   if (clique == 0 || processing_debug_stmt)
1007     return 0;
1008   if (!id->dependence_map)
1009     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1010   bool existed;
1011   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1012   if (!existed)
1013     {
1014       /* Clique 1 is reserved for local ones set by PTA.  */
1015       if (cfun->last_clique == 0)
1016 	cfun->last_clique = 1;
1017       newc = ++cfun->last_clique;
1018     }
1019   return newc;
1020 }
1021 
1022 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
1023    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
1024    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1025    recursing into the child nodes of *TP.  */
1026 
1027 static tree
1028 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1029 {
1030   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1031   copy_body_data *id = (copy_body_data *) wi_p->info;
1032   tree fn = id->src_fn;
1033 
1034   /* For recursive invocations this is no longer the LHS itself.  */
1035   bool is_lhs = wi_p->is_lhs;
1036   wi_p->is_lhs = false;
1037 
1038   if (TREE_CODE (*tp) == SSA_NAME)
1039     {
1040       *tp = remap_ssa_name (*tp, id);
1041       *walk_subtrees = 0;
1042       if (is_lhs)
1043 	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1044       return NULL;
1045     }
1046   else if (auto_var_in_fn_p (*tp, fn))
1047     {
1048       /* Local variables and labels need to be replaced by equivalent
1049 	 variables.  We don't want to copy static variables; there's
1050 	 only one of those, no matter how many times we inline the
1051 	 containing function.  Similarly for globals from an outer
1052 	 function.  */
1053       tree new_decl;
1054 
1055       /* Remap the declaration.  */
1056       new_decl = remap_decl (*tp, id);
1057       gcc_assert (new_decl);
1058       /* Replace this variable with the copy.  */
1059       STRIP_TYPE_NOPS (new_decl);
1060       /* ???  The C++ frontend uses void * pointer zero to initialize
1061          any other type.  This confuses the middle-end type verification.
1062 	 As cloned bodies do not go through gimplification again the fixup
1063 	 there doesn't trigger.  */
1064       if (TREE_CODE (new_decl) == INTEGER_CST
1065 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1066 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1067       *tp = new_decl;
1068       *walk_subtrees = 0;
1069     }
1070   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1071     gcc_unreachable ();
1072   else if (TREE_CODE (*tp) == SAVE_EXPR)
1073     gcc_unreachable ();
1074   else if (TREE_CODE (*tp) == LABEL_DECL
1075 	   && (!DECL_CONTEXT (*tp)
1076 	       || decl_function_context (*tp) == id->src_fn))
1077     /* These may need to be remapped for EH handling.  */
1078     *tp = remap_decl (*tp, id);
1079   else if (TREE_CODE (*tp) == FIELD_DECL)
1080     {
1081       /* If the enclosing record type is variably_modified_type_p, the field
1082 	 has already been remapped.  Otherwise, it need not be.  */
1083       tree *n = id->decl_map->get (*tp);
1084       if (n)
1085 	*tp = *n;
1086       *walk_subtrees = 0;
1087     }
1088   else if (TYPE_P (*tp))
1089     /* Types may need remapping as well.  */
1090     *tp = remap_type (*tp, id);
1091   else if (CONSTANT_CLASS_P (*tp))
1092     {
1093       /* If this is a constant, we have to copy the node iff the type
1094 	 will be remapped.  copy_tree_r will not copy a constant.  */
1095       tree new_type = remap_type (TREE_TYPE (*tp), id);
1096 
1097       if (new_type == TREE_TYPE (*tp))
1098 	*walk_subtrees = 0;
1099 
1100       else if (TREE_CODE (*tp) == INTEGER_CST)
1101 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1102       else
1103 	{
1104 	  *tp = copy_node (*tp);
1105 	  TREE_TYPE (*tp) = new_type;
1106 	}
1107     }
1108   else
1109     {
1110       /* Otherwise, just copy the node.  Note that copy_tree_r already
1111 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
1112 
1113       if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1114 	{
1115 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1116 	     that can happen when a pointer argument is an ADDR_EXPR.
1117 	     Recurse here manually to allow that.  */
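	  /* For instance (illustrative only): if the caller passes &a for
	     a pointer parameter p, a load MEM[(T *)p + 8] copied verbatim
	     would read MEM[(T *)&a + 8]; re-folding the MEM_REF below lets
	     it become a direct reference into 'a' where the types allow.  */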
1118 	  tree ptr = TREE_OPERAND (*tp, 0);
1119 	  tree type = remap_type (TREE_TYPE (*tp), id);
1120 	  tree old = *tp;
1121 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1122 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1123 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1124 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1125 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1126 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1127 	    {
1128 	      MR_DEPENDENCE_CLIQUE (*tp)
1129 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1130 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1131 	    }
1132 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1133 	     remapped a parameter as the property might be valid only
1134 	     for the parameter itself.  */
1135 	  if (TREE_THIS_NOTRAP (old)
1136 	      && (!is_parm (TREE_OPERAND (old, 0))
1137 		  || (!id->transform_parameter && is_parm (ptr))))
1138 	    TREE_THIS_NOTRAP (*tp) = 1;
1139 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1140 	  *walk_subtrees = 0;
1141 	  return NULL;
1142 	}
1143 
1144       /* Here is the "usual case".  Copy this tree node, and then
1145 	 tweak some special cases.  */
1146       copy_tree_r (tp, walk_subtrees, NULL);
1147 
1148       if (TREE_CODE (*tp) != OMP_CLAUSE)
1149 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1150 
1151       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1152 	{
1153 	  /* The copied TARGET_EXPR has never been expanded, even if the
1154 	     original node was expanded already.  */
1155 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1156 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1157 	}
1158       else if (TREE_CODE (*tp) == ADDR_EXPR)
1159 	{
1160 	  /* Variable substitution need not be simple.  In particular,
1161 	     the MEM_REF substitution above.  Make sure that
1162 	     TREE_CONSTANT and friends are up-to-date.  */
1163 	  int invariant = is_gimple_min_invariant (*tp);
1164 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1165 	  recompute_tree_invariant_for_addr_expr (*tp);
1166 
1167 	  /* If this used to be invariant, but is not any longer,
1168 	     then regimplification is probably needed.  */
1169 	  if (invariant && !is_gimple_min_invariant (*tp))
1170 	    id->regimplify = true;
1171 
1172 	  *walk_subtrees = 0;
1173 	}
1174     }
1175 
1176   /* Update the TREE_BLOCK for the cloned expr.  */
1177   if (EXPR_P (*tp))
1178     {
1179       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1180       tree old_block = TREE_BLOCK (*tp);
1181       if (old_block)
1182 	{
1183 	  tree *n;
1184 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1185 	  if (n)
1186 	    new_block = *n;
1187 	}
1188       TREE_SET_BLOCK (*tp, new_block);
1189     }
1190 
1191   /* Keep iterating.  */
1192   return NULL_TREE;
1193 }
1194 
1195 
1196 /* Called from copy_body_id via walk_tree.  DATA is really a
1197    `copy_body_data *'.  */
1198 
1199 tree
1200 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1201 {
1202   copy_body_data *id = (copy_body_data *) data;
1203   tree fn = id->src_fn;
1204   tree new_block;
1205 
1206   /* Begin by recognizing trees that we'll completely rewrite for the
1207      inlining context.  Our output for these trees is completely
1208      different from our input (e.g. RETURN_EXPR is deleted and morphs
1209      into an edge).  Further down, we'll handle trees that get
1210      duplicated and/or tweaked.  */
1211 
1212   /* When requested, RETURN_EXPRs should be transformed to just the
1213      contained MODIFY_EXPR.  The branch semantics of the return will
1214      be handled elsewhere by manipulating the CFG rather than a statement.  */
1215   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1216     {
1217       tree assignment = TREE_OPERAND (*tp, 0);
1218 
1219       /* If we're returning something, just turn that into an
1220 	 assignment to the equivalent of the original RESULT_DECL.
1221 	 If the "assignment" is just the result decl, the result
1222 	 decl has already been set (e.g. a recent "foo (&result_decl,
1223 	 ...)"); just toss the entire RETURN_EXPR.  */
1224       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1225 	{
1226 	  /* Replace the RETURN_EXPR with (a copy of) the
1227 	     MODIFY_EXPR hanging underneath.  */
1228 	  *tp = copy_node (assignment);
1229 	}
1230       else /* Else the RETURN_EXPR returns no value.  */
1231 	{
1232 	  *tp = NULL;
1233 	  return (tree) (void *)1;
1234 	}
1235     }
1236   else if (TREE_CODE (*tp) == SSA_NAME)
1237     {
1238       *tp = remap_ssa_name (*tp, id);
1239       *walk_subtrees = 0;
1240       return NULL;
1241     }
1242 
1243   /* Local variables and labels need to be replaced by equivalent
1244      variables.  We don't want to copy static variables; there's only
1245      one of those, no matter how many times we inline the containing
1246      function.  Similarly for globals from an outer function.  */
1247   else if (auto_var_in_fn_p (*tp, fn))
1248     {
1249       tree new_decl;
1250 
1251       /* Remap the declaration.  */
1252       new_decl = remap_decl (*tp, id);
1253       gcc_assert (new_decl);
1254       /* Replace this variable with the copy.  */
1255       STRIP_TYPE_NOPS (new_decl);
1256       *tp = new_decl;
1257       *walk_subtrees = 0;
1258     }
1259   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1260     copy_statement_list (tp);
1261   else if (TREE_CODE (*tp) == SAVE_EXPR
1262 	   || TREE_CODE (*tp) == TARGET_EXPR)
1263     remap_save_expr (tp, id->decl_map, walk_subtrees);
1264   else if (TREE_CODE (*tp) == LABEL_DECL
1265 	   && (! DECL_CONTEXT (*tp)
1266 	       || decl_function_context (*tp) == id->src_fn))
1267     /* These may need to be remapped for EH handling.  */
1268     *tp = remap_decl (*tp, id);
1269   else if (TREE_CODE (*tp) == BIND_EXPR)
1270     copy_bind_expr (tp, walk_subtrees, id);
1271   /* Types may need remapping as well.  */
1272   else if (TYPE_P (*tp))
1273     *tp = remap_type (*tp, id);
1274 
1275   /* If this is a constant, we have to copy the node iff the type will be
1276      remapped.  copy_tree_r will not copy a constant.  */
1277   else if (CONSTANT_CLASS_P (*tp))
1278     {
1279       tree new_type = remap_type (TREE_TYPE (*tp), id);
1280 
1281       if (new_type == TREE_TYPE (*tp))
1282 	*walk_subtrees = 0;
1283 
1284       else if (TREE_CODE (*tp) == INTEGER_CST)
1285 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1286       else
1287 	{
1288 	  *tp = copy_node (*tp);
1289 	  TREE_TYPE (*tp) = new_type;
1290 	}
1291     }
1292 
1293   /* Otherwise, just copy the node.  Note that copy_tree_r already
1294      knows not to copy VAR_DECLs, etc., so this is safe.  */
1295   else
1296     {
1297       /* Here we handle trees that are not completely rewritten.
1298 	 First we detect some inlining-induced bogosities for
1299 	 discarding.  */
1300       if (TREE_CODE (*tp) == MODIFY_EXPR
1301 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1302 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1303 	{
1304 	  /* Some assignments VAR = VAR; don't generate any rtl code
1305 	     and thus don't count as variable modification.  Avoid
1306 	     keeping bogosities like 0 = 0.  */
1307 	  tree decl = TREE_OPERAND (*tp, 0), value;
1308 	  tree *n;
1309 
1310 	  n = id->decl_map->get (decl);
1311 	  if (n)
1312 	    {
1313 	      value = *n;
1314 	      STRIP_TYPE_NOPS (value);
1315 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1316 		{
1317 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1318 		  return copy_tree_body_r (tp, walk_subtrees, data);
1319 		}
1320 	    }
1321 	}
1322       else if (TREE_CODE (*tp) == INDIRECT_REF)
1323 	{
1324 	  /* Get rid of *& from inline substitutions that can happen when a
1325 	     pointer argument is an ADDR_EXPR.  */
1326 	  tree decl = TREE_OPERAND (*tp, 0);
1327 	  tree *n = id->decl_map->get (decl);
1328 	  if (n)
1329 	    {
1330 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1331 	         it manually here as we'll eventually get ADDR_EXPRs
1332 		 which lie about the type they point to.  In this case
1333 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1334 		 but we absolutely rely on that.  As fold_indirect_ref
1335 	         does other useful transformations, try that first, though.  */
1336 	      tree type = TREE_TYPE (*tp);
1337 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1338 	      tree old = *tp;
1339 	      *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1340 	      if (! *tp)
1341 	        {
1342 		  type = remap_type (type, id);
1343 		  if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1344 		    {
1345 		      *tp
1346 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1347 		      /* ???  We should either assert here or build
1348 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1349 			 incompatible types to our IL.  */
1350 		      if (! *tp)
1351 			*tp = TREE_OPERAND (ptr, 0);
1352 		    }
1353 	          else
1354 		    {
1355 	              *tp = build1 (INDIRECT_REF, type, ptr);
1356 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1357 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1358 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1359 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1360 			 have remapped a parameter as the property might be
1361 			 valid only for the parameter itself.  */
1362 		      if (TREE_THIS_NOTRAP (old)
1363 			  && (!is_parm (TREE_OPERAND (old, 0))
1364 			      || (!id->transform_parameter && is_parm (ptr))))
1365 		        TREE_THIS_NOTRAP (*tp) = 1;
1366 		    }
1367 		}
1368 	      *walk_subtrees = 0;
1369 	      return NULL;
1370 	    }
1371 	}
1372       else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1373 	{
1374 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1375 	     that can happen when a pointer argument is an ADDR_EXPR.
1376 	     Recurse here manually to allow that.  */
1377 	  tree ptr = TREE_OPERAND (*tp, 0);
1378 	  tree type = remap_type (TREE_TYPE (*tp), id);
1379 	  tree old = *tp;
1380 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1381 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1382 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1383 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1384 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1385 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1386 	    {
1387 	      MR_DEPENDENCE_CLIQUE (*tp)
1388 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1389 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1390 	    }
1391 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1392 	     remapped a parameter as the property might be valid only
1393 	     for the parameter itself.  */
1394 	  if (TREE_THIS_NOTRAP (old)
1395 	      && (!is_parm (TREE_OPERAND (old, 0))
1396 		  || (!id->transform_parameter && is_parm (ptr))))
1397 	    TREE_THIS_NOTRAP (*tp) = 1;
1398 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1399 	  *walk_subtrees = 0;
1400 	  return NULL;
1401 	}
1402 
1403       /* Here is the "usual case".  Copy this tree node, and then
1404 	 tweak some special cases.  */
1405       copy_tree_r (tp, walk_subtrees, NULL);
1406 
1407       /* If EXPR has a block defined, map it to the newly constructed block.
1408          When inlining, we want EXPRs without a block to appear in the block
1409 	 of the function call if we are not remapping a type.  */
1410       if (EXPR_P (*tp))
1411 	{
1412 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1413 	  if (TREE_BLOCK (*tp))
1414 	    {
1415 	      tree *n;
1416 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1417 	      if (n)
1418 		new_block = *n;
1419 	    }
1420 	  TREE_SET_BLOCK (*tp, new_block);
1421 	}
1422 
1423       if (TREE_CODE (*tp) != OMP_CLAUSE)
1424 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1425 
1426       /* The copied TARGET_EXPR has never been expanded, even if the
1427 	 original node was expanded already.  */
1428       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1429 	{
1430 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1431 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1432 	}
1433 
1434       /* Variable substitution need not be simple.  In particular, the
1435 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1436 	 and friends are up-to-date.  */
1437       else if (TREE_CODE (*tp) == ADDR_EXPR)
1438 	{
1439 	  int invariant = is_gimple_min_invariant (*tp);
1440 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1441 
1442 	  /* Handle the case where we substituted an INDIRECT_REF
1443 	     into the operand of the ADDR_EXPR.  */
1444 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1445 	      && !id->do_not_fold)
1446 	    {
1447 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1448 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1449 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1450 	      *tp = t;
1451 	    }
1452 	  else
1453 	    recompute_tree_invariant_for_addr_expr (*tp);
1454 
1455 	  /* If this used to be invariant, but is not any longer,
1456 	     then regimplification is probably needed.  */
1457 	  if (invariant && !is_gimple_min_invariant (*tp))
1458 	    id->regimplify = true;
1459 
1460 	  *walk_subtrees = 0;
1461 	}
1462     }
1463 
1464   /* Keep iterating.  */
1465   return NULL_TREE;
1466 }
1467 
1468 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1469    source function, map that to the duplicate EH region number in
1470    the destination function.  */
1471 
1472 static int
1473 remap_eh_region_nr (int old_nr, copy_body_data *id)
1474 {
1475   eh_region old_r, new_r;
1476 
1477   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1478   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1479 
1480   return new_r->index;
1481 }
1482 
1483 /* Similar, but operate on INTEGER_CSTs.  */
1484 
1485 static tree
1486 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1487 {
1488   int old_nr, new_nr;
1489 
1490   old_nr = tree_to_shwi (old_t_nr);
1491   new_nr = remap_eh_region_nr (old_nr, id);
1492 
1493   return build_int_cst (integer_type_node, new_nr);
1494 }
1495 
1496 /* Helper for copy_bb.  Remap statement STMT using the inlining
1497    information in ID.  Return the new statement copy.  */
1498 
1499 static gimple_seq
1500 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1501 {
1502   gimple *copy = NULL;
1503   struct walk_stmt_info wi;
1504   bool skip_first = false;
1505   gimple_seq stmts = NULL;
1506 
1507   if (is_gimple_debug (stmt)
1508       && (gimple_debug_nonbind_marker_p (stmt)
1509 	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1510 	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1511     return NULL;
1512 
1513   /* Begin by recognizing trees that we'll completely rewrite for the
1514      inlining context.  Our output for these trees is completely
1515      different from our input (e.g. RETURN_EXPR is deleted and morphs
1516      into an edge).  Further down, we'll handle trees that get
1517      duplicated and/or tweaked.  */
1518 
1519   /* When requested, GIMPLE_RETURN should be transformed to just the
1520      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1521      be handled elsewhere by manipulating the CFG rather than the
1522      statement.  */
1523   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1524     {
1525       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1526 
1527       /* If we're returning something, just turn that into an
1528 	 assignment to the equivalent of the original RESULT_DECL.
1529 	 If RETVAL is just the result decl, the result decl has
1530 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1531 	 just toss the entire GIMPLE_RETURN.  */
1532       if (retval
1533 	  && (TREE_CODE (retval) != RESULT_DECL
1534 	      && (TREE_CODE (retval) != SSA_NAME
1535 		  || ! SSA_NAME_VAR (retval)
1536 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1537         {
1538 	  copy = gimple_build_assign (id->do_not_unshare
1539 				      ? id->retvar : unshare_expr (id->retvar),
1540 				      retval);
1541 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1542 	  skip_first = true;
1543 	}
1544       else
1545 	return NULL;
1546     }
1547   else if (gimple_has_substatements (stmt))
1548     {
1549       gimple_seq s1, s2;
1550 
1551       /* When cloning bodies from the C++ front end, we will be handed bodies
1552 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1553 	 have embedded statements.  */
1554       switch (gimple_code (stmt))
1555 	{
1556 	case GIMPLE_BIND:
1557 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1558 	  break;
1559 
1560 	case GIMPLE_CATCH:
1561 	  {
1562 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1563 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1564 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1565 	  }
1566 	  break;
1567 
1568 	case GIMPLE_EH_FILTER:
1569 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1570 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1571 	  break;
1572 
1573 	case GIMPLE_TRY:
1574 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1575 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1576 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1577 	  break;
1578 
1579 	case GIMPLE_WITH_CLEANUP_EXPR:
1580 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1581 	  copy = gimple_build_wce (s1);
1582 	  break;
1583 
1584 	case GIMPLE_OMP_PARALLEL:
1585 	  {
1586 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1587 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1588 	    copy = gimple_build_omp_parallel
1589 	             (s1,
1590 		      gimple_omp_parallel_clauses (omp_par_stmt),
1591 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1592 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1593 	  }
1594 	  break;
1595 
1596 	case GIMPLE_OMP_TASK:
1597 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1598 	  copy = gimple_build_omp_task
1599 	           (s1,
1600 		    gimple_omp_task_clauses (stmt),
1601 		    gimple_omp_task_child_fn (stmt),
1602 		    gimple_omp_task_data_arg (stmt),
1603 		    gimple_omp_task_copy_fn (stmt),
1604 		    gimple_omp_task_arg_size (stmt),
1605 		    gimple_omp_task_arg_align (stmt));
1606 	  break;
1607 
1608 	case GIMPLE_OMP_FOR:
1609 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1610 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1611 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1612 				       gimple_omp_for_clauses (stmt),
1613 				       gimple_omp_for_collapse (stmt), s2);
1614 	  {
1615 	    size_t i;
1616 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1617 	      {
1618 		gimple_omp_for_set_index (copy, i,
1619 					  gimple_omp_for_index (stmt, i));
1620 		gimple_omp_for_set_initial (copy, i,
1621 					    gimple_omp_for_initial (stmt, i));
1622 		gimple_omp_for_set_final (copy, i,
1623 					  gimple_omp_for_final (stmt, i));
1624 		gimple_omp_for_set_incr (copy, i,
1625 					 gimple_omp_for_incr (stmt, i));
1626 		gimple_omp_for_set_cond (copy, i,
1627 					 gimple_omp_for_cond (stmt, i));
1628 	      }
1629 	  }
1630 	  break;
1631 
1632 	case GIMPLE_OMP_MASTER:
1633 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1634 	  copy = gimple_build_omp_master (s1);
1635 	  break;
1636 
1637 	case GIMPLE_OMP_TASKGROUP:
1638 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1639 	  copy = gimple_build_omp_taskgroup
1640 		   (s1, gimple_omp_taskgroup_clauses (stmt));
1641 	  break;
1642 
1643 	case GIMPLE_OMP_ORDERED:
1644 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1645 	  copy = gimple_build_omp_ordered
1646 		   (s1,
1647 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1648 	  break;
1649 
1650 	case GIMPLE_OMP_SECTION:
1651 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1652 	  copy = gimple_build_omp_section (s1);
1653 	  break;
1654 
1655 	case GIMPLE_OMP_SECTIONS:
1656 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1657 	  copy = gimple_build_omp_sections
1658 	           (s1, gimple_omp_sections_clauses (stmt));
1659 	  break;
1660 
1661 	case GIMPLE_OMP_SINGLE:
1662 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1663 	  copy = gimple_build_omp_single
1664 	           (s1, gimple_omp_single_clauses (stmt));
1665 	  break;
1666 
1667 	case GIMPLE_OMP_TARGET:
1668 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1669 	  copy = gimple_build_omp_target
1670 		   (s1, gimple_omp_target_kind (stmt),
1671 		    gimple_omp_target_clauses (stmt));
1672 	  break;
1673 
1674 	case GIMPLE_OMP_TEAMS:
1675 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1676 	  copy = gimple_build_omp_teams
1677 		   (s1, gimple_omp_teams_clauses (stmt));
1678 	  break;
1679 
1680 	case GIMPLE_OMP_CRITICAL:
1681 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1682 	  copy = gimple_build_omp_critical (s1,
1683 					    gimple_omp_critical_name
1684 					      (as_a <gomp_critical *> (stmt)),
1685 					    gimple_omp_critical_clauses
1686 					      (as_a <gomp_critical *> (stmt)));
1687 	  break;
1688 
1689 	case GIMPLE_TRANSACTION:
1690 	  {
1691 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1692 	    gtransaction *new_trans_stmt;
1693 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1694 				   id);
1695 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1696 	    gimple_transaction_set_subcode (new_trans_stmt,
1697 	      gimple_transaction_subcode (old_trans_stmt));
1698 	    gimple_transaction_set_label_norm (new_trans_stmt,
1699 	      gimple_transaction_label_norm (old_trans_stmt));
1700 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1701 	      gimple_transaction_label_uninst (old_trans_stmt));
1702 	    gimple_transaction_set_label_over (new_trans_stmt,
1703 	      gimple_transaction_label_over (old_trans_stmt));
1704 	  }
1705 	  break;
1706 
1707 	default:
1708 	  gcc_unreachable ();
1709 	}
1710     }
1711   else
1712     {
1713       if (gimple_assign_copy_p (stmt)
1714 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1715 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1716 	{
1717 	  /* Here we handle statements that are not completely rewritten.
1718 	     First we detect some inlining-induced bogosities for
1719 	     discarding.  */
1720 
1721 	  /* Some assignments VAR = VAR; don't generate any rtl code
1722 	     and thus don't count as variable modification.  Avoid
1723 	     keeping bogosities like 0 = 0.  */
1724 	  tree decl = gimple_assign_lhs (stmt), value;
1725 	  tree *n;
1726 
1727 	  n = id->decl_map->get (decl);
1728 	  if (n)
1729 	    {
1730 	      value = *n;
1731 	      STRIP_TYPE_NOPS (value);
1732 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1733 		return NULL;
1734 	    }
1735 	}
1736 
1737       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1738 	 in a block that we aren't copying during tree_function_versioning,
1739 	 just drop the clobber stmt.  */
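      /* For instance (an invented example): "*ptr_5 ={v} {CLOBBER};" where
	 ptr_5 is defined only in a block outside id->blocks_to_copy would
	 otherwise reference a definition that is never copied, so the
	 clobber is dropped.  */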
1740       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1741 	{
1742 	  tree lhs = gimple_assign_lhs (stmt);
1743 	  if (TREE_CODE (lhs) == MEM_REF
1744 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1745 	    {
1746 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1747 	      if (gimple_bb (def_stmt)
1748 		  && !bitmap_bit_p (id->blocks_to_copy,
1749 				    gimple_bb (def_stmt)->index))
1750 		return NULL;
1751 	    }
1752 	}
1753 
1754       if (gimple_debug_bind_p (stmt))
1755 	{
1756 	  gdebug *copy
1757 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1758 				       gimple_debug_bind_get_value (stmt),
1759 				       stmt);
1760 	  if (id->reset_location)
1761 	    gimple_set_location (copy, input_location);
1762 	  id->debug_stmts.safe_push (copy);
1763 	  gimple_seq_add_stmt (&stmts, copy);
1764 	  return stmts;
1765 	}
1766       if (gimple_debug_source_bind_p (stmt))
1767 	{
1768 	  gdebug *copy = gimple_build_debug_source_bind
1769 	                   (gimple_debug_source_bind_get_var (stmt),
1770 			    gimple_debug_source_bind_get_value (stmt),
1771 			    stmt);
1772 	  if (id->reset_location)
1773 	    gimple_set_location (copy, input_location);
1774 	  id->debug_stmts.safe_push (copy);
1775 	  gimple_seq_add_stmt (&stmts, copy);
1776 	  return stmts;
1777 	}
1778       if (gimple_debug_nonbind_marker_p (stmt))
1779 	{
1780 	  /* If the inlined function has too many debug markers,
1781 	     don't copy them.  */
1782 	  if (id->src_cfun->debug_marker_count
1783 	      > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1784 	    return stmts;
1785 
1786 	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1787 	  if (id->reset_location)
1788 	    gimple_set_location (copy, input_location);
1789 	  id->debug_stmts.safe_push (copy);
1790 	  gimple_seq_add_stmt (&stmts, copy);
1791 	  return stmts;
1792 	}
1793 
1794       /* Create a new deep copy of the statement.  */
1795       copy = gimple_copy (stmt);
1796 
1797       /* Clear flags that need revisiting.  */
1798       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1799         {
1800 	  if (gimple_call_tail_p (call_stmt))
1801 	    gimple_call_set_tail (call_stmt, false);
1802 	  if (gimple_call_from_thunk_p (call_stmt))
1803 	    gimple_call_set_from_thunk (call_stmt, false);
1804 	  if (gimple_call_internal_p (call_stmt))
1805 	    switch (gimple_call_internal_fn (call_stmt))
1806 	      {
1807 	      case IFN_GOMP_SIMD_LANE:
1808 	      case IFN_GOMP_SIMD_VF:
1809 	      case IFN_GOMP_SIMD_LAST_LANE:
1810 	      case IFN_GOMP_SIMD_ORDERED_START:
1811 	      case IFN_GOMP_SIMD_ORDERED_END:
1812 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1813 	        break;
1814 	      default:
1815 		break;
1816 	      }
1817 	}
1818 
1819       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1820 	 RESX and EH_DISPATCH.  */
1821       if (id->eh_map)
1822 	switch (gimple_code (copy))
1823 	  {
1824 	  case GIMPLE_CALL:
1825 	    {
1826 	      tree r, fndecl = gimple_call_fndecl (copy);
1827 	      if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1828 		switch (DECL_FUNCTION_CODE (fndecl))
1829 		  {
1830 		  case BUILT_IN_EH_COPY_VALUES:
1831 		    r = gimple_call_arg (copy, 1);
1832 		    r = remap_eh_region_tree_nr (r, id);
1833 		    gimple_call_set_arg (copy, 1, r);
1834 		    /* FALLTHRU */
1835 
1836 		  case BUILT_IN_EH_POINTER:
1837 		  case BUILT_IN_EH_FILTER:
1838 		    r = gimple_call_arg (copy, 0);
1839 		    r = remap_eh_region_tree_nr (r, id);
1840 		    gimple_call_set_arg (copy, 0, r);
1841 		    break;
1842 
1843 		  default:
1844 		    break;
1845 		  }
1846 
1847 	      /* Reset alias info if we didn't apply measures to
1848 		 keep it valid over inlining by setting DECL_PT_UID.  */
1849 	      if (!id->src_cfun->gimple_df
1850 		  || !id->src_cfun->gimple_df->ipa_pta)
1851 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1852 	    }
1853 	    break;
1854 
1855 	  case GIMPLE_RESX:
1856 	    {
1857 	      gresx *resx_stmt = as_a <gresx *> (copy);
1858 	      int r = gimple_resx_region (resx_stmt);
1859 	      r = remap_eh_region_nr (r, id);
1860 	      gimple_resx_set_region (resx_stmt, r);
1861 	    }
1862 	    break;
1863 
1864 	  case GIMPLE_EH_DISPATCH:
1865 	    {
1866 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1867 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1868 	      r = remap_eh_region_nr (r, id);
1869 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1870 	    }
1871 	    break;
1872 
1873 	  default:
1874 	    break;
1875 	  }
1876     }
1877 
1878   /* If STMT has a block defined, map it to the newly constructed block.  */
1879   if (tree block = gimple_block (copy))
1880     {
1881       tree *n;
1882       n = id->decl_map->get (block);
1883       gcc_assert (n);
1884       gimple_set_block (copy, *n);
1885     }
1886 
1887   if (id->reset_location)
1888     gimple_set_location (copy, input_location);
1889 
1890   /* Debug statements ought to be rebuilt and not copied.  */
1891   gcc_checking_assert (!is_gimple_debug (copy));
1892 
1893   /* Remap all the operands in COPY.  */
1894   memset (&wi, 0, sizeof (wi));
1895   wi.info = id;
1896   if (skip_first)
1897     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1898   else
1899     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1900 
1901   /* Clear the copied virtual operands.  We are not remapping them here
1902      but are going to recreate them from scratch.  */
1903   if (gimple_has_mem_ops (copy))
1904     {
1905       gimple_set_vdef (copy, NULL_TREE);
1906       gimple_set_vuse (copy, NULL_TREE);
1907     }
1908 
1909   gimple_seq_add_stmt (&stmts, copy);
1910   return stmts;
1911 }
1912 
1913 
1914 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
1915    later.  */
1916 
1917 static basic_block
1918 copy_bb (copy_body_data *id, basic_block bb,
1919          profile_count num, profile_count den)
1920 {
1921   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1922   basic_block copy_basic_block;
1923   tree decl;
1924   basic_block prev;
1925 
1926   profile_count::adjust_for_ipa_scaling (&num, &den);
1927 
1928   /* Search for previous copied basic block.  */
1929   prev = bb->prev_bb;
1930   while (!prev->aux)
1931     prev = prev->prev_bb;
1932 
1933   /* create_basic_block() will append every new block to
1934      basic_block_info automatically.  */
1935   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1936   copy_basic_block->count = bb->count.apply_scale (num, den);
1937 
1938   copy_gsi = gsi_start_bb (copy_basic_block);
1939 
1940   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1941     {
1942       gimple_seq stmts;
1943       gimple *stmt = gsi_stmt (gsi);
1944       gimple *orig_stmt = stmt;
1945       gimple_stmt_iterator stmts_gsi;
1946       bool stmt_added = false;
1947 
1948       id->regimplify = false;
1949       stmts = remap_gimple_stmt (stmt, id);
1950 
1951       if (gimple_seq_empty_p (stmts))
1952 	continue;
1953 
1954       seq_gsi = copy_gsi;
1955 
1956       for (stmts_gsi = gsi_start (stmts);
1957 	   !gsi_end_p (stmts_gsi); )
1958 	{
1959 	  stmt = gsi_stmt (stmts_gsi);
1960 
1961 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1962 	  gsi_next (&stmts_gsi);
1963 
1964 	  if (gimple_nop_p (stmt))
1965 	      continue;
1966 
1967 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1968 					    orig_stmt);
1969 
1970 	  /* With return slot optimization we can end up with
1971 	     non-gimple (foo *)&this->m, fix that here.  */
1972 	  if (is_gimple_assign (stmt)
1973 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1974 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1975 	    {
1976 	      tree new_rhs;
1977 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1978 						  gimple_assign_rhs1 (stmt),
1979 						  true, NULL, false,
1980 						  GSI_CONTINUE_LINKING);
1981 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1982 	      id->regimplify = false;
1983 	    }
1984 
1985 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1986 
1987 	  if (id->regimplify)
1988 	    gimple_regimplify_operands (stmt, &seq_gsi);
1989 
1990 	  stmt_added = true;
1991 	}
1992 
1993       if (!stmt_added)
1994 	continue;
1995 
1996       /* If copy_basic_block has been empty at the start of this iteration,
1997 	 call gsi_start_bb again to get at the newly added statements.  */
1998       if (gsi_end_p (copy_gsi))
1999 	copy_gsi = gsi_start_bb (copy_basic_block);
2000       else
2001 	gsi_next (&copy_gsi);
2002 
2003       /* Process the new statement.  The call to gimple_regimplify_operands
2004 	 possibly turned the statement into multiple statements; we
2005 	 need to process all of them.  */
2006       do
2007 	{
2008 	  tree fn;
2009 	  gcall *call_stmt;
2010 
2011 	  stmt = gsi_stmt (copy_gsi);
2012 	  call_stmt = dyn_cast <gcall *> (stmt);
2013 	  if (call_stmt
2014 	      && gimple_call_va_arg_pack_p (call_stmt)
2015 	      && id->call_stmt
2016 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
2017 	    {
2018 	      /* __builtin_va_arg_pack () should be replaced by
2019 		 all arguments corresponding to ... in the caller.  */
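	      /* Illustrative sketch (not taken from any testcase): when
		 inlining "int f (int a, ...) { return g (a, __builtin_va_arg_pack ()); }"
		 into the call "f (1, 2, 3)", the pack in the copied call to g
		 is replaced by the caller's anonymous arguments 2 and 3.  */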
2020 	      tree p;
2021 	      gcall *new_call;
2022 	      vec<tree> argarray;
2023 	      size_t nargs = gimple_call_num_args (id->call_stmt);
2024 	      size_t n;
2025 
2026 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2027 		nargs--;
2028 
2029 	      /* Create the new array of arguments.  */
2030 	      n = nargs + gimple_call_num_args (call_stmt);
2031 	      argarray.create (n);
2032 	      argarray.safe_grow_cleared (n);
2033 
2034 	      /* Copy all the arguments before '...'  */
2035 	      memcpy (argarray.address (),
2036 		      gimple_call_arg_ptr (call_stmt, 0),
2037 		      gimple_call_num_args (call_stmt) * sizeof (tree));
2038 
2039 	      /* Append the arguments passed in '...'  */
2040 	      memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2041 		      gimple_call_arg_ptr (id->call_stmt, 0)
2042 		      + (gimple_call_num_args (id->call_stmt) - nargs),
2043 		      nargs * sizeof (tree));
2044 
2045 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2046 						argarray);
2047 
2048 	      argarray.release ();
2049 
2050 	      /* Copy all GIMPLE_CALL flags, location and block, except
2051 		 GF_CALL_VA_ARG_PACK.  */
2052 	      gimple_call_copy_flags (new_call, call_stmt);
2053 	      gimple_call_set_va_arg_pack (new_call, false);
2054 	      /* location includes block.  */
2055 	      gimple_set_location (new_call, gimple_location (stmt));
2056 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2057 
2058 	      gsi_replace (&copy_gsi, new_call, false);
2059 	      stmt = new_call;
2060 	    }
2061 	  else if (call_stmt
2062 		   && id->call_stmt
2063 		   && (decl = gimple_call_fndecl (stmt))
2064 		   && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2065 	    {
2066 	      /* __builtin_va_arg_pack_len () should be replaced by
2067 		 the number of anonymous arguments.  */
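	      /* For example (invented numbers): if the caller's statement is
		 "f (1, 2, 3)" and f has a single named parameter, nargs below
		 ends up as 2, so "lhs = __builtin_va_arg_pack_len ();" becomes
		 "lhs = 2;" unless the caller itself uses __builtin_va_arg_pack,
		 in which case 2 is added to the caller's own pack length.  */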
2068 	      size_t nargs = gimple_call_num_args (id->call_stmt);
2069 	      tree count, p;
2070 	      gimple *new_stmt;
2071 
2072 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2073 		nargs--;
2074 
2075 	      if (!gimple_call_lhs (stmt))
2076 		{
2077 		  /* Drop unused calls.  */
2078 		  gsi_remove (&copy_gsi, false);
2079 		  continue;
2080 		}
2081 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2082 		{
2083 		  count = build_int_cst (integer_type_node, nargs);
2084 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2085 		  gsi_replace (&copy_gsi, new_stmt, false);
2086 		  stmt = new_stmt;
2087 		}
2088 	      else if (nargs != 0)
2089 		{
2090 		  tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2091 		  count = build_int_cst (integer_type_node, nargs);
2092 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2093 						  PLUS_EXPR, newlhs, count);
2094 		  gimple_call_set_lhs (stmt, newlhs);
2095 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2096 		}
2097 	    }
2098 	  else if (call_stmt
2099 		   && id->call_stmt
2100 		   && gimple_call_internal_p (stmt)
2101 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2102 	    {
2103 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
2104 	      gsi_remove (&copy_gsi, false);
2105 	      continue;
2106 	    }
2107 
2108 	  /* Statements produced by inlining can be unfolded, especially
2109 	     when we constant propagated some operands.  We can't fold
2110 	     them right now for two reasons:
2111 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
2112 	     2) we can't change function calls to builtins.
2113 	     So we just mark the statement for later folding.  We mark
2114 	     all new statements, instead of just the statements changed
2115 	     by some nontrivial substitution, so that even statements made
2116 	     foldable indirectly are updated.  If this turns out to be
2117 	     expensive, copy_body can be told to watch for nontrivial
2118 	     changes.  */
2119 	  if (id->statements_to_fold)
2120 	    id->statements_to_fold->add (stmt);
2121 
2122 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2123 	     callgraph edges and update or duplicate them.  */
2124 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2125 	    {
2126 	      struct cgraph_edge *edge;
2127 
2128 	      switch (id->transform_call_graph_edges)
2129 		{
2130 		case CB_CGE_DUPLICATE:
2131 		  edge = id->src_node->get_edge (orig_stmt);
2132 		  if (edge)
2133 		    {
2134 		      struct cgraph_edge *old_edge = edge;
2135 		      profile_count old_cnt = edge->count;
2136 		      edge = edge->clone (id->dst_node, call_stmt,
2137 					  gimple_uid (stmt),
2138 					  num, den,
2139 					  true);
2140 
2141 		      /* Speculative calls consist of two edges - direct and
2142 			 indirect.  Duplicate the whole thing and distribute
2143 			 frequencies accordingly.  */
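		      /* Worked example (invented counts): with original
			 direct/indirect counts 90/10 and a copied block of
			 count 50, prob below is 10/100, so the cloned
			 indirect edge gets count 5 and the direct one 45.  */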
2144 		      if (edge->speculative)
2145 			{
2146 			  struct cgraph_edge *direct, *indirect;
2147 			  struct ipa_ref *ref;
2148 
2149 			  gcc_assert (!edge->indirect_unknown_callee);
2150 			  old_edge->speculative_call_info (direct, indirect, ref);
2151 
2152 			  profile_count indir_cnt = indirect->count;
2153 			  indirect = indirect->clone (id->dst_node, call_stmt,
2154 						      gimple_uid (stmt),
2155 						      num, den,
2156 						      true);
2157 
2158 			  profile_probability prob
2159 			     = indir_cnt.probability_in (old_cnt + indir_cnt);
2160 			  indirect->count
2161 			     = copy_basic_block->count.apply_probability (prob);
2162 			  edge->count = copy_basic_block->count - indirect->count;
2163 			  id->dst_node->clone_reference (ref, stmt);
2164 			}
2165 		      else
2166 			edge->count = copy_basic_block->count;
2167 		    }
2168 		  break;
2169 
2170 		case CB_CGE_MOVE_CLONES:
2171 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2172 								call_stmt);
2173 		  edge = id->dst_node->get_edge (stmt);
2174 		  break;
2175 
2176 		case CB_CGE_MOVE:
2177 		  edge = id->dst_node->get_edge (orig_stmt);
2178 		  if (edge)
2179 		    edge->set_call_stmt (call_stmt);
2180 		  break;
2181 
2182 		default:
2183 		  gcc_unreachable ();
2184 		}
2185 
2186 	      /* Constant propagation on arguments done during inlining
2187 		 may create a new direct call.  Produce an edge for it.  */
2188 	      if ((!edge
2189 		   || (edge->indirect_inlining_edge
2190 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2191 		  && id->dst_node->definition
2192 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2193 		{
2194 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2195 
2196 		  /* We have a missing edge in the callgraph.  This can happen
2197 		     when previous inlining turned an indirect call into a
2198 		     direct call by constant propagating arguments, or when we
2199 		     are producing a dead clone (for further cloning).  In all
2200 		     other cases we hit a bug (incorrect node sharing is the
2201 		     most common reason for missing edges).  */
2202 		  gcc_assert (!dest->definition
2203 			      || dest->address_taken
2204 		  	      || !id->src_node->definition
2205 			      || !id->dst_node->definition);
2206 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2207 		    id->dst_node->create_edge_including_clones
2208 		      (dest, orig_stmt, call_stmt, bb->count,
2209 		       CIF_ORIGINALLY_INDIRECT_CALL);
2210 		  else
2211 		    id->dst_node->create_edge (dest, call_stmt,
2212 					bb->count)->inline_failed
2213 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2214 		  if (dump_file)
2215 		    {
2216 		      fprintf (dump_file, "Created new direct edge to %s\n",
2217 			       dest->name ());
2218 		    }
2219 		}
2220 
2221 	      notice_special_calls (as_a <gcall *> (stmt));
2222 	    }
2223 
2224 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2225 				      id->eh_map, id->eh_lp_nr);
2226 
2227 	  gsi_next (&copy_gsi);
2228 	}
2229       while (!gsi_end_p (copy_gsi));
2230 
2231       copy_gsi = gsi_last_bb (copy_basic_block);
2232     }
2233 
2234   return copy_basic_block;
2235 }
2236 
2237 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2238    SSA form is quite easy, since the dominator relationship for the old
2239    basic blocks does not change.
2240 
2241    There is however an exception where inlining might change the dominator
2242    relation across EH edges from basic blocks within the inlined function
2243    to landing pads in the function we inline into.
2244 
2245    The function fills in the PHI_RESULTs of such PHI nodes if they refer
2246    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2247    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2248    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2249    set, and this means that there will be no overlapping live ranges
2250    for the underlying symbol.
2251 
2252    This might change in the future if we allow redirecting of EH edges;
2253    we might then want to change the way we build the CFG pre-inlining to
2254    include all the possible edges.  */
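/* A minimal sketch of the mechanism (assumed wording, not from any dump):
   for a PHI node in a landing pad reached abnormally from the copied block
   BB, the argument on the edge from BB is copied from the argument the PHI
   already carries on the corresponding edge from RET_BB.  */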
2255 static void
2256 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2257 				  bool can_throw, bool nonlocal_goto)
2258 {
2259   edge e;
2260   edge_iterator ei;
2261 
2262   FOR_EACH_EDGE (e, ei, bb->succs)
2263     if (!e->dest->aux
2264 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2265       {
2266 	gphi *phi;
2267 	gphi_iterator si;
2268 
2269 	if (!nonlocal_goto)
2270 	  gcc_assert (e->flags & EDGE_EH);
2271 
2272 	if (!can_throw)
2273 	  gcc_assert (!(e->flags & EDGE_EH));
2274 
2275 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2276 	  {
2277 	    edge re;
2278 
2279 	    phi = si.phi ();
2280 
2281 	    /* For abnormal goto/call edges the receiver can be the
2282 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2283 
2284 	    gcc_assert ((e->flags & EDGE_EH)
2285 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2286 
2287 	    re = find_edge (ret_bb, e->dest);
2288 	    gcc_checking_assert (re);
2289 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2290 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2291 
2292 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2293 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2294 	  }
2295       }
2296 }
2297 
2298 /* Insert clobbers for automatic variables of the inlined function
2299    ID->src_fn at the start of basic block BB.  */
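/* Concretely (an illustrative sketch): for each suitable automatic variable
   of the inlined function that was remapped to some NEW_VAR, a statement
   "NEW_VAR ={v} {CLOBBER};" is inserted after the labels of BB, marking the
   variable as dead when the landing pad is entered.  */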
2300 
2301 static void
2302 add_clobbers_to_eh_landing_pad (basic_block bb, copy_body_data *id)
2303 {
2304   tree var;
2305   unsigned int i;
2306   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2307     if (VAR_P (var)
2308 	&& !DECL_HARD_REGISTER (var)
2309 	&& !TREE_THIS_VOLATILE (var)
2310 	&& !DECL_HAS_VALUE_EXPR_P (var)
2311 	&& !is_gimple_reg (var)
2312 	&& auto_var_in_fn_p (var, id->src_fn)
2313 	&& !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2314       {
2315 	tree *t = id->decl_map->get (var);
2316 	if (!t)
2317 	  continue;
2318 	tree new_var = *t;
2319 	if (VAR_P (new_var)
2320 	    && !DECL_HARD_REGISTER (new_var)
2321 	    && !TREE_THIS_VOLATILE (new_var)
2322 	    && !DECL_HAS_VALUE_EXPR_P (new_var)
2323 	    && !is_gimple_reg (new_var)
2324 	    && auto_var_in_fn_p (new_var, id->dst_fn))
2325 	  {
2326 	    gimple_stmt_iterator gsi = gsi_after_labels (bb);
2327 	    tree clobber = build_clobber (TREE_TYPE (new_var));
2328 	    gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2329 	    gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2330 	  }
2331       }
2332 }
2333 
2334 /* Copy edges from BB into its copy constructed earlier, scaling the
2335    profile accordingly.  Assume the aux pointers point to the copies of
2336    each BB.  Return true if any debug stmts are left after a statement
2337    that must end the basic block.  */
2338 
2339 static bool
2340 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2341 		   basic_block ret_bb, basic_block abnormal_goto_dest,
2342 		   copy_body_data *id)
2343 {
2344   basic_block new_bb = (basic_block) bb->aux;
2345   edge_iterator ei;
2346   edge old_edge;
2347   gimple_stmt_iterator si;
2348   bool need_debug_cleanup = false;
2349 
2350   /* Use the indices from the original blocks to create edges for the
2351      new ones.  */
2352   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2353     if (!(old_edge->flags & EDGE_EH))
2354       {
2355 	edge new_edge;
2356 	int flags = old_edge->flags;
2357 	location_t locus = old_edge->goto_locus;
2358 
2359 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2360 	if (old_edge->dest->index == EXIT_BLOCK
2361 	    && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2362 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2363 	  flags |= EDGE_FALLTHRU;
2364 
2365 	new_edge
2366 	  = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2367 	new_edge->probability = old_edge->probability;
2368 	if (!id->reset_location)
2369 	  new_edge->goto_locus = remap_location (locus, id);
2370       }
2371 
2372   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2373     return false;
2374 
2375   /* When doing function splitting, we must decrease the count of the return
2376      block, which was previously reachable from blocks we did not copy.  */
2377   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2378     FOR_EACH_EDGE (old_edge, ei, bb->preds)
2379       if (old_edge->src->index != ENTRY_BLOCK
2380 	  && !old_edge->src->aux)
2381 	new_bb->count -= old_edge->count ().apply_scale (num, den);
2382 
2383   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2384     {
2385       gimple *copy_stmt;
2386       bool can_throw, nonlocal_goto;
2387 
2388       copy_stmt = gsi_stmt (si);
2389       if (!is_gimple_debug (copy_stmt))
2390 	update_stmt (copy_stmt);
2391 
2392       /* Do this before the possible split_block.  */
2393       gsi_next (&si);
2394 
2395       /* If this tree could throw an exception, there are two
2396          cases where we need to add abnormal edge(s): the
2397          tree wasn't in a region and there is a "current
2398          region" in the caller; or the original tree had
2399          EH edges.  In both cases split the block after the tree,
2400          and add abnormal edge(s) as needed; we need both
2401          those from the callee and the caller.
2402          We check whether the copy can throw, because the const
2403          propagation can change an INDIRECT_REF which throws
2404          into a COMPONENT_REF which doesn't.  If the copy
2405          can throw, the original could also throw.  */
2406       can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2407       nonlocal_goto
2408 	= (stmt_can_make_abnormal_goto (copy_stmt)
2409 	   && !computed_goto_p (copy_stmt));
2410 
2411       if (can_throw || nonlocal_goto)
2412 	{
2413 	  if (!gsi_end_p (si))
2414 	    {
2415 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2416 		gsi_next (&si);
2417 	      if (gsi_end_p (si))
2418 		need_debug_cleanup = true;
2419 	    }
2420 	  if (!gsi_end_p (si))
2421 	    /* Note that bb's predecessor edges aren't necessarily
2422 	       right at this point; split_block doesn't care.  */
2423 	    {
2424 	      edge e = split_block (new_bb, copy_stmt);
2425 
2426 	      new_bb = e->dest;
2427 	      new_bb->aux = e->src->aux;
2428 	      si = gsi_start_bb (new_bb);
2429 	    }
2430 	}
2431 
2432       bool update_probs = false;
2433 
2434       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2435 	{
2436 	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2437 	  update_probs = true;
2438 	}
2439       else if (can_throw)
2440 	{
2441 	  make_eh_edges (copy_stmt);
2442 	  update_probs = true;
2443 	}
2444 
2445       /* EH edges may not match old edges.  Copy as much as possible.  */
2446       if (update_probs)
2447 	{
2448           edge e;
2449           edge_iterator ei;
2450 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2451 
2452           FOR_EACH_EDGE (old_edge, ei, bb->succs)
2453             if ((old_edge->flags & EDGE_EH)
2454 		&& (e = find_edge (copy_stmt_bb,
2455 				   (basic_block) old_edge->dest->aux))
2456 		&& (e->flags & EDGE_EH))
2457 	      e->probability = old_edge->probability;
2458 
2459           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2460 	    if (e->flags & EDGE_EH)
2461 	      {
2462 		if (!e->probability.initialized_p ())
2463 		  e->probability = profile_probability::never ();
2464 		if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2465 		  {
2466 		    add_clobbers_to_eh_landing_pad (e->dest, id);
2467 		    id->add_clobbers_to_eh_landing_pads = 0;
2468 		  }
2469 	      }
2470         }
2471 
2472 
2473       /* If the call we inline cannot make an abnormal goto, do not add
2474          additional abnormal edges but only retain those already present
2475 	 in the original function body.  */
2476       if (abnormal_goto_dest == NULL)
2477 	nonlocal_goto = false;
2478       if (nonlocal_goto)
2479 	{
2480 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2481 
2482 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2483 	    nonlocal_goto = false;
2484 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2485 	     in OpenMP regions which aren't allowed to be left abnormally.
2486 	     So, no need to add an abnormal edge in that case.  */
2487 	  else if (is_gimple_call (copy_stmt)
2488 		   && gimple_call_internal_p (copy_stmt)
2489 		   && (gimple_call_internal_fn (copy_stmt)
2490 		       == IFN_ABNORMAL_DISPATCHER)
2491 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2492 	    nonlocal_goto = false;
2493 	  else
2494 	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2495 				   EDGE_ABNORMAL);
2496 	}
2497 
2498       if ((can_throw || nonlocal_goto)
2499 	  && gimple_in_ssa_p (cfun))
2500 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2501 					  can_throw, nonlocal_goto);
2502     }
2503   return need_debug_cleanup;
2504 }
2505 
2506 /* Copy the PHIs.  All blocks and edges have been copied; some blocks
2507    were possibly split and new outgoing EH edges inserted.
2508    BB points to the block of the original function and AUX pointers link
2509    the original and newly copied blocks.  */
2510 
2511 static void
2512 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2513 {
2514   basic_block const new_bb = (basic_block) bb->aux;
2515   edge_iterator ei;
2516   gphi *phi;
2517   gphi_iterator si;
2518   edge new_edge;
2519   bool inserted = false;
2520 
2521   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2522     {
2523       tree res, new_res;
2524       gphi *new_phi;
2525 
2526       phi = si.phi ();
2527       res = PHI_RESULT (phi);
2528       new_res = res;
2529       if (!virtual_operand_p (res))
2530 	{
2531 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2532 	  if (EDGE_COUNT (new_bb->preds) == 0)
2533 	    {
2534 	      /* Technically we'd want an SSA_DEFAULT_DEF here... */
2535 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2536 	    }
2537 	  else
2538 	    {
2539 	      new_phi = create_phi_node (new_res, new_bb);
2540 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2541 		{
2542 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2543 					     bb);
2544 		  tree arg;
2545 		  tree new_arg;
2546 		  edge_iterator ei2;
2547 		  location_t locus;
2548 
2549 		  /* When doing partial cloning, we allow PHIs on the entry
2550 		     block as long as all the arguments are the same.
2551 		     Find any input edge to see the argument to copy.  */
2552 		  if (!old_edge)
2553 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2554 		      if (!old_edge->src->aux)
2555 			break;
2556 
2557 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2558 		  new_arg = arg;
2559 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2560 		  gcc_assert (new_arg);
2561 		  /* With return slot optimization we can end up with
2562 		     non-gimple (foo *)&this->m, fix that here.  */
2563 		  if (TREE_CODE (new_arg) != SSA_NAME
2564 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2565 		      && !is_gimple_val (new_arg))
2566 		    {
2567 		      gimple_seq stmts = NULL;
2568 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2569 						      NULL);
2570 		      gsi_insert_seq_on_edge (new_edge, stmts);
2571 		      inserted = true;
2572 		    }
2573 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2574 		  if (id->reset_location)
2575 		    locus = input_location;
2576 		  else
2577 		    locus = remap_location (locus, id);
2578 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2579 		}
2580 	    }
2581 	}
2582     }
2583 
2584   /* Commit the delayed edge insertions.  */
2585   if (inserted)
2586     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2587       gsi_commit_one_edge_insert (new_edge, NULL);
2588 }
2589 
2590 
2591 /* Wrapper for remap_decl so it can be used as a callback.  */
2592 
2593 static tree
2594 remap_decl_1 (tree decl, void *data)
2595 {
2596   return remap_decl (decl, (copy_body_data *) data);
2597 }
2598 
2599 /* Build struct function and associated data structures for the new clone
2600    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2601    changes cfun to the function of NEW_FNDECL (and current_function_decl too).  */
2602 
2603 static void
2604 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2605 {
2606   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2607 
2608   if (!DECL_ARGUMENTS (new_fndecl))
2609     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2610   if (!DECL_RESULT (new_fndecl))
2611     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2612 
2613   /* Register specific tree functions.  */
2614   gimple_register_cfg_hooks ();
2615 
2616   /* Get clean struct function.  */
2617   push_struct_function (new_fndecl);
2618 
2619   /* We will rebuild these, so just sanity check that they are empty.  */
2620   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2621   gcc_assert (cfun->local_decls == NULL);
2622   gcc_assert (cfun->cfg == NULL);
2623   gcc_assert (cfun->decl == new_fndecl);
2624 
2625   /* Copy items we preserve during cloning.  */
2626   cfun->static_chain_decl = src_cfun->static_chain_decl;
2627   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2628   cfun->function_end_locus = src_cfun->function_end_locus;
2629   cfun->curr_properties = src_cfun->curr_properties;
2630   cfun->last_verified = src_cfun->last_verified;
2631   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2632   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2633   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2634   cfun->stdarg = src_cfun->stdarg;
2635   cfun->after_inlining = src_cfun->after_inlining;
2636   cfun->can_throw_non_call_exceptions
2637     = src_cfun->can_throw_non_call_exceptions;
2638   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2639   cfun->returns_struct = src_cfun->returns_struct;
2640   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2641 
2642   init_empty_tree_cfg ();
2643 
2644   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2645 
2646   profile_count num = count;
2647   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2648   profile_count::adjust_for_ipa_scaling (&num, &den);
2649 
2650   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2651     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2652 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2653   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2654     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2655 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2656   if (src_cfun->eh)
2657     init_eh_for_function ();
2658 
2659   if (src_cfun->gimple_df)
2660     {
2661       init_tree_ssa (cfun);
2662       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2663       if (cfun->gimple_df->in_ssa_p)
2664 	init_ssa_operands (cfun);
2665     }
2666 }
2667 
2668 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2669    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2670    successor has multiple predecessors, reset the debug stmts' values;
2671    otherwise keep them.  */
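/* For instance (hypothetical): if NEW_BB ends with a call that can throw,
   followed by "# DEBUG x => y_1", the bind is moved (or copied) to each
   successor; in a successor with several predecessors its value is reset,
   leaving "# DEBUG x => NULL".  */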
2672 
2673 static void
2674 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2675 {
2676   edge e;
2677   edge_iterator ei;
2678   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2679 
2680   if (gsi_end_p (si)
2681       || gsi_one_before_end_p (si)
2682       || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2683 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2684     return;
2685 
2686   FOR_EACH_EDGE (e, ei, new_bb->succs)
2687     {
2688       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2689       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2690       while (is_gimple_debug (gsi_stmt (ssi)))
2691 	{
2692 	  gimple *stmt = gsi_stmt (ssi);
2693 	  gdebug *new_stmt;
2694 	  tree var;
2695 	  tree value;
2696 
2697 	  /* For the last edge move the debug stmts instead of copying
2698 	     them.  */
2699 	  if (ei_one_before_end_p (ei))
2700 	    {
2701 	      si = ssi;
2702 	      gsi_prev (&ssi);
2703 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2704 		{
2705 		  gimple_debug_bind_reset_value (stmt);
2706 		  gimple_set_location (stmt, UNKNOWN_LOCATION);
2707 		}
2708 	      gsi_remove (&si, false);
2709 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2710 	      continue;
2711 	    }
2712 
2713 	  if (gimple_debug_bind_p (stmt))
2714 	    {
2715 	      var = gimple_debug_bind_get_var (stmt);
2716 	      if (single_pred_p (e->dest))
2717 		{
2718 		  value = gimple_debug_bind_get_value (stmt);
2719 		  value = unshare_expr (value);
2720 		  new_stmt = gimple_build_debug_bind (var, value, stmt);
2721 		}
2722 	      else
2723 		new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2724 	    }
2725 	  else if (gimple_debug_source_bind_p (stmt))
2726 	    {
2727 	      var = gimple_debug_source_bind_get_var (stmt);
2728 	      value = gimple_debug_source_bind_get_value (stmt);
2729 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2730 	    }
2731 	  else if (gimple_debug_nonbind_marker_p (stmt))
2732 	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2733 	  else
2734 	    gcc_unreachable ();
2735 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2736 	  id->debug_stmts.safe_push (new_stmt);
2737 	  gsi_prev (&ssi);
2738 	}
2739     }
2740 }
2741 
2742 /* Make a copy of the sub-loops of SRC_PARENT and place them
2743    as children of DEST_PARENT.  */
2744 
2745 static void
2746 copy_loops (copy_body_data *id,
2747 	    struct loop *dest_parent, struct loop *src_parent)
2748 {
2749   struct loop *src_loop = src_parent->inner;
2750   while (src_loop)
2751     {
2752       if (!id->blocks_to_copy
2753 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2754 	{
2755 	  struct loop *dest_loop = alloc_loop ();
2756 
2757 	  /* Assign the new loop its header and latch and associate
2758 	     those with the new loop.  */
2759 	  dest_loop->header = (basic_block)src_loop->header->aux;
2760 	  dest_loop->header->loop_father = dest_loop;
2761 	  if (src_loop->latch != NULL)
2762 	    {
2763 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2764 	      dest_loop->latch->loop_father = dest_loop;
2765 	    }
2766 
2767 	  /* Copy loop meta-data.  */
2768 	  copy_loop_info (src_loop, dest_loop);
2769 	  if (dest_loop->unroll)
2770 	    cfun->has_unroll = true;
2771 	  if (dest_loop->force_vectorize)
2772 	    cfun->has_force_vectorize_loops = true;
2773 	  if (id->src_cfun->last_clique != 0)
2774 	    dest_loop->owned_clique
2775 	      = remap_dependence_clique (id,
2776 					 src_loop->owned_clique
2777 					 ? src_loop->owned_clique : 1);
2778 
2779 	  /* Finally place it into the loop array and the loop tree.  */
2780 	  place_new_loop (cfun, dest_loop);
2781 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2782 
2783 	  if (src_loop->simduid)
2784 	    {
2785 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2786 	      cfun->has_simduid_loops = true;
2787 	    }
2788 
2789 	  /* Recurse.  */
2790 	  copy_loops (id, dest_loop, src_loop);
2791 	}
2792       src_loop = src_loop->next;
2793     }
2794 }
2795 
2796 /* Call redirect_call_stmt_to_callee on all calls in BB.  */
2797 
2798 void
2799 redirect_all_calls (copy_body_data * id, basic_block bb)
2800 {
2801   gimple_stmt_iterator si;
2802   gimple *last = last_stmt (bb);
2803   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2804     {
2805       gimple *stmt = gsi_stmt (si);
2806       if (is_gimple_call (stmt))
2807 	{
2808 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2809 	  if (edge)
2810 	    {
2811 	      edge->redirect_call_stmt_to_callee ();
2812 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2813 		gimple_purge_dead_eh_edges (bb);
2814 	    }
2815 	}
2816     }
2817 }
2818 
2819 /* Make a copy of the body of FN so that it can be inserted inline in
2820    another function.  Walks FN via CFG, returns new fndecl.  */
2821 
2822 static tree
2823 copy_cfg_body (copy_body_data * id,
2824 	       basic_block entry_block_map, basic_block exit_block_map,
2825 	       basic_block new_entry)
2826 {
2827   tree callee_fndecl = id->src_fn;
2828   /* Original cfun for the callee, doesn't change.  */
2829   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2830   struct function *cfun_to_copy;
2831   basic_block bb;
2832   tree new_fndecl = NULL;
2833   bool need_debug_cleanup = false;
2834   int last;
2835   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2836   profile_count num = entry_block_map->count;
2837 
2838   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2839 
2840   /* Register specific tree functions.  */
2841   gimple_register_cfg_hooks ();
2842 
2843   /* If we are inlining just a region of the function, make sure to connect
2844      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2845      be part of a loop, we must compute the frequency and probability of
2846      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2847      probabilities of edges incoming from the nonduplicated region.  */
2848   if (new_entry)
2849     {
2850       edge e;
2851       edge_iterator ei;
2852       den = profile_count::zero ();
2853 
2854       FOR_EACH_EDGE (e, ei, new_entry->preds)
2855 	if (!e->src->aux)
2856 	  den += e->count ();
2857       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2858     }
2859 
2860   profile_count::adjust_for_ipa_scaling (&num, &den);
2861 
2862   /* Must have a CFG here at this point.  */
2863   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2864 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2865 
2866 
2867   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2868   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2869   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2870   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2871 
2872   /* Duplicate any exception-handling regions.  */
2873   if (cfun->eh)
2874     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2875 				       remap_decl_1, id);
2876 
2877   /* Use aux pointers to map the original blocks to their copies.  */
2878   FOR_EACH_BB_FN (bb, cfun_to_copy)
2879     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2880       {
2881 	basic_block new_bb = copy_bb (id, bb, num, den);
2882 	bb->aux = new_bb;
2883 	new_bb->aux = bb;
2884 	new_bb->loop_father = entry_block_map->loop_father;
2885       }
2886 
2887   last = last_basic_block_for_fn (cfun);
2888 
2889   /* Now that we've duplicated the blocks, duplicate their edges.  */
2890   basic_block abnormal_goto_dest = NULL;
2891   if (id->call_stmt
2892       && stmt_can_make_abnormal_goto (id->call_stmt))
2893     {
2894       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2895 
2896       bb = gimple_bb (id->call_stmt);
2897       gsi_next (&gsi);
2898       if (gsi_end_p (gsi))
2899 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2900     }
2901   FOR_ALL_BB_FN (bb, cfun_to_copy)
2902     if (!id->blocks_to_copy
2903 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2904       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2905 					       abnormal_goto_dest, id);
2906 
2907   if (new_entry)
2908     {
2909       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2910 			  EDGE_FALLTHRU);
2911       e->probability = profile_probability::always ();
2912     }
2913 
2914   /* Duplicate the loop tree, if available and wanted.  */
2915   if (loops_for_fn (src_cfun) != NULL
2916       && current_loops != NULL)
2917     {
2918       copy_loops (id, entry_block_map->loop_father,
2919 		  get_loop (src_cfun, 0));
2920       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2921       loops_state_set (LOOPS_NEED_FIXUP);
2922     }
2923 
2924   /* If the loop tree in the source function needed fixup, mark the
2925      destination loop tree for fixup, too.  */
2926   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2927     loops_state_set (LOOPS_NEED_FIXUP);
2928 
2929   if (gimple_in_ssa_p (cfun))
2930     FOR_ALL_BB_FN (bb, cfun_to_copy)
2931       if (!id->blocks_to_copy
2932 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2933 	copy_phis_for_bb (bb, id);
2934 
2935   FOR_ALL_BB_FN (bb, cfun_to_copy)
2936     if (bb->aux)
2937       {
2938 	if (need_debug_cleanup
2939 	    && bb->index != ENTRY_BLOCK
2940 	    && bb->index != EXIT_BLOCK)
2941 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2942 	/* Update call edge destinations.  This cannot be done before loop
2943 	   info is updated, because we may split basic blocks.  */
2944 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2945 	    && bb->index != ENTRY_BLOCK
2946 	    && bb->index != EXIT_BLOCK)
2947 	  redirect_all_calls (id, (basic_block)bb->aux);
2948 	((basic_block)bb->aux)->aux = NULL;
2949 	bb->aux = NULL;
2950       }
2951 
2952   /* Zero out AUX fields of blocks newly created during EH edge
2953      insertion.  */
2954   for (; last < last_basic_block_for_fn (cfun); last++)
2955     {
2956       if (need_debug_cleanup)
2957 	maybe_move_debug_stmts_to_successors (id,
2958 					      BASIC_BLOCK_FOR_FN (cfun, last));
2959       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2960       /* Update call edge destinations.  This cannot be done before loop
2961 	 info is updated, because we may split basic blocks.  */
2962       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2963 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2964     }
2965   entry_block_map->aux = NULL;
2966   exit_block_map->aux = NULL;
2967 
2968   if (id->eh_map)
2969     {
2970       delete id->eh_map;
2971       id->eh_map = NULL;
2972     }
2973   if (id->dependence_map)
2974     {
2975       delete id->dependence_map;
2976       id->dependence_map = NULL;
2977     }
2978 
2979   return new_fndecl;
2980 }
2981 
2982 /* Copy the debug STMT using ID.  We deal with these statements in a
2983    special way: if any variable in their VALUE expression wasn't
2984    remapped yet, we won't remap it, because that would get decl uids
2985    out of sync, causing codegen differences between -g and -g0.  If
2986    this arises, we drop the VALUE expression altogether.  */
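/* As a hypothetical example: a bind "# DEBUG i => a + 1" whose variable "a"
   has not been remapped would be turned into "# DEBUG i => NULL" rather
   than forcing a new remapped decl into existence.  */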
2987 
2988 static void
2989 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2990 {
2991   tree t, *n;
2992   struct walk_stmt_info wi;
2993 
2994   if (tree block = gimple_block (stmt))
2995     {
2996       n = id->decl_map->get (block);
2997       gimple_set_block (stmt, n ? *n : id->block);
2998     }
2999 
3000   if (gimple_debug_nonbind_marker_p (stmt))
3001     return;
3002 
3003   /* Remap all the operands in COPY.  */
3004   memset (&wi, 0, sizeof (wi));
3005   wi.info = id;
3006 
3007   processing_debug_stmt = 1;
3008 
3009   if (gimple_debug_source_bind_p (stmt))
3010     t = gimple_debug_source_bind_get_var (stmt);
3011   else if (gimple_debug_bind_p (stmt))
3012     t = gimple_debug_bind_get_var (stmt);
3013   else
3014     gcc_unreachable ();
3015 
3016   if (TREE_CODE (t) == PARM_DECL && id->debug_map
3017       && (n = id->debug_map->get (t)))
3018     {
3019       gcc_assert (VAR_P (*n));
3020       t = *n;
3021     }
3022   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3023     /* T is a non-localized variable.  */;
3024   else
3025     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3026 
3027   if (gimple_debug_bind_p (stmt))
3028     {
3029       gimple_debug_bind_set_var (stmt, t);
3030 
3031       if (gimple_debug_bind_has_value_p (stmt))
3032 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3033 		   remap_gimple_op_r, &wi, NULL);
3034 
3035       /* Punt if any decl couldn't be remapped.  */
3036       if (processing_debug_stmt < 0)
3037 	gimple_debug_bind_reset_value (stmt);
3038     }
3039   else if (gimple_debug_source_bind_p (stmt))
3040     {
3041       gimple_debug_source_bind_set_var (stmt, t);
3042       /* When inlining, if the source bind refers to one of the optimized-away
3043 	 parameters, change the source bind into a normal debug bind
3044 	 referring to the corresponding DEBUG_EXPR_DECL that should have
3045 	 been bound before the call stmt.  */
3046       t = gimple_debug_source_bind_get_value (stmt);
3047       if (t != NULL_TREE
3048 	  && TREE_CODE (t) == PARM_DECL
3049 	  && id->call_stmt)
3050 	{
3051 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3052 	  unsigned int i;
3053 	  if (debug_args != NULL)
3054 	    {
3055 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3056 		if ((**debug_args)[i] == DECL_ORIGIN (t)
3057 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3058 		  {
3059 		    t = (**debug_args)[i + 1];
3060 		    stmt->subcode = GIMPLE_DEBUG_BIND;
3061 		    gimple_debug_bind_set_value (stmt, t);
3062 		    break;
3063 		  }
3064 	    }
3065 	}
3066       if (gimple_debug_source_bind_p (stmt))
3067 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3068 		   remap_gimple_op_r, &wi, NULL);
3069     }
3070 
3071   processing_debug_stmt = 0;
3072 
3073   update_stmt (stmt);
3074 }
3075 
3076 /* Process deferred debug stmts.  In order to give values better odds
3077    of being successfully remapped, we delay the processing of debug
3078    stmts until all other stmts that might require remapping are
3079    processed.  */
3080 
3081 static void
3082 copy_debug_stmts (copy_body_data *id)
3083 {
3084   size_t i;
3085   gdebug *stmt;
3086 
3087   if (!id->debug_stmts.exists ())
3088     return;
3089 
3090   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3091     copy_debug_stmt (stmt, id);
3092 
3093   id->debug_stmts.release ();
3094 }
3095 
3096 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3097    another function.  */
3098 
3099 static tree
3100 copy_tree_body (copy_body_data *id)
3101 {
3102   tree fndecl = id->src_fn;
3103   tree body = DECL_SAVED_TREE (fndecl);
3104 
3105   walk_tree (&body, copy_tree_body_r, id, NULL);
3106 
3107   return body;
3108 }
3109 
3110 /* Make a copy of the body of FN so that it can be inserted inline in
3111    another function.  */
3112 
3113 static tree
3114 copy_body (copy_body_data *id,
3115 	   basic_block entry_block_map, basic_block exit_block_map,
3116 	   basic_block new_entry)
3117 {
3118   tree fndecl = id->src_fn;
3119   tree body;
3120 
3121   /* If this body has a CFG, walk CFG and copy.  */
3122   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3123   body = copy_cfg_body (id, entry_block_map, exit_block_map,
3124 			new_entry);
3125   copy_debug_stmts (id);
3126 
3127   return body;
3128 }
3129 
3130 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3131    defined in function FN, or of a data member thereof.  */
3132 
3133 static bool
3134 self_inlining_addr_expr (tree value, tree fn)
3135 {
3136   tree var;
3137 
3138   if (TREE_CODE (value) != ADDR_EXPR)
3139     return false;
3140 
3141   var = get_base_address (TREE_OPERAND (value, 0));
3142 
3143   return var && auto_var_in_fn_p (var, fn);
3144 }
3145 
3146 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3147    lexical block and line number information from BASE_STMT, if given,
3148    or from the last stmt of the block otherwise.  */
3149 
3150 static gimple *
3151 insert_init_debug_bind (copy_body_data *id,
3152 			basic_block bb, tree var, tree value,
3153 			gimple *base_stmt)
3154 {
3155   gimple *note;
3156   gimple_stmt_iterator gsi;
3157   tree tracked_var;
3158 
3159   if (!gimple_in_ssa_p (id->src_cfun))
3160     return NULL;
3161 
3162   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3163     return NULL;
3164 
3165   tracked_var = target_for_debug_bind (var);
3166   if (!tracked_var)
3167     return NULL;
3168 
3169   if (bb)
3170     {
3171       gsi = gsi_last_bb (bb);
3172       if (!base_stmt && !gsi_end_p (gsi))
3173 	base_stmt = gsi_stmt (gsi);
3174     }
3175 
3176   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3177 
3178   if (bb)
3179     {
3180       if (!gsi_end_p (gsi))
3181 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3182       else
3183 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3184     }
3185 
3186   return note;
3187 }
3188 
3189 static void
3190 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3191 {
3192   /* If VAR represents a zero-sized variable, it's possible that the
3193      assignment statement may result in no gimple statements.  */
3194   if (init_stmt)
3195     {
3196       gimple_stmt_iterator si = gsi_last_bb (bb);
3197 
3198       /* We can end up with init statements that store to a non-register
3199          from a rhs with a conversion.  Handle that here by forcing the
3200 	 rhs into a temporary.  gimple_regimplify_operands is not
3201 	 prepared to do this for us.  */
3202       if (!is_gimple_debug (init_stmt)
3203 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3204 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3205 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3206 	{
3207 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3208 			     gimple_expr_type (init_stmt),
3209 			     gimple_assign_rhs1 (init_stmt));
3210 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3211 					  GSI_NEW_STMT);
3212 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3213 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3214 	}
3215       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3216       gimple_regimplify_operands (init_stmt, &si);
3217 
3218       if (!is_gimple_debug (init_stmt))
3219 	{
3220 	  tree def = gimple_assign_lhs (init_stmt);
3221 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3222 	}
3223     }
3224 }
3225 
3226 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3227    at the end of BB.  When BB is NULL, we return the init statement to be
3228    output later.  */
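/* As an illustrative sketch: when inlining a call `foo (a_1 + 1)' whose
   callee declares `int p', P is replaced by a new local in the caller
   and, unless the value can be substituted directly, an init statement
   along the lines of

     p_2 = a_1 + 1;

   is emitted at the end of BB (exact names and whether SSA names are
   used depend on context).  */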
3229 static gimple *
3230 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3231 		     basic_block bb, tree *vars)
3232 {
3233   gimple *init_stmt = NULL;
3234   tree var;
3235   tree rhs = value;
3236   tree def = (gimple_in_ssa_p (cfun)
3237 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3238 
3239   if (value
3240       && value != error_mark_node
3241       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3242     {
3243       /* If we can match up types by promotion/demotion do so.  */
3244       if (fold_convertible_p (TREE_TYPE (p), value))
3245 	rhs = fold_convert (TREE_TYPE (p), value);
3246       else
3247 	{
3248 	  /* ???  For valid programs we should not end up here.
3249 	     Still if we end up with truly mismatched types here, fall back
3250 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3251 	     GIMPLE to the following passes.  */
3252 	  if (!is_gimple_reg_type (TREE_TYPE (value))
3253 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3254 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3255 	  else
3256 	    rhs = build_zero_cst (TREE_TYPE (p));
3257 	}
3258     }
3259 
3260   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3261      here since the type of this decl must be visible to the calling
3262      function.  */
3263   var = copy_decl_to_var (p, id);
3264 
3265   /* Declare this new variable.  */
3266   DECL_CHAIN (var) = *vars;
3267   *vars = var;
3268 
3269   /* Make gimplifier happy about this variable.  */
3270   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3271 
3272   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3273      we would not need to create a new variable here at all were it
3274      not for debug info.  Still, we can just use the argument
3275      value.  */
3276   if (TREE_READONLY (p)
3277       && !TREE_ADDRESSABLE (p)
3278       && value && !TREE_SIDE_EFFECTS (value)
3279       && !def)
3280     {
3281       /* We may produce non-gimple trees by adding NOPs or introduce
3282 	 invalid sharing when the operand is not really constant.
3283 	 It is not a big deal to prohibit constant propagation here as
3284 	 we will constant propagate in the DOM1 pass anyway.  */
3285       if (is_gimple_min_invariant (value)
3286 	  && useless_type_conversion_p (TREE_TYPE (p),
3287 						 TREE_TYPE (value))
3288 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3289 	     the base variable isn't a local variable of the inlined
3290 	     function, e.g., when doing recursive inlining, direct or
3291 	     mutually-recursive or whatever, which is why we don't
3292 	     just test whether fn == current_function_decl.  */
3293 	  && ! self_inlining_addr_expr (value, fn))
3294 	{
3295 	  insert_decl_map (id, p, value);
3296 	  insert_debug_decl_map (id, p, var);
3297 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3298 	}
3299     }
3300 
3301   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3302      that way, when the PARM_DECL is encountered, it will be
3303      automatically replaced by the VAR_DECL.  */
3304   insert_decl_map (id, p, var);
3305 
3306   /* Even if P was TREE_READONLY, the new VAR should not be.
3307      In the original code, we would have constructed a
3308      temporary, and then the function body would have never
3309      changed the value of P.  However, now, we will be
3310      constructing VAR directly.  The constructor body may
3311      change its value multiple times as it is being
3312      constructed.  Therefore, it must not be TREE_READONLY;
3313      the back-end assumes that TREE_READONLY variable is
3314      assigned to only once.  */
3315   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3316     TREE_READONLY (var) = 0;
3317 
3318   /* If there is no setup required and we are in SSA, take the easy route
3319      of replacing all SSA names representing the function parameter by the
3320      SSA name passed to the function.
3321 
3322      We need to construct a map for the variable anyway, as it might be used
3323      in different SSA names when the parameter is set in the function.
3324 
3325      Do the replacement at -O0 for const arguments replaced by a constant.
3326      This is important for builtin_constant_p and other constructs requiring
3327      a constant argument to be visible in the inlined function body.  */
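  /* E.g. when inlining `foo (5)', the callee's default definition of P is
     mapped directly to the constant 5, so no initialization statement is
     needed for it in the copied body (a sketch of the common case).  */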
3328   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3329       && (optimize
3330           || (TREE_READONLY (p)
3331 	      && is_gimple_min_invariant (rhs)))
3332       && (TREE_CODE (rhs) == SSA_NAME
3333 	  || is_gimple_min_invariant (rhs))
3334       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3335     {
3336       insert_decl_map (id, def, rhs);
3337       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3338     }
3339 
3340   /* If the value of the argument is never used, we need not care about
3341      initializing it.  */
3342   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3343     {
3344       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3345       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3346     }
3347 
3348   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3349      the argument to the proper type in case it was promoted.  */
3350   if (value)
3351     {
3352       if (rhs == error_mark_node)
3353 	{
3354 	  insert_decl_map (id, p, var);
3355 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3356 	}
3357 
3358       STRIP_USELESS_TYPE_CONVERSION (rhs);
3359 
3360       /* If we are in SSA form properly remap the default definition
3361          or assign to a dummy SSA name if the parameter is unused and
3362 	 we are not optimizing.  */
3363       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3364 	{
3365 	  if (def)
3366 	    {
3367 	      def = remap_ssa_name (def, id);
3368 	      init_stmt = gimple_build_assign (def, rhs);
3369 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3370 	      set_ssa_default_def (cfun, var, NULL);
3371 	    }
3372 	  else if (!optimize)
3373 	    {
3374 	      def = make_ssa_name (var);
3375 	      init_stmt = gimple_build_assign (def, rhs);
3376 	    }
3377 	}
3378       else
3379         init_stmt = gimple_build_assign (var, rhs);
3380 
3381       if (bb && init_stmt)
3382         insert_init_stmt (id, bb, init_stmt);
3383     }
3384   return init_stmt;
3385 }
3386 
3387 /* Generate code to initialize the parameters of the function at the
3388    top of the stack in ID from the GIMPLE_CALL STMT.  */
3389 
3390 static void
3391 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3392 			       tree fn, basic_block bb)
3393 {
3394   tree parms;
3395   size_t i;
3396   tree p;
3397   tree vars = NULL_TREE;
3398   tree static_chain = gimple_call_chain (stmt);
3399 
3400   /* Figure out what the parameters are.  */
3401   parms = DECL_ARGUMENTS (fn);
3402 
3403   /* Loop through the parameter declarations, replacing each with an
3404      equivalent VAR_DECL, appropriately initialized.  */
3405   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3406     {
3407       tree val;
3408       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3409       setup_one_parameter (id, p, val, fn, bb, &vars);
3410     }
3411   /* After remapping parameters remap their types.  This has to be done
3412      in a second loop over all parameters to appropriately remap
3413      variable sized arrays when the size is specified in a
3414      parameter following the array.  */
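  /* For example, if the type of array parameter A refers to a size
     parameter N that appears later in the parameter list (possible e.g.
     with GNU parameter forward declarations), the remapped type of A's
     replacement must use the remapped N, which only exists once all
     parameters have been processed.  */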
3415   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3416     {
3417       tree *varp = id->decl_map->get (p);
3418       if (varp && VAR_P (*varp))
3419 	{
3420 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3421 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3422 	  tree var = *varp;
3423 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3424 	  /* Also remap the default definition if it was remapped
3425 	     to the default definition of the parameter replacement
3426 	     by the parameter setup.  */
3427 	  if (def)
3428 	    {
3429 	      tree *defp = id->decl_map->get (def);
3430 	      if (defp
3431 		  && TREE_CODE (*defp) == SSA_NAME
3432 		  && SSA_NAME_VAR (*defp) == var)
3433 		TREE_TYPE (*defp) = TREE_TYPE (var);
3434 	    }
3435 	}
3436     }
3437 
3438   /* Initialize the static chain.  */
3439   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3440   gcc_assert (fn != current_function_decl);
3441   if (p)
3442     {
3443       /* No static chain?  Seems like a bug in tree-nested.c.  */
3444       gcc_assert (static_chain);
3445 
3446       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3447     }
3448 
3449   declare_inline_vars (id->block, vars);
3450 }
3451 
3452 
3453 /* Declare a return variable to replace the RESULT_DECL for the
3454    function we are calling.  An appropriate DECL_STMT is returned.
3455    The USE_STMT is filled to contain a use of the declaration to
3456    indicate the return value of the function.
3457 
3458    RETURN_SLOT, if non-null, is the place in which to store the result.  It
3459    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3460    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3461 
3462    The return value is a (possibly null) value that holds the result
3463    as seen by the caller.  */
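/* As an informal sketch: for a call `x = foo ()' MODIFY_DEST is `x' and,
   when it can be reused safely, the callee's RESULT_DECL is remapped
   straight to it; otherwise a temporary created from the RESULT_DECL
   holds the result and an expression using it is returned as the value
   seen by the caller.  */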
3464 
3465 static tree
3466 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3467 			 basic_block entry_bb)
3468 {
3469   tree callee = id->src_fn;
3470   tree result = DECL_RESULT (callee);
3471   tree callee_type = TREE_TYPE (result);
3472   tree caller_type;
3473   tree var, use;
3474 
3475   /* Handle type-mismatches in the function declaration return type
3476      vs. the call expression.  */
3477   if (modify_dest)
3478     caller_type = TREE_TYPE (modify_dest);
3479   else
3480     caller_type = TREE_TYPE (TREE_TYPE (callee));
3481 
3482   /* We don't need to do anything for functions that don't return anything.  */
3483   if (VOID_TYPE_P (callee_type))
3484     return NULL_TREE;
3485 
3486   /* If there was a return slot, then the return value is the
3487      dereferenced address of that object.  */
3488   if (return_slot)
3489     {
3490       /* The front end shouldn't have used both return_slot and
3491 	 a modify expression.  */
3492       gcc_assert (!modify_dest);
3493       if (DECL_BY_REFERENCE (result))
3494 	{
3495 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3496 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3497 
3498 	  /* We are going to construct *&return_slot and we can't do that
3499 	     for variables believed to be not addressable.
3500 
3501 	     FIXME: This check can possibly trigger, because values returned
3502 	     via the return slot optimization are not believed to have their
3503 	     address taken by alias analysis.  */
3504 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3505 	  var = return_slot_addr;
3506 	}
3507       else
3508 	{
3509 	  var = return_slot;
3510 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3511 	  if (TREE_ADDRESSABLE (result))
3512 	    mark_addressable (var);
3513 	}
3514       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3515            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3516 	  && !DECL_GIMPLE_REG_P (result)
3517 	  && DECL_P (var))
3518 	DECL_GIMPLE_REG_P (var) = 0;
3519       use = NULL;
3520       goto done;
3521     }
3522 
3523   /* All types requiring non-trivial constructors should have been handled.  */
3524   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3525 
3526   /* Attempt to avoid creating a new temporary variable.  */
3527   if (modify_dest
3528       && TREE_CODE (modify_dest) != SSA_NAME)
3529     {
3530       bool use_it = false;
3531 
3532       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3533       if (!useless_type_conversion_p (callee_type, caller_type))
3534 	use_it = false;
3535 
3536       /* ??? If we're assigning to a variable sized type, then we must
3537 	 reuse the destination variable, because we've no good way to
3538 	 create variable sized temporaries at this point.  */
3539       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3540 	use_it = true;
3541 
3542       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3543 	 reuse it as the result of the call directly.  Don't do this if
3544 	 it would promote MODIFY_DEST to addressable.  */
3545       else if (TREE_ADDRESSABLE (result))
3546 	use_it = false;
3547       else
3548 	{
3549 	  tree base_m = get_base_address (modify_dest);
3550 
3551 	  /* If the base isn't a decl, then it's a pointer, and we don't
3552 	     know where that's going to go.  */
3553 	  if (!DECL_P (base_m))
3554 	    use_it = false;
3555 	  else if (is_global_var (base_m))
3556 	    use_it = false;
3557 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3558 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3559 		   && !DECL_GIMPLE_REG_P (result)
3560 		   && DECL_GIMPLE_REG_P (base_m))
3561 	    use_it = false;
3562 	  else if (!TREE_ADDRESSABLE (base_m))
3563 	    use_it = true;
3564 	}
3565 
3566       if (use_it)
3567 	{
3568 	  var = modify_dest;
3569 	  use = NULL;
3570 	  goto done;
3571 	}
3572     }
3573 
3574   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3575 
3576   var = copy_result_decl_to_var (result, id);
3577   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3578 
3579   /* Do not have the rest of GCC warn about this variable as it should
3580      not be visible to the user.  */
3581   TREE_NO_WARNING (var) = 1;
3582 
3583   declare_inline_vars (id->block, var);
3584 
3585   /* Build the use expr.  If the return type of the function was
3586      promoted, convert it back to the expected type.  */
3587   use = var;
3588   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3589     {
3590       /* If we can match up types by promotion/demotion do so.  */
3591       if (fold_convertible_p (caller_type, var))
3592 	use = fold_convert (caller_type, var);
3593       else
3594 	{
3595 	  /* ???  For valid programs we should not end up here.
3596 	     Still if we end up with truly mismatched types here, fall back
3597 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3598 	     passes.  */
3599 	  /* Prevent var from being written into SSA form.  */
3600 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3601 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3602 	    DECL_GIMPLE_REG_P (var) = false;
3603 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3604 	    TREE_ADDRESSABLE (var) = true;
3605 	  use = fold_build2 (MEM_REF, caller_type,
3606 			     build_fold_addr_expr (var),
3607 			     build_int_cst (ptr_type_node, 0));
3608 	}
3609     }
3610 
3611   STRIP_USELESS_TYPE_CONVERSION (use);
3612 
3613   if (DECL_BY_REFERENCE (result))
3614     {
3615       TREE_ADDRESSABLE (var) = 1;
3616       var = build_fold_addr_expr (var);
3617     }
3618 
3619  done:
3620   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3621      way, when the RESULT_DECL is encountered, it will be
3622      automatically replaced by the VAR_DECL.
3623 
3624      When returning by reference, ensure that RESULT_DECL remaps to
3625      gimple_val.  */
3626   if (DECL_BY_REFERENCE (result)
3627       && !is_gimple_val (var))
3628     {
3629       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3630       insert_decl_map (id, result, temp);
3631       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3632 	 it's default_def SSA_NAME.  */
3633 	 its default_def SSA_NAME.  */
3634 	  && is_gimple_reg (result))
3635 	{
3636 	  temp = make_ssa_name (temp);
3637 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3638 	}
3639       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3640     }
3641   else
3642     insert_decl_map (id, result, var);
3643 
3644   /* Remember this so we can ignore it in remap_decls.  */
3645   id->retvar = var;
3646   return use;
3647 }
3648 
3649 /* Determine if the function can be copied.  If so, return NULL.  If
3650    not, return a string describing the reason for failure.  */
3651 
3652 const char *
3653 copy_forbidden (struct function *fun)
3654 {
3655   const char *reason = fun->cannot_be_copied_reason;
3656 
3657   /* Only examine the function once.  */
3658   if (fun->cannot_be_copied_set)
3659     return reason;
3660 
3661   /* We cannot copy a function that receives a non-local goto
3662      because we cannot remap the destination label used in the
3663      function that is performing the non-local goto.  */
3664   /* ??? Actually, this should be possible, if we work at it.
3665      No doubt there's just a handful of places that simply
3666      assume it doesn't happen and don't substitute properly.  */
3667   if (fun->has_nonlocal_label)
3668     {
3669       reason = G_("function %q+F can never be copied "
3670 		  "because it receives a non-local goto");
3671       goto fail;
3672     }
3673 
3674   if (fun->has_forced_label_in_static)
3675     {
3676       reason = G_("function %q+F can never be copied because it saves "
3677 		  "address of local label in a static variable");
3678       goto fail;
3679     }
3680 
3681  fail:
3682   fun->cannot_be_copied_reason = reason;
3683   fun->cannot_be_copied_set = true;
3684   return reason;
3685 }
3686 
3687 
3688 static const char *inline_forbidden_reason;
3689 
3690 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3691    iff a function cannot be inlined.  Also sets the reason why. */
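/* The returned tree node is only tested for being non-NULL by the caller
   (inline_forbidden_p); the interesting output is the
   inline_forbidden_reason string set by this callback.  */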
3692 
3693 static tree
3694 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3695 			 struct walk_stmt_info *wip)
3696 {
3697   tree fn = (tree) wip->info;
3698   tree t;
3699   gimple *stmt = gsi_stmt (*gsi);
3700 
3701   switch (gimple_code (stmt))
3702     {
3703     case GIMPLE_CALL:
3704       /* Refuse to inline an alloca call unless the user explicitly forced it,
3705 	 as this may change the program's memory overhead drastically when the
3706 	 function using alloca is called in a loop.  In the GCC source present
3707 	 in SPEC2000, inlining into schedule_block caused it to require 2GB of
3708 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3709 	 VLA objects, as those can't cause unbounded growth (they're always
3710 	 wrapped inside stack_save/stack_restore regions).  */
3711       if (gimple_maybe_alloca_call_p (stmt)
3712 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3713 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3714 	{
3715 	  inline_forbidden_reason
3716 	    = G_("function %q+F can never be inlined because it uses "
3717 		 "alloca (override using the always_inline attribute)");
3718 	  *handled_ops_p = true;
3719 	  return fn;
3720 	}
3721 
3722       t = gimple_call_fndecl (stmt);
3723       if (t == NULL_TREE)
3724 	break;
3725 
3726       /* We cannot inline functions that call setjmp.  */
3727       if (setjmp_call_p (t))
3728 	{
3729 	  inline_forbidden_reason
3730 	    = G_("function %q+F can never be inlined because it uses setjmp");
3731 	  *handled_ops_p = true;
3732 	  return t;
3733 	}
3734 
3735       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3736 	switch (DECL_FUNCTION_CODE (t))
3737 	  {
3738 	    /* We cannot inline functions that take a variable number of
3739 	       arguments.  */
3740 	  case BUILT_IN_VA_START:
3741 	  case BUILT_IN_NEXT_ARG:
3742 	  case BUILT_IN_VA_END:
3743 	    inline_forbidden_reason
3744 	      = G_("function %q+F can never be inlined because it "
3745 		   "uses variable argument lists");
3746 	    *handled_ops_p = true;
3747 	    return t;
3748 
3749 	  case BUILT_IN_LONGJMP:
3750 	    /* We can't inline functions that call __builtin_longjmp at
3751 	       all.  The non-local goto machinery really requires the
3752 	       destination be in a different function.  If we allow the
3753 	       function calling __builtin_longjmp to be inlined into the
3754 	       function calling __builtin_setjmp, Things will Go Awry.  */
3755 	    inline_forbidden_reason
3756 	      = G_("function %q+F can never be inlined because "
3757 		   "it uses setjmp-longjmp exception handling");
3758 	    *handled_ops_p = true;
3759 	    return t;
3760 
3761 	  case BUILT_IN_NONLOCAL_GOTO:
3762 	    /* Similarly.  */
3763 	    inline_forbidden_reason
3764 	      = G_("function %q+F can never be inlined because "
3765 		   "it uses non-local goto");
3766 	    *handled_ops_p = true;
3767 	    return t;
3768 
3769 	  case BUILT_IN_RETURN:
3770 	  case BUILT_IN_APPLY_ARGS:
3771 	    /* If a __builtin_apply_args caller would be inlined,
3772 	       it would be saving arguments of the function it has
3773 	       been inlined into.  Similarly, __builtin_return would
3774 	       return from the function the call has been inlined into.  */
3775 	    inline_forbidden_reason
3776 	      = G_("function %q+F can never be inlined because "
3777 		   "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3778 	    *handled_ops_p = true;
3779 	    return t;
3780 
3781 	  default:
3782 	    break;
3783 	  }
3784       break;
3785 
3786     case GIMPLE_GOTO:
3787       t = gimple_goto_dest (stmt);
3788 
3789       /* We will not inline a function which uses computed goto.  The
3790 	 addresses of its local labels, which may be tucked into
3791 	 global storage, are of course not constant across
3792 	 instantiations, which causes unexpected behavior.  */
3793       if (TREE_CODE (t) != LABEL_DECL)
3794 	{
3795 	  inline_forbidden_reason
3796 	    = G_("function %q+F can never be inlined "
3797 		 "because it contains a computed goto");
3798 	  *handled_ops_p = true;
3799 	  return t;
3800 	}
3801       break;
3802 
3803     default:
3804       break;
3805     }
3806 
3807   *handled_ops_p = false;
3808   return NULL_TREE;
3809 }
3810 
3811 /* Return true if FNDECL is a function that cannot be inlined into
3812    another one.  */
3813 
3814 static bool
3815 inline_forbidden_p (tree fndecl)
3816 {
3817   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3818   struct walk_stmt_info wi;
3819   basic_block bb;
3820   bool forbidden_p = false;
3821 
3822   /* First check for shared reasons not to copy the code.  */
3823   inline_forbidden_reason = copy_forbidden (fun);
3824   if (inline_forbidden_reason != NULL)
3825     return true;
3826 
3827   /* Next, walk the statements of the function looking for
3828      constructs we can't handle, or that are non-optimal for inlining.  */
3829   hash_set<tree> visited_nodes;
3830   memset (&wi, 0, sizeof (wi));
3831   wi.info = (void *) fndecl;
3832   wi.pset = &visited_nodes;
3833 
3834   FOR_EACH_BB_FN (bb, fun)
3835     {
3836       gimple *ret;
3837       gimple_seq seq = bb_seq (bb);
3838       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3839       forbidden_p = (ret != NULL);
3840       if (forbidden_p)
3841 	break;
3842     }
3843 
3844   return forbidden_p;
3845 }
3846 
3847 /* Return false if the function FNDECL cannot be inlined on account of its
3848    attributes, true otherwise.  */
3849 static bool
3850 function_attribute_inlinable_p (const_tree fndecl)
3851 {
3852   if (targetm.attribute_table)
3853     {
3854       const_tree a;
3855 
3856       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3857 	{
3858 	  const_tree name = TREE_PURPOSE (a);
3859 	  int i;
3860 
3861 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3862 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3863 	      return targetm.function_attribute_inlinable_p (fndecl);
3864 	}
3865     }
3866 
3867   return true;
3868 }
3869 
3870 /* Returns true if FN is a function that does not have any
3871    fundamental inline-blocking properties.  */
3872 
3873 bool
3874 tree_inlinable_function_p (tree fn)
3875 {
3876   bool inlinable = true;
3877   bool do_warning;
3878   tree always_inline;
3879 
3880   /* If we've already decided this function shouldn't be inlined,
3881      there's no need to check again.  */
3882   if (DECL_UNINLINABLE (fn))
3883     return false;
3884 
3885   /* We only warn for functions declared `inline' by the user.  */
3886   do_warning = (warn_inline
3887 		&& DECL_DECLARED_INLINE_P (fn)
3888 		&& !DECL_NO_INLINE_WARNING_P (fn)
3889 		&& !DECL_IN_SYSTEM_HEADER (fn));
3890 
3891   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3892 
3893   if (flag_no_inline
3894       && always_inline == NULL)
3895     {
3896       if (do_warning)
3897 	warning (OPT_Winline, "function %q+F can never be inlined because it "
3898 		 "is suppressed using %<-fno-inline%>", fn);
3899       inlinable = false;
3900     }
3901 
3902   else if (!function_attribute_inlinable_p (fn))
3903     {
3904       if (do_warning)
3905         warning (OPT_Winline, "function %q+F can never be inlined because it "
3906                  "uses attributes conflicting with inlining", fn);
3907       inlinable = false;
3908     }
3909 
3910   else if (inline_forbidden_p (fn))
3911     {
3912       /* See if we should warn about uninlinable functions.  Previously,
3913 	 some of these warnings would be issued while trying to expand
3914 	 the function inline, but that would cause multiple warnings
3915 	 about functions that would for example call alloca.  But since
3916 	 this is a property of the function, just one warning is enough.
3917 	 As a bonus we can now give more details about the reason why a
3918 	 function is not inlinable.  */
3919       if (always_inline)
3920 	error (inline_forbidden_reason, fn);
3921       else if (do_warning)
3922 	warning (OPT_Winline, inline_forbidden_reason, fn);
3923 
3924       inlinable = false;
3925     }
3926 
3927   /* Squirrel away the result so that we don't have to check again.  */
3928   DECL_UNINLINABLE (fn) = !inlinable;
3929 
3930   return inlinable;
3931 }
3932 
3933 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3934    word size, take a possible memcpy call into account, and return the
3935    cost based on whether we optimize for size or speed according to SPEED_P.  */
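/* As a rough illustration: with MOVE_MAX_PIECES == 8, a 24-byte aggregate
   is charged (24 + 8 - 1) / 8 == 3 units, while a type too large to be
   moved by pieces is charged a flat 4 (a memcpy call plus its three
   arguments).  The exact limits are target-dependent.  */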
3936 
3937 int
3938 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3939 {
3940   HOST_WIDE_INT size;
3941 
3942   gcc_assert (!VOID_TYPE_P (type));
3943 
3944   if (TREE_CODE (type) == VECTOR_TYPE)
3945     {
3946       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3947       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3948       int orig_mode_size
3949 	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3950       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3951       return ((orig_mode_size + simd_mode_size - 1)
3952 	      / simd_mode_size);
3953     }
3954 
3955   size = int_size_in_bytes (type);
3956 
3957   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3958     /* Cost of a memcpy call, 3 arguments and the call.  */
3959     return 4;
3960   else
3961     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3962 }
3963 
3964 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3965 
3966 static int
3967 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3968 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3969 {
3970   switch (code)
3971     {
3972     /* These are "free" conversions, or their presumed cost
3973        is folded into other operations.  */
3974     case RANGE_EXPR:
3975     CASE_CONVERT:
3976     case COMPLEX_EXPR:
3977     case PAREN_EXPR:
3978     case VIEW_CONVERT_EXPR:
3979       return 0;
3980 
3981     /* Assign cost of 1 to usual operations.
3982        ??? We may consider mapping RTL costs to this.  */
3983     case COND_EXPR:
3984     case VEC_COND_EXPR:
3985     case VEC_PERM_EXPR:
3986 
3987     case PLUS_EXPR:
3988     case POINTER_PLUS_EXPR:
3989     case POINTER_DIFF_EXPR:
3990     case MINUS_EXPR:
3991     case MULT_EXPR:
3992     case MULT_HIGHPART_EXPR:
3993 
3994     case ADDR_SPACE_CONVERT_EXPR:
3995     case FIXED_CONVERT_EXPR:
3996     case FIX_TRUNC_EXPR:
3997 
3998     case NEGATE_EXPR:
3999     case FLOAT_EXPR:
4000     case MIN_EXPR:
4001     case MAX_EXPR:
4002     case ABS_EXPR:
4003     case ABSU_EXPR:
4004 
4005     case LSHIFT_EXPR:
4006     case RSHIFT_EXPR:
4007     case LROTATE_EXPR:
4008     case RROTATE_EXPR:
4009 
4010     case BIT_IOR_EXPR:
4011     case BIT_XOR_EXPR:
4012     case BIT_AND_EXPR:
4013     case BIT_NOT_EXPR:
4014 
4015     case TRUTH_ANDIF_EXPR:
4016     case TRUTH_ORIF_EXPR:
4017     case TRUTH_AND_EXPR:
4018     case TRUTH_OR_EXPR:
4019     case TRUTH_XOR_EXPR:
4020     case TRUTH_NOT_EXPR:
4021 
4022     case LT_EXPR:
4023     case LE_EXPR:
4024     case GT_EXPR:
4025     case GE_EXPR:
4026     case EQ_EXPR:
4027     case NE_EXPR:
4028     case ORDERED_EXPR:
4029     case UNORDERED_EXPR:
4030 
4031     case UNLT_EXPR:
4032     case UNLE_EXPR:
4033     case UNGT_EXPR:
4034     case UNGE_EXPR:
4035     case UNEQ_EXPR:
4036     case LTGT_EXPR:
4037 
4038     case CONJ_EXPR:
4039 
4040     case PREDECREMENT_EXPR:
4041     case PREINCREMENT_EXPR:
4042     case POSTDECREMENT_EXPR:
4043     case POSTINCREMENT_EXPR:
4044 
4045     case REALIGN_LOAD_EXPR:
4046 
4047     case WIDEN_SUM_EXPR:
4048     case WIDEN_MULT_EXPR:
4049     case DOT_PROD_EXPR:
4050     case SAD_EXPR:
4051     case WIDEN_MULT_PLUS_EXPR:
4052     case WIDEN_MULT_MINUS_EXPR:
4053     case WIDEN_LSHIFT_EXPR:
4054 
4055     case VEC_WIDEN_MULT_HI_EXPR:
4056     case VEC_WIDEN_MULT_LO_EXPR:
4057     case VEC_WIDEN_MULT_EVEN_EXPR:
4058     case VEC_WIDEN_MULT_ODD_EXPR:
4059     case VEC_UNPACK_HI_EXPR:
4060     case VEC_UNPACK_LO_EXPR:
4061     case VEC_UNPACK_FLOAT_HI_EXPR:
4062     case VEC_UNPACK_FLOAT_LO_EXPR:
4063     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4064     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4065     case VEC_PACK_TRUNC_EXPR:
4066     case VEC_PACK_SAT_EXPR:
4067     case VEC_PACK_FIX_TRUNC_EXPR:
4068     case VEC_PACK_FLOAT_EXPR:
4069     case VEC_WIDEN_LSHIFT_HI_EXPR:
4070     case VEC_WIDEN_LSHIFT_LO_EXPR:
4071     case VEC_DUPLICATE_EXPR:
4072     case VEC_SERIES_EXPR:
4073 
4074       return 1;
4075 
4076     /* A few special cases of expensive operations.  This is useful
4077        to avoid inlining functions having too many of these.  */
4078     case TRUNC_DIV_EXPR:
4079     case CEIL_DIV_EXPR:
4080     case FLOOR_DIV_EXPR:
4081     case ROUND_DIV_EXPR:
4082     case EXACT_DIV_EXPR:
4083     case TRUNC_MOD_EXPR:
4084     case CEIL_MOD_EXPR:
4085     case FLOOR_MOD_EXPR:
4086     case ROUND_MOD_EXPR:
4087     case RDIV_EXPR:
4088       if (TREE_CODE (op2) != INTEGER_CST)
4089         return weights->div_mod_cost;
4090       return 1;
4091 
4092     /* Bit-field insertion needs several shift and mask operations.  */
4093     case BIT_INSERT_EXPR:
4094       return 3;
4095 
4096     default:
4097       /* We expect a copy assignment with no operator.  */
4098       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4099       return 0;
4100     }
4101 }
4102 
4103 
4104 /* Estimate number of instructions that will be created by expanding
4105    the statements in the statement sequence STMTS.
4106    WEIGHTS contains weights attributed to various constructs.  */
4107 
4108 int
4109 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4110 {
4111   int cost;
4112   gimple_stmt_iterator gsi;
4113 
4114   cost = 0;
4115   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4116     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4117 
4118   return cost;
4119 }
4120 
4121 
4122 /* Estimate number of instructions that will be created by expanding STMT.
4123    WEIGHTS contains weights attributed to various constructs.  */
4124 
4125 int
4126 estimate_num_insns (gimple *stmt, eni_weights *weights)
4127 {
4128   unsigned cost, i;
4129   enum gimple_code code = gimple_code (stmt);
4130   tree lhs;
4131   tree rhs;
4132 
4133   switch (code)
4134     {
4135     case GIMPLE_ASSIGN:
4136       /* Try to estimate the cost of assignments.  We have two cases to
4137 	 deal with:
4138 	 1) Simple assignments to registers;
4139 	 2) Stores to things that must live in memory.  This includes
4140 	    "normal" stores to scalars, but also assignments of large
4141 	    structures, or constructors of big arrays;
4142 
4143 	 Let us look at these two cases, assuming we have "a = b + C":
4144 	 <GIMPLE_ASSIGN <var_decl "a">
4145 	        <plus_expr <var_decl "b"> <constant C>>
4146 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4147 	 any target, because "a" usually ends up in a real register.  Hence
4148 	 the only cost of this expression comes from the PLUS_EXPR, and we
4149 	 can ignore the GIMPLE_ASSIGN.
4150 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4151 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4152 	 of moving something into "a", which we compute using the function
4153 	 estimate_move_cost.  */
4154       if (gimple_clobber_p (stmt))
4155 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4156 
4157       lhs = gimple_assign_lhs (stmt);
4158       rhs = gimple_assign_rhs1 (stmt);
4159 
4160       cost = 0;
4161 
4162       /* Account for the cost of moving to / from memory.  */
4163       if (gimple_store_p (stmt))
4164 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4165       if (gimple_assign_load_p (stmt))
4166 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4167 
4168       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4169       				      gimple_assign_rhs1 (stmt),
4170 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4171 				      == GIMPLE_BINARY_RHS
4172 				      ? gimple_assign_rhs2 (stmt) : NULL);
4173       break;
4174 
4175     case GIMPLE_COND:
4176       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4177       				         gimple_op (stmt, 0),
4178 				         gimple_op (stmt, 1));
4179       break;
4180 
4181     case GIMPLE_SWITCH:
4182       {
4183 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4184 	/* Take into account the cost of the switch + guess 2 conditional jumps
4185 	   for each case label.
4186 
4187 	   TODO: once the switch expansion logic is sufficiently separated, we can
4188 	   do a better job of estimating the cost of the switch.  */
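	/* E.g. a switch with 16 case labels would be charged
	   floor_log2 (16) * 2 == 8 when estimating time, but 16 * 2 == 32
	   when estimating size (an illustration of the formulas below).  */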
4189 	if (weights->time_based)
4190 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4191 	else
4192 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4193       }
4194       break;
4195 
4196     case GIMPLE_CALL:
4197       {
4198 	tree decl;
4199 
4200 	if (gimple_call_internal_p (stmt))
4201 	  return 0;
4202 	else if ((decl = gimple_call_fndecl (stmt))
4203 		 && fndecl_built_in_p (decl))
4204 	  {
4205 	    /* Do not special-case builtins where we see the body.
4206 	       This just confuses the inliner.  */
4207 	    struct cgraph_node *node;
4208 	    if (!(node = cgraph_node::get (decl))
4209 		|| node->definition)
4210 	      ;
4211 	    /* For builtins that are likely expanded to nothing or
4212 	       inlined, do not account operand costs.  */
4213 	    else if (is_simple_builtin (decl))
4214 	      return 0;
4215 	    else if (is_inexpensive_builtin (decl))
4216 	      return weights->target_builtin_call_cost;
4217 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4218 	      {
4219 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4220 		   specialize the cheap expansion we do here.
4221 		   ???  This asks for a more general solution.  */
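		/* I.e. pow (x, 2.0) is costed like the multiplication
		   x * x it is normally expanded back to.  */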
4222 		switch (DECL_FUNCTION_CODE (decl))
4223 		  {
4224 		    case BUILT_IN_POW:
4225 		    case BUILT_IN_POWF:
4226 		    case BUILT_IN_POWL:
4227 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4228 			  && (real_equal
4229 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4230 			       &dconst2)))
4231 			return estimate_operator_cost
4232 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4233 			     gimple_call_arg (stmt, 0));
4234 		      break;
4235 
4236 		    default:
4237 		      break;
4238 		  }
4239 	      }
4240 	  }
4241 
4242 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4243 	if (gimple_call_lhs (stmt))
4244 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4245 				      weights->time_based);
4246 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4247 	  {
4248 	    tree arg = gimple_call_arg (stmt, i);
4249 	    cost += estimate_move_cost (TREE_TYPE (arg),
4250 					weights->time_based);
4251 	  }
4252 	break;
4253       }
4254 
4255     case GIMPLE_RETURN:
4256       return weights->return_cost;
4257 
4258     case GIMPLE_GOTO:
4259     case GIMPLE_LABEL:
4260     case GIMPLE_NOP:
4261     case GIMPLE_PHI:
4262     case GIMPLE_PREDICT:
4263     case GIMPLE_DEBUG:
4264       return 0;
4265 
4266     case GIMPLE_ASM:
4267       {
4268 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4269 	/* 1000 means infinity. This avoids overflows later
4270 	   with very long asm statements.  */
4271 	if (count > 1000)
4272 	  count = 1000;
4273 	/* If this asm is asm inline, count anything as minimum size.  */
4274 	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4275 	  count = MIN (1, count);
4276 	return MAX (1, count);
4277       }
4278 
4279     case GIMPLE_RESX:
4280       /* This is either going to be an external function call with one
4281 	 argument, or two register copy statements plus a goto.  */
4282       return 2;
4283 
4284     case GIMPLE_EH_DISPATCH:
4285       /* ??? This is going to turn into a switch statement.  Ideally
4286 	 we'd have a look at the eh region and estimate the number of
4287 	 edges involved.  */
4288       return 10;
4289 
4290     case GIMPLE_BIND:
4291       return estimate_num_insns_seq (
4292 	       gimple_bind_body (as_a <gbind *> (stmt)),
4293 	       weights);
4294 
4295     case GIMPLE_EH_FILTER:
4296       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4297 
4298     case GIMPLE_CATCH:
4299       return estimate_num_insns_seq (gimple_catch_handler (
4300 				       as_a <gcatch *> (stmt)),
4301 				     weights);
4302 
4303     case GIMPLE_TRY:
4304       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4305               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4306 
4307     /* OMP directives are generally very expensive.  */
4308 
4309     case GIMPLE_OMP_RETURN:
4310     case GIMPLE_OMP_SECTIONS_SWITCH:
4311     case GIMPLE_OMP_ATOMIC_STORE:
4312     case GIMPLE_OMP_CONTINUE:
4313       /* ...except these, which are cheap.  */
4314       return 0;
4315 
4316     case GIMPLE_OMP_ATOMIC_LOAD:
4317       return weights->omp_cost;
4318 
4319     case GIMPLE_OMP_FOR:
4320       return (weights->omp_cost
4321               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4322               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4323 
4324     case GIMPLE_OMP_PARALLEL:
4325     case GIMPLE_OMP_TASK:
4326     case GIMPLE_OMP_CRITICAL:
4327     case GIMPLE_OMP_MASTER:
4328     case GIMPLE_OMP_TASKGROUP:
4329     case GIMPLE_OMP_ORDERED:
4330     case GIMPLE_OMP_SECTION:
4331     case GIMPLE_OMP_SECTIONS:
4332     case GIMPLE_OMP_SINGLE:
4333     case GIMPLE_OMP_TARGET:
4334     case GIMPLE_OMP_TEAMS:
4335       return (weights->omp_cost
4336               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4337 
4338     case GIMPLE_TRANSACTION:
4339       return (weights->tm_cost
4340 	      + estimate_num_insns_seq (gimple_transaction_body (
4341 					  as_a <gtransaction *> (stmt)),
4342 					weights));
4343 
4344     default:
4345       gcc_unreachable ();
4346     }
4347 
4348   return cost;
4349 }
4350 
4351 /* Estimate number of instructions that will be created by expanding
4352    function FNDECL.  WEIGHTS contains weights attributed to various
4353    constructs.  */
4354 
4355 int
4356 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4357 {
4358   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4359   gimple_stmt_iterator bsi;
4360   basic_block bb;
4361   int n = 0;
4362 
4363   gcc_assert (my_function && my_function->cfg);
4364   FOR_EACH_BB_FN (bb, my_function)
4365     {
4366       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4367 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4368     }
4369 
4370   return n;
4371 }
4372 
4373 
4374 /* Initializes weights used by estimate_num_insns.  */
4375 
4376 void
4377 init_inline_once (void)
4378 {
4379   eni_size_weights.call_cost = 1;
4380   eni_size_weights.indirect_call_cost = 3;
4381   eni_size_weights.target_builtin_call_cost = 1;
4382   eni_size_weights.div_mod_cost = 1;
4383   eni_size_weights.omp_cost = 40;
4384   eni_size_weights.tm_cost = 10;
4385   eni_size_weights.time_based = false;
4386   eni_size_weights.return_cost = 1;
4387 
4388   /* Estimating time for call is difficult, since we have no idea what the
4389      called function does.  In the current uses of eni_time_weights,
4390      underestimating the cost does less harm than overestimating it, so
4391      we choose a rather small value here.  */
4392   eni_time_weights.call_cost = 10;
4393   eni_time_weights.indirect_call_cost = 15;
4394   eni_time_weights.target_builtin_call_cost = 1;
4395   eni_time_weights.div_mod_cost = 10;
4396   eni_time_weights.omp_cost = 40;
4397   eni_time_weights.tm_cost = 40;
4398   eni_time_weights.time_based = true;
4399   eni_time_weights.return_cost = 2;
4400 }
4401 
4402 
4403 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4404 
4405 static void
4406 prepend_lexical_block (tree current_block, tree new_block)
4407 {
4408   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4409   BLOCK_SUBBLOCKS (current_block) = new_block;
4410   BLOCK_SUPERCONTEXT (new_block) = current_block;
4411 }
4412 
4413 /* Add local variables from CALLEE to CALLER.  */
4414 
4415 static inline void
4416 add_local_variables (struct function *callee, struct function *caller,
4417 		     copy_body_data *id)
4418 {
4419   tree var;
4420   unsigned ix;
4421 
4422   FOR_EACH_LOCAL_DECL (callee, ix, var)
4423     if (!can_be_nonlocal (var, id))
4424       {
4425         tree new_var = remap_decl (var, id);
4426 
4427         /* Remap debug-expressions.  */
4428 	if (VAR_P (new_var)
4429 	    && DECL_HAS_DEBUG_EXPR_P (var)
4430 	    && new_var != var)
4431 	  {
4432 	    tree tem = DECL_DEBUG_EXPR (var);
4433 	    bool old_regimplify = id->regimplify;
4434 	    id->remapping_type_depth++;
4435 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4436 	    id->remapping_type_depth--;
4437 	    id->regimplify = old_regimplify;
4438 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4439 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4440 	  }
4441 	add_local_decl (caller, new_var);
4442       }
4443 }
4444 
4445 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4446    have brought in or introduced any debug stmts for SRCVAR.  */
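/* The reset takes the form of a debug bind of the remapped variable with
   no value, located at the call statement; GIMPLE dumps typically show it
   as `# DEBUG var => NULL'.  */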
4447 
4448 static inline void
4449 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4450 {
4451   tree *remappedvarp = id->decl_map->get (srcvar);
4452 
4453   if (!remappedvarp)
4454     return;
4455 
4456   if (!VAR_P (*remappedvarp))
4457     return;
4458 
4459   if (*remappedvarp == id->retvar)
4460     return;
4461 
4462   tree tvar = target_for_debug_bind (*remappedvarp);
4463   if (!tvar)
4464     return;
4465 
4466   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4467 					  id->call_stmt);
4468   gimple_seq_add_stmt (bindings, stmt);
4469 }
4470 
4471 /* For each inlined variable for which we may have debug bind stmts,
4472    add before GSI a final debug stmt resetting it, marking the end of
4473    its life, so that var-tracking knows it doesn't have to compute
4474    further locations for it.  */
4475 
4476 static inline void
4477 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4478 {
4479   tree var;
4480   unsigned ix;
4481   gimple_seq bindings = NULL;
4482 
4483   if (!gimple_in_ssa_p (id->src_cfun))
4484     return;
4485 
4486   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4487     return;
4488 
4489   for (var = DECL_ARGUMENTS (id->src_fn);
4490        var; var = DECL_CHAIN (var))
4491     reset_debug_binding (id, var, &bindings);
4492 
4493   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4494     reset_debug_binding (id, var, &bindings);
4495 
4496   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4497 }
4498 
4499 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
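/* Roughly: the basic block containing the call is split at the call, the
   callee's parameters and return value are mapped to new locals in the
   caller, the callee body is copied in between the two halves, and
   returns in the copy become jumps to the second half.  This is an
   informal summary; the individual steps below handle the details
   (thunks, EH, debug info, SIMT, ...).  */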
4500 
4501 static bool
4502 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4503 		    bitmap to_purge)
4504 {
4505   tree use_retvar;
4506   tree fn;
4507   hash_map<tree, tree> *dst;
4508   hash_map<tree, tree> *st = NULL;
4509   tree return_slot;
4510   tree modify_dest;
4511   struct cgraph_edge *cg_edge;
4512   cgraph_inline_failed_t reason;
4513   basic_block return_block;
4514   edge e;
4515   gimple_stmt_iterator gsi, stmt_gsi;
4516   bool successfully_inlined = false;
4517   bool purge_dead_abnormal_edges;
4518   gcall *call_stmt;
4519   unsigned int prop_mask, src_properties;
4520   struct function *dst_cfun;
4521   tree simduid;
4522   use_operand_p use;
4523   gimple *simtenter_stmt = NULL;
4524   vec<tree> *simtvars_save;
4525 
4526   /* The gimplifier uses input_location in too many places, such as
4527      internal_get_tmp_var ().  */
4528   location_t saved_location = input_location;
4529   input_location = gimple_location (stmt);
4530 
4531   /* From here on, we're only interested in CALL_EXPRs.  */
4532   call_stmt = dyn_cast <gcall *> (stmt);
4533   if (!call_stmt)
4534     goto egress;
4535 
4536   cg_edge = id->dst_node->get_edge (stmt);
4537   gcc_checking_assert (cg_edge);
4538   /* First, see if we can figure out what function is being called.
4539      If we cannot, then there is no hope of inlining the function.  */
4540   if (cg_edge->indirect_unknown_callee)
4541     goto egress;
4542   fn = cg_edge->callee->decl;
4543   gcc_checking_assert (fn);
4544 
4545   /* If FN is a declaration of a function in a nested scope that was
4546      globally declared inline, we don't set its DECL_INITIAL.
4547      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4548      C++ front-end uses it for cdtors to refer to their internal
4549      declarations, which are not real functions.  Fortunately those
4550      don't have trees to be saved, so we can tell by checking their
4551      gimple_body.  */
4552   if (!DECL_INITIAL (fn)
4553       && DECL_ABSTRACT_ORIGIN (fn)
4554       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4555     fn = DECL_ABSTRACT_ORIGIN (fn);
4556 
4557   /* Don't try to inline functions that are not well-suited to inlining.  */
4558   if (cg_edge->inline_failed)
4559     {
4560       reason = cg_edge->inline_failed;
4561       /* If this call was originally indirect, we do not want to emit any
4562 	 inlining related warnings or sorry messages because there are no
4563 	 guarantees regarding those.  */
4564       if (cg_edge->indirect_inlining_edge)
4565 	goto egress;
4566 
4567       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4568           /* For extern inline functions that get redefined we have always
4569 	     silently ignored the always_inline flag.  Better behavior would
4570 	     be to keep both bodies and use the extern inline body
4571 	     for inlining, but we can't do that because frontends overwrite
4572 	     the body.  */
4573 	  && !cg_edge->callee->local.redefined_extern_inline
4574 	  /* During early inline pass, report only when optimization is
4575 	     not turned on.  */
4576 	  && (symtab->global_info_ready
4577 	      || !optimize
4578 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4579 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4580 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4581 	{
4582 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4583 		 cgraph_inline_failed_string (reason));
4584 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4585 	    inform (gimple_location (stmt), "called from here");
4586 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4587 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4588                    "called from this function");
4589 	}
4590       else if (warn_inline
4591 	       && DECL_DECLARED_INLINE_P (fn)
4592 	       && !DECL_NO_INLINE_WARNING_P (fn)
4593 	       && !DECL_IN_SYSTEM_HEADER (fn)
4594 	       && reason != CIF_UNSPECIFIED
4595 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4596 	       /* Do not warn about not inlined recursive calls.  */
4597 	       && !cg_edge->recursive_p ()
4598 	       /* Avoid warnings during early inline pass. */
4599 	       && symtab->global_info_ready)
4600 	{
4601 	  auto_diagnostic_group d;
4602 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4603 		       fn, _(cgraph_inline_failed_string (reason))))
4604 	    {
4605 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4606 		inform (gimple_location (stmt), "called from here");
4607 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4608 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4609                        "called from this function");
4610 	    }
4611 	}
4612       goto egress;
4613     }
4614   id->src_node = cg_edge->callee;
4615 
4616   /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4617      and redirect to the function being thunked.  */
4618   if (id->src_node->thunk.thunk_p)
4619     {
4620       cgraph_edge *edge;
4621       tree virtual_offset = NULL;
4622       profile_count count = cg_edge->count;
4623       tree op;
4624       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4625 
4626       cg_edge->remove ();
4627       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4628 		   		           gimple_uid (stmt),
4629 				   	   profile_count::one (),
4630 					   profile_count::one (),
4631 				           true);
4632       edge->count = count;
4633       if (id->src_node->thunk.virtual_offset_p)
4634         virtual_offset = size_int (id->src_node->thunk.virtual_value);
4635       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4636 			      NULL);
4637       gsi_insert_before (&iter, gimple_build_assign (op,
4638 						    gimple_call_arg (stmt, 0)),
4639 			 GSI_NEW_STMT);
4640       gcc_assert (id->src_node->thunk.this_adjusting);
4641       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4642 			 virtual_offset, id->src_node->thunk.indirect_offset);
4643 
4644       gimple_call_set_arg (stmt, 0, op);
4645       gimple_call_set_fndecl (stmt, edge->callee->decl);
4646       update_stmt (stmt);
4647       id->src_node->remove ();
4648       expand_call_inline (bb, stmt, id, to_purge);
4649       maybe_remove_unused_call_args (cfun, stmt);
4650       return true;
4651     }
4652   fn = cg_edge->callee->decl;
4653   cg_edge->callee->get_untransformed_body ();
4654 
4655   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4656     cg_edge->callee->verify ();
4657 
4658   /* We will be inlining this callee.  */
4659   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4660 
4661   /* Update the caller's EH personality.  */
4662   if (DECL_FUNCTION_PERSONALITY (fn))
4663     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4664       = DECL_FUNCTION_PERSONALITY (fn);
4665 
4666   /* Split the block before the GIMPLE_CALL.  */
4667   stmt_gsi = gsi_for_stmt (stmt);
4668   gsi_prev (&stmt_gsi);
4669   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4670   bb = e->src;
4671   return_block = e->dest;
4672   remove_edge (e);
4673 
4674   /* If the GIMPLE_CALL was the last statement of BB, it may have
4675      been the source of abnormal edges.  In this case, schedule
4676      the removal of dead abnormal edges.  */
4677   gsi = gsi_start_bb (return_block);
4678   gsi_next (&gsi);
4679   purge_dead_abnormal_edges = gsi_end_p (gsi);
4680 
4681   stmt_gsi = gsi_start_bb (return_block);
4682 
4683   /* Build a block containing code to initialize the arguments, the
4684      actual inline expansion of the body, and a label for the return
4685      statements within the function to jump to.  The type of the
4686      statement expression is the return type of the function call.
4687      ???  If the call does not have an associated block then we will
4688      remap all callee blocks to NULL, effectively dropping most of
4689      its debug information.  This should only happen for calls to
4690      artificial decls inserted by the compiler itself.  We need to
4691      either link the inlined blocks into the caller block tree or
4692      not refer to them in any way to not break GC for locations.  */
4693   if (tree block = gimple_block (stmt))
4694     {
4695       /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4696          so that inlined_function_outer_scope_p returns true on this BLOCK.  */
4697       location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4698       if (loc == UNKNOWN_LOCATION)
4699 	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4700       if (loc == UNKNOWN_LOCATION)
4701 	loc = BUILTINS_LOCATION;
4702       id->block = make_node (BLOCK);
4703       BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4704       BLOCK_SOURCE_LOCATION (id->block) = loc;
4705       prepend_lexical_block (block, id->block);
4706     }
4707 
4708   /* Local declarations will be replaced by their equivalents in this map.  */
4709   st = id->decl_map;
4710   id->decl_map = new hash_map<tree, tree>;
4711   dst = id->debug_map;
4712   id->debug_map = NULL;
4713   if (flag_stack_reuse != SR_NONE)
4714     id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4715 
4716   /* Record the function we are about to inline.  */
4717   id->src_fn = fn;
4718   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4719   id->reset_location = DECL_IGNORED_P (fn);
4720   id->call_stmt = call_stmt;
4721 
4722   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4723      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4724   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4725   simtvars_save = id->dst_simt_vars;
4726   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4727       && (simduid = bb->loop_father->simduid) != NULL_TREE
4728       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4729       && single_imm_use (simduid, &use, &simtenter_stmt)
4730       && is_gimple_call (simtenter_stmt)
4731       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4732     vec_alloc (id->dst_simt_vars, 0);
4733   else
4734     id->dst_simt_vars = NULL;
4735 
4736   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4737     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4738 
4739   /* If the src function contains an IFN_VA_ARG, then so will the dst
4740      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4741   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4742   src_properties = id->src_cfun->curr_properties & prop_mask;
4743   if (src_properties != prop_mask)
4744     dst_cfun->curr_properties &= src_properties | ~prop_mask;
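  /* For example, if the callee body still contains an IFN_VA_ARG,
     PROP_gimple_lva is missing from SRC_PROPERTIES and the statement above
     clears it in DST_CFUN as well, so the corresponding lowering is not
     skipped for the caller.  */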
4745 
4746   gcc_assert (!id->src_cfun->after_inlining);
4747 
4748   id->entry_bb = bb;
4749   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4750     {
4751       gimple_stmt_iterator si = gsi_last_bb (bb);
4752       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4753       						   NOT_TAKEN),
4754 			GSI_NEW_STMT);
4755     }
4756   initialize_inlined_parameters (id, stmt, fn, bb);
4757   if (debug_nonbind_markers_p && debug_inline_points && id->block
4758       && inlined_function_outer_scope_p (id->block))
4759     {
4760       gimple_stmt_iterator si = gsi_last_bb (bb);
4761       gsi_insert_after (&si, gimple_build_debug_inline_entry
4762 			(id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4763 			GSI_NEW_STMT);
4764     }
4765 
4766   if (DECL_INITIAL (fn))
4767     {
4768       if (gimple_block (stmt))
4769 	{
4770 	  tree *var;
4771 
4772 	  prepend_lexical_block (id->block,
4773 				 remap_blocks (DECL_INITIAL (fn), id));
4774 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4775 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4776 				   == NULL_TREE));
4777 	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4778 	     otherwise for DWARF DW_TAG_formal_parameter will not be children of
4779 	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4780 	     under it.  The parameters can be then evaluated in the debugger,
4781 	     but don't show in backtraces.  */
4782 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4783 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4784 	      {
4785 		tree v = *var;
4786 		*var = TREE_CHAIN (v);
4787 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4788 		BLOCK_VARS (id->block) = v;
4789 	      }
4790 	    else
4791 	      var = &TREE_CHAIN (*var);
4792 	}
4793       else
4794 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4795     }
4796 
4797   /* Return statements in the function body will be replaced by jumps
4798      to the RET_LABEL.  */
4799   gcc_assert (DECL_INITIAL (fn));
4800   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4801 
4802   /* Find the LHS to which the result of this call is assigned.  */
4803   return_slot = NULL;
4804   if (gimple_call_lhs (stmt))
4805     {
4806       modify_dest = gimple_call_lhs (stmt);
4807 
4808       /* The function which we are inlining might not return a value,
4809 	 in which case the 'function does not return a value' warning is
4810 	 the appropriate diagnostic.  The optimizers will additionally see
4811 	 that the variable to which the value is assigned was never
4812 	 initialized; we do not want a second warning about that
4813 	 uninitialized variable.  */
4814       if (DECL_P (modify_dest))
4815 	TREE_NO_WARNING (modify_dest) = 1;
4816 
4817       if (gimple_call_return_slot_opt_p (call_stmt))
4818 	{
4819 	  return_slot = modify_dest;
4820 	  modify_dest = NULL;
4821 	}
4822     }
4823   else
4824     modify_dest = NULL;
4825 
4826   /* If we are inlining a call to the C++ operator new, we don't want
4827      to use type based alias analysis on the return value.  Otherwise
4828      we may get confused if the compiler sees that the inlined new
4829      function returns a pointer which was just deleted.  See bug
4830      33407.  */
4831   if (DECL_IS_OPERATOR_NEW (fn))
4832     {
4833       return_slot = NULL;
4834       modify_dest = NULL;
4835     }
4836 
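  /* At most one of RETURN_SLOT and MODIFY_DEST is non-NULL here:
     RETURN_SLOT when the LHS memory can be used directly as the callee's
     return object (return slot optimization), MODIFY_DEST when the result
     is simply assigned to the LHS, and neither when the result is ignored
     or when, as for operator new above, type-based alias analysis of the
     return value must be avoided.  */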
4837   /* Declare the return variable for the function.  */
4838   use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4839 
4840   /* Add local vars in this inlined callee to caller.  */
4841   add_local_variables (id->src_cfun, cfun, id);
4842 
4843   if (dump_enabled_p ())
4844     {
4845       char buf[128];
4846       snprintf (buf, sizeof(buf), "%4.2f",
4847 		cg_edge->sreal_frequency ().to_double ());
4848       dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4849 		       call_stmt,
4850 		       "Inlining %C to %C with frequency %s\n",
4851 		       id->src_node, id->dst_node, buf);
4852       if (dump_file && (dump_flags & TDF_DETAILS))
4853 	{
4854 	  id->src_node->dump (dump_file);
4855 	  id->dst_node->dump (dump_file);
4856 	}
4857     }
4858 
4859   /* This is it.  Duplicate the callee body.  Assume callee is
4860      pre-gimplified.  Note that we must not alter the caller
4861      function in any way before this point, as this CALL_EXPR may be
4862      a self-referential call; if we're calling ourselves, we need to
4863      duplicate our body before altering anything.  */
4864   copy_body (id, bb, return_block, NULL);
4865 
4866   reset_debug_bindings (id, stmt_gsi);
4867 
4868   if (flag_stack_reuse != SR_NONE)
4869     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4870       if (!TREE_THIS_VOLATILE (p))
4871 	{
4872 	  tree *varp = id->decl_map->get (p);
4873 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4874 	    {
4875 	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4876 	      gimple *clobber_stmt;
4877 	      TREE_THIS_VOLATILE (clobber) = 1;
4878 	      clobber_stmt = gimple_build_assign (*varp, clobber);
4879 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
4880 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4881 	    }
4882 	}
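  /* The loop above appends, right after the inlined body, a statement of
     the form

	 var ={v} {CLOBBER};

     for each non-volatile parameter copy that lives in memory (VAR being
     whatever VAR_DECL the PARM_DECL was remapped to), marking its stack
     slot as dead so that later passes may reuse the storage.  */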
4883 
4884   /* Reset the escaped solution.  */
4885   if (cfun->gimple_df)
4886     pt_solution_reset (&cfun->gimple_df->escaped);
4887 
4888   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
4889   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4890     {
4891       size_t nargs = gimple_call_num_args (simtenter_stmt);
4892       vec<tree> *vars = id->dst_simt_vars;
4893       auto_vec<tree> newargs (nargs + vars->length ());
4894       for (size_t i = 0; i < nargs; i++)
4895 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4896       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4897 	{
4898 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4899 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4900 	}
4901       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4902       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4903       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4904       gsi_replace (&gsi, g, false);
4905     }
4906   vec_free (id->dst_simt_vars);
4907   id->dst_simt_vars = simtvars_save;
4908 
4909   /* Clean up.  */
4910   if (id->debug_map)
4911     {
4912       delete id->debug_map;
4913       id->debug_map = dst;
4914     }
4915   delete id->decl_map;
4916   id->decl_map = st;
4917 
4918   /* Unlink the call's virtual operands before replacing it.  */
4919   unlink_stmt_vdef (stmt);
4920   if (gimple_vdef (stmt)
4921       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4922     release_ssa_name (gimple_vdef (stmt));
4923 
4924   /* If the inlined function returns a result that we care about,
4925      substitute the GIMPLE_CALL with an assignment of the return
4926      variable to the LHS of the call.  That is, if STMT was
4927      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4928   if (use_retvar && gimple_call_lhs (stmt))
4929     {
4930       gimple *old_stmt = stmt;
4931       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4932       gimple_set_location (stmt, gimple_location (old_stmt));
4933       gsi_replace (&stmt_gsi, stmt, false);
4934       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4935       /* Append a clobber for id->retvar if easily possible.  */
4936       if (flag_stack_reuse != SR_NONE
4937 	  && id->retvar
4938 	  && VAR_P (id->retvar)
4939 	  && id->retvar != return_slot
4940 	  && id->retvar != modify_dest
4941 	  && !TREE_THIS_VOLATILE (id->retvar)
4942 	  && !is_gimple_reg (id->retvar)
4943 	  && !stmt_ends_bb_p (stmt))
4944 	{
4945 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4946 	  gimple *clobber_stmt;
4947 	  TREE_THIS_VOLATILE (clobber) = 1;
4948 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4949 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4950 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4951 	}
4952     }
4953   else
4954     {
4955       /* Handle the case of inlining a function with no return
4956 	 statement, which causes the return value to become undefined.  */
4957       if (gimple_call_lhs (stmt)
4958 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4959 	{
4960 	  tree name = gimple_call_lhs (stmt);
4961 	  tree var = SSA_NAME_VAR (name);
4962 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4963 
4964 	  if (def)
4965 	    {
4966 	      /* If the variable is used undefined, make this name
4967 		 undefined via a move.  */
4968 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4969 	      gsi_replace (&stmt_gsi, stmt, true);
4970 	    }
4971 	  else
4972 	    {
4973 	      if (!var)
4974 		{
4975 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4976 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4977 		}
4978 	      /* Otherwise make this variable undefined.  */
4979 	      gsi_remove (&stmt_gsi, true);
4980 	      set_ssa_default_def (cfun, var, name);
4981 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4982 	    }
4983 	}
4984       /* Replace with a clobber for id->retvar.  */
4985       else if (flag_stack_reuse != SR_NONE
4986 	       && id->retvar
4987 	       && VAR_P (id->retvar)
4988 	       && id->retvar != return_slot
4989 	       && id->retvar != modify_dest
4990 	       && !TREE_THIS_VOLATILE (id->retvar)
4991 	       && !is_gimple_reg (id->retvar))
4992 	{
4993 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4994 	  gimple *clobber_stmt;
4995 	  TREE_THIS_VOLATILE (clobber) = 1;
4996 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4997 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
4998 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
4999 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5000 	}
5001       else
5002 	gsi_remove (&stmt_gsi, true);
5003     }
5004 
5005   if (purge_dead_abnormal_edges)
5006     bitmap_set_bit (to_purge, return_block->index);
5007 
5008   /* If the value of the new expression is ignored, that's OK.  We
5009      don't warn about this for CALL_EXPRs, so we shouldn't warn about
5010      the equivalent inlined version either.  */
5011   if (is_gimple_assign (stmt))
5012     {
5013       gcc_assert (gimple_assign_single_p (stmt)
5014 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5015       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5016     }
5017 
5018   id->add_clobbers_to_eh_landing_pads = 0;
5019 
5020   /* Output the inlining info for this abstract function, since it has been
5021      inlined.  If we don't do this now, we can lose the information about the
5022      variables in the function when the blocks get blown away as soon as we
5023      remove the cgraph node.  */
5024   if (gimple_block (stmt))
5025     (*debug_hooks->outlining_inline_function) (fn);
5026 
5027   /* Update callgraph if needed.  */
5028   cg_edge->callee->remove ();
5029 
5030   id->block = NULL_TREE;
5031   id->retvar = NULL_TREE;
5032   successfully_inlined = true;
5033 
5034  egress:
5035   input_location = saved_location;
5036   return successfully_inlined;
5037 }
5038 
5039 /* Expand call statements reachable from STMT_P.
5040    We can only have CALL_EXPRs as the "toplevel" tree code or nested
5041    in a MODIFY_EXPR.  */
5042 
5043 static bool
5044 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5045 			    bitmap to_purge)
5046 {
5047   gimple_stmt_iterator gsi;
5048   bool inlined = false;
5049 
5050   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5051     {
5052       gimple *stmt = gsi_stmt (gsi);
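      /* Advance the iterator before possibly expanding the call below:
	 expand_call_inline may split BB at STMT and replace STMT itself,
	 which would leave an iterator still pointing at it dangling.  */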
5053       gsi_prev (&gsi);
5054 
5055       if (is_gimple_call (stmt)
5056 	  && !gimple_call_internal_p (stmt))
5057 	inlined |= expand_call_inline (bb, stmt, id, to_purge);
5058     }
5059 
5060   return inlined;
5061 }
5062 
5063 
5064 /* Walk all basic blocks created after FIRST and try to fold every statement
5065    in the STATEMENTS pointer set.  */
5066 
5067 static void
5068 fold_marked_statements (int first, hash_set<gimple *> *statements)
5069 {
5070   auto_bitmap to_purge;
5071   for (; first < last_basic_block_for_fn (cfun); first++)
5072     if (BASIC_BLOCK_FOR_FN (cfun, first))
5073       {
5074         gimple_stmt_iterator gsi;
5075 
5076 	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5077 	     !gsi_end_p (gsi);
5078 	     gsi_next (&gsi))
5079 	  if (statements->contains (gsi_stmt (gsi)))
5080 	    {
5081 	      gimple *old_stmt = gsi_stmt (gsi);
5082 	      tree old_decl
5083 		= is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5084 
5085 	      if (old_decl && fndecl_built_in_p (old_decl))
5086 		{
5087 		  /* Folding builtins can create multiple instructions,
5088 		     we need to look at all of them.  */
5089 		  gimple_stmt_iterator i2 = gsi;
5090 		  gsi_prev (&i2);
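		  /* I2 now designates the statement just before OLD_STMT (or
		     the end of the block if OLD_STMT was first); after
		     folding we step forward from there so that every
		     statement the folding produced gets visited.  */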
5091 		  if (fold_stmt (&gsi))
5092 		    {
5093 		      gimple *new_stmt;
5094 		      /* If a builtin at the end of a bb folded into nothing,
5095 			 the following loop won't work.  */
5096 		      if (gsi_end_p (gsi))
5097 			{
5098 			  cgraph_update_edges_for_call_stmt (old_stmt,
5099 							     old_decl, NULL);
5100 			  break;
5101 			}
5102 		      if (gsi_end_p (i2))
5103 			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5104 		      else
5105 			gsi_next (&i2);
5106 		      while (1)
5107 			{
5108 			  new_stmt = gsi_stmt (i2);
5109 			  update_stmt (new_stmt);
5110 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5111 							     new_stmt);
5112 
5113 			  if (new_stmt == gsi_stmt (gsi))
5114 			    {
5115 			      /* It is okay to check only for the very last
5116 				 of these statements.  If it is a throwing
5117 				 statement nothing will change.  If it isn't
5118 				 this can remove EH edges.  That would only be
5119 				 incorrect if some intermediate stmts could
5120 				 throw, but not the last one.  That would mean
5121 				 we'd have to split the block, which we can't
5122 				 do here and we'd lose anyway.  And as builtins
5123 				 probably never throw, this all
5124 				 is moot anyway.  */
5125 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
5126 								  new_stmt))
5127 				bitmap_set_bit (to_purge, first);
5128 			      break;
5129 			    }
5130 			  gsi_next (&i2);
5131 			}
5132 		    }
5133 		}
5134 	      else if (fold_stmt (&gsi))
5135 		{
5136 		  /* Re-read the statement from GSI as fold_stmt() may
5137 		     have changed it.  */
5138 		  gimple *new_stmt = gsi_stmt (gsi);
5139 		  update_stmt (new_stmt);
5140 
5141 		  if (is_gimple_call (old_stmt)
5142 		      || is_gimple_call (new_stmt))
5143 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5144 						       new_stmt);
5145 
5146 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5147 		    bitmap_set_bit (to_purge, first);
5148 		}
5149 	    }
5150       }
5151   gimple_purge_all_dead_eh_edges (to_purge);
5152 }
5153 
5154 /* Expand calls to inline functions in the body of FN.  */
5155 
5156 unsigned int
5157 optimize_inline_calls (tree fn)
5158 {
5159   copy_body_data id;
5160   basic_block bb;
5161   int last = n_basic_blocks_for_fn (cfun);
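  /* Snapshot the block count before any inlining: the bodies copied in
     below get block indices at or above this value, so fold_marked_statements
     only has to scan those newly created blocks.  */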
5162   bool inlined_p = false;
5163 
5164   /* Clear out ID.  */
5165   memset (&id, 0, sizeof (id));
5166 
5167   id.src_node = id.dst_node = cgraph_node::get (fn);
5168   gcc_assert (id.dst_node->definition);
5169   id.dst_fn = fn;
5170   /* Or any functions that aren't finished yet.  */
5171   if (current_function_decl)
5172     id.dst_fn = current_function_decl;
5173 
5174   id.copy_decl = copy_decl_maybe_to_var;
5175   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5176   id.transform_new_cfg = false;
5177   id.transform_return_to_modify = true;
5178   id.transform_parameter = true;
5179   id.transform_lang_insert_block = NULL;
5180   id.statements_to_fold = new hash_set<gimple *>;
5181 
5182   push_gimplify_context ();
5183 
5184   /* We make no attempts to keep dominance info up-to-date.  */
5185   free_dominance_info (CDI_DOMINATORS);
5186   free_dominance_info (CDI_POST_DOMINATORS);
5187 
5188   /* Register specific gimple functions.  */
5189   gimple_register_cfg_hooks ();
5190 
5191   /* Reach the trees by walking over the CFG, and note the
5192      enclosing basic-blocks in the call edges.  */
5193   /* We walk the blocks going forward, because inlined function bodies
5194      will split the block containing the call, and the new blocks will
5195      follow it; we'll trudge through them, processing their CALL_EXPRs
5196      along the way.  */
5197   auto_bitmap to_purge;
5198   FOR_EACH_BB_FN (bb, cfun)
5199     inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5200 
5201   pop_gimplify_context (NULL);
5202 
5203   if (flag_checking)
5204     {
5205       struct cgraph_edge *e;
5206 
5207       id.dst_node->verify ();
5208 
5209       /* Double check that we inlined everything we are supposed to inline.  */
5210       for (e = id.dst_node->callees; e; e = e->next_callee)
5211 	gcc_assert (e->inline_failed);
5212     }
5213 
5214   /* Fold queued statements.  */
5215   update_max_bb_count ();
5216   fold_marked_statements (last, id.statements_to_fold);
5217   delete id.statements_to_fold;
5218 
5219   /* Finally purge EH and abnormal edges from the call stmts we inlined.
5220      We need to do this after fold_marked_statements since that may walk
5221      the SSA use-def chain.  */
5222   unsigned i;
5223   bitmap_iterator bi;
5224   EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5225     {
5226       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5227       if (bb)
5228 	{
5229 	  gimple_purge_dead_eh_edges (bb);
5230 	  gimple_purge_dead_abnormal_call_edges (bb);
5231 	}
5232     }
5233 
5234   gcc_assert (!id.debug_stmts.exists ());
5235 
5236   /* If we didn't inline into the function there is nothing to do.  */
5237   if (!inlined_p)
5238     return 0;
5239 
5240   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5241   number_blocks (fn);
5242 
5243   delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5244 
5245   if (flag_checking)
5246     id.dst_node->verify ();
5247 
5248   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5249      not possible yet - the IPA passes might make various functions not
5250      throw, and they don't care to proactively update local EH info.  This is
5251      done later in the fixup_cfg pass, which also executes the verification.  */
5252   return (TODO_update_ssa
5253 	  | TODO_cleanup_cfg
5254 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5255 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5256 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5257 	     ? TODO_rebuild_frequencies : 0));
5258 }
5259 
5260 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
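/* A minimal usage sketch (hypothetical caller):

     tree t = expr;
     walk_tree (&t, copy_tree_r, NULL, NULL);

   after which T shares no expression nodes with EXPR, while most
   declarations, types and constants remain shared.  */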
5261 
5262 tree
5263 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5264 {
5265   enum tree_code code = TREE_CODE (*tp);
5266   enum tree_code_class cl = TREE_CODE_CLASS (code);
5267 
5268   /* We make copies of most nodes.  */
5269   if (IS_EXPR_CODE_CLASS (cl)
5270       || code == TREE_LIST
5271       || code == TREE_VEC
5272       || code == TYPE_DECL
5273       || code == OMP_CLAUSE)
5274     {
5275       /* Because the chain gets clobbered when we make a copy, we save it
5276 	 here.  */
5277       tree chain = NULL_TREE, new_tree;
5278 
5279       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5280 	chain = TREE_CHAIN (*tp);
5281 
5282       /* Copy the node.  */
5283       new_tree = copy_node (*tp);
5284 
5285       *tp = new_tree;
5286 
5287       /* Now, restore the chain, if appropriate.  That will cause
5288 	 walk_tree to walk into the chain as well.  */
5289       if (code == PARM_DECL
5290 	  || code == TREE_LIST
5291 	  || code == OMP_CLAUSE)
5292 	TREE_CHAIN (*tp) = chain;
5293 
5294       /* For now, we don't update BLOCKs when we make copies.  So, we
5295 	 have to nullify all BIND_EXPRs.  */
5296       if (TREE_CODE (*tp) == BIND_EXPR)
5297 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5298     }
5299   else if (code == CONSTRUCTOR)
5300     {
5301       /* CONSTRUCTOR nodes need special handling because
5302          we need to duplicate the vector of elements.  */
5303       tree new_tree;
5304 
5305       new_tree = copy_node (*tp);
5306       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5307       *tp = new_tree;
5308     }
5309   else if (code == STATEMENT_LIST)
5310     /* We used to just abort on STATEMENT_LIST, but we can run into them
5311        with statement-expressions (c++/40975).  */
5312     copy_statement_list (tp);
5313   else if (TREE_CODE_CLASS (code) == tcc_type)
5314     *walk_subtrees = 0;
5315   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5316     *walk_subtrees = 0;
5317   else if (TREE_CODE_CLASS (code) == tcc_constant)
5318     *walk_subtrees = 0;
5319   return NULL_TREE;
5320 }
5321 
5322 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5323    information indicating to what new SAVE_EXPR this one should be mapped,
5324    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5325    the function into which the copy will be placed.  */
5326 
5327 static void
5328 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5329 {
5330   tree *n;
5331   tree t;
5332 
5333   /* See if we already encountered this SAVE_EXPR.  */
5334   n = st->get (*tp);
5335 
5336   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5337   if (!n)
5338     {
5339       t = copy_node (*tp);
5340 
5341       /* Remember this SAVE_EXPR.  */
5342       st->put (*tp, t);
5343       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5344       st->put (t, t);
5345     }
5346   else
5347     {
5348       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5349       *walk_subtrees = 0;
5350       t = *n;
5351     }
5352 
5353   /* Replace this SAVE_EXPR with the copy.  */
5354   *tp = t;
5355 }
5356 
5357 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5358    label, copies the declaration and enters it in the decl map in DATA (which
5359    is really a 'copy_body_data *').  */
5360 
5361 static tree
5362 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5363 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5364 		        struct walk_stmt_info *wi)
5365 {
5366   copy_body_data *id = (copy_body_data *) wi->info;
5367   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5368 
5369   if (stmt)
5370     {
5371       tree decl = gimple_label_label (stmt);
5372 
5373       /* Copy the decl and remember the copy.  */
5374       insert_decl_map (id, decl, id->copy_decl (decl, id));
5375     }
5376 
5377   return NULL_TREE;
5378 }
5379 
5380 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5381 						  struct walk_stmt_info *wi);
5382 
5383 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5384    Using the map pointed to by ST (which is really a 'hash_map<tree, tree>'),
5385    remaps all local declarations to appropriate replacements in gimple
5386    operands. */
5387 
5388 static tree
5389 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5390 {
5391   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5392   copy_body_data *id = (copy_body_data *) wi->info;
5393   hash_map<tree, tree> *st = id->decl_map;
5394   tree *n;
5395   tree expr = *tp;
5396 
5397   /* For recursive invocations this is no longer the LHS itself.  */
5398   bool is_lhs = wi->is_lhs;
5399   wi->is_lhs = false;
5400 
5401   if (TREE_CODE (expr) == SSA_NAME)
5402     {
5403       *tp = remap_ssa_name (*tp, id);
5404       *walk_subtrees = 0;
5405       if (is_lhs)
5406 	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5407     }
5408   /* Only a local declaration (variable or label).  */
5409   else if ((VAR_P (expr) && !TREE_STATIC (expr))
5410 	   || TREE_CODE (expr) == LABEL_DECL)
5411     {
5412       /* Lookup the declaration.  */
5413       n = st->get (expr);
5414 
5415       /* If it's there, remap it.  */
5416       if (n)
5417 	*tp = *n;
5418       *walk_subtrees = 0;
5419     }
5420   else if (TREE_CODE (expr) == STATEMENT_LIST
5421 	   || TREE_CODE (expr) == BIND_EXPR
5422 	   || TREE_CODE (expr) == SAVE_EXPR)
5423     gcc_unreachable ();
5424   else if (TREE_CODE (expr) == TARGET_EXPR)
5425     {
5426       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5427          It's OK for this to happen if it was part of a subtree that
5428          isn't immediately expanded, such as operand 2 of another
5429          TARGET_EXPR.  */
5430       if (!TREE_OPERAND (expr, 1))
5431 	{
5432 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5433 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5434 	}
5435     }
5436   else if (TREE_CODE (expr) == OMP_CLAUSE)
5437     {
5438       /* Before the omplower pass completes, some OMP clauses can contain
5439 	 sequences that are neither copied by gimple_seq_copy nor walked by
5440 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5441 	 in those situations, we have to copy and process them explicitely.  */
5442 
5443       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5444 	{
5445 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5446 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5447 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5448 	}
5449       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5450 	{
5451 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5452 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5453 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5454 	}
5455       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5456 	{
5457 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5458 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5459 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5460 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5461 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5462 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5463 	}
5464     }
5465 
5466   /* Keep iterating.  */
5467   return NULL_TREE;
5468 }
5469 
5470 
5471 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5472    Using the map pointed to by ST (which is really a 'hash_map<tree, tree>'),
5473    remaps all local declarations to appropriate replacements in gimple
5474    statements. */
5475 
5476 static tree
5477 replace_locals_stmt (gimple_stmt_iterator *gsip,
5478 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5479 		     struct walk_stmt_info *wi)
5480 {
5481   copy_body_data *id = (copy_body_data *) wi->info;
5482   gimple *gs = gsi_stmt (*gsip);
5483 
5484   if (gbind *stmt = dyn_cast <gbind *> (gs))
5485     {
5486       tree block = gimple_bind_block (stmt);
5487 
5488       if (block)
5489 	{
5490 	  remap_block (&block, id);
5491 	  gimple_bind_set_block (stmt, block);
5492 	}
5493 
5494       /* This will remap a lot of the same decls again, but this should be
5495 	 harmless.  */
5496       if (gimple_bind_vars (stmt))
5497 	{
5498 	  tree old_var, decls = gimple_bind_vars (stmt);
5499 
5500 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5501 	    if (!can_be_nonlocal (old_var, id)
5502 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5503 	      remap_decl (old_var, id);
5504 
5505 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5506 	  id->prevent_decl_creation_for_types = true;
5507 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5508 	  id->prevent_decl_creation_for_types = false;
5509 	}
5510     }
5511 
5512   /* Keep iterating.  */
5513   return NULL_TREE;
5514 }
5515 
5516 /* Create a copy of SEQ and remap all decls in it.  */
5517 
5518 static gimple_seq
5519 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5520 {
5521   if (!seq)
5522     return NULL;
5523 
5524   /* If there are any labels in OMP sequences, they can only be referred to
5525      from within the sequence itself, so we can do both the label marking and
5526      the remapping here.  */
5526   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5527   gimple_seq copy = gimple_seq_copy (seq);
5528   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5529   return copy;
5530 }
5531 
5532 /* Copies everything in SEQ and replaces variables and labels local to
5533    current_function_decl.  */
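/* A minimal usage sketch (hypothetical caller):

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   COPY is a deep copy of SEQ in which labels defined in the sequence,
   variables declared by its GIMPLE_BINDs and SSA names have been replaced
   by freshly created ones, so both sequences can live in
   current_function_decl independently.  */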
5534 
5535 gimple_seq
5536 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5537 {
5538   copy_body_data id;
5539   struct walk_stmt_info wi;
5540   gimple_seq copy;
5541 
5542   /* There's nothing to do for NULL_TREE.  */
5543   if (seq == NULL)
5544     return seq;
5545 
5546   /* Set up ID.  */
5547   memset (&id, 0, sizeof (id));
5548   id.src_fn = current_function_decl;
5549   id.dst_fn = current_function_decl;
5550   id.src_cfun = cfun;
5551   id.decl_map = new hash_map<tree, tree>;
5552   id.debug_map = NULL;
5553 
5554   id.copy_decl = copy_decl_no_change;
5555   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5556   id.transform_new_cfg = false;
5557   id.transform_return_to_modify = false;
5558   id.transform_parameter = false;
5559   id.transform_lang_insert_block = NULL;
5560 
5561   /* Walk the tree once to find local labels.  */
5562   memset (&wi, 0, sizeof (wi));
5563   hash_set<tree> visited;
5564   wi.info = &id;
5565   wi.pset = &visited;
5566   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5567 
5568   copy = gimple_seq_copy (seq);
5569 
5570   /* Walk the copy, remapping decls.  */
5571   memset (&wi, 0, sizeof (wi));
5572   wi.info = &id;
5573   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5574 
5575   /* Clean up.  */
5576   delete id.decl_map;
5577   if (id.debug_map)
5578     delete id.debug_map;
5579   if (id.dependence_map)
5580     {
5581       delete id.dependence_map;
5582       id.dependence_map = NULL;
5583     }
5584 
5585   return copy;
5586 }
5587 
5588 
5589 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5590 
5591 static tree
5592 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5593 {
5594   if (*tp == data)
5595     return (tree) data;
5596   else
5597     return NULL;
5598 }
5599 
5600 DEBUG_FUNCTION bool
5601 debug_find_tree (tree top, tree search)
5602 {
5603   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5604 }
5605 
5606 
5607 /* Declare the variables created by the inliner.  Add all the variables in
5608    VARS to BLOCK.  */
5609 
5610 static void
5611 declare_inline_vars (tree block, tree vars)
5612 {
5613   tree t;
5614   for (t = vars; t; t = DECL_CHAIN (t))
5615     {
5616       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5617       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5618       add_local_decl (cfun, t);
5619     }
5620 
5621   if (block)
5622     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5623 }
5624 
5625 /* Finish up the duplication COPY of DECL.  The DECL originally was in
5626    ID->src_fn, but the copy will live in ID->dst_fn; fix up its context,
5627    debug-related flags and mode accordingly.  */
5628 
5629 tree
5630 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5631 {
5632   /* Don't generate debug information for the copy if we wouldn't have
5633      generated it for the original either.  */
5634   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5635   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5636 
5637   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5638      declaration inspired this copy.  */
5639   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5640 
5641   /* The new variable/label has no RTL, yet.  */
5642   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5643       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5644     SET_DECL_RTL (copy, 0);
5645   /* For vector typed decls make sure to update DECL_MODE according
5646      to the new function context.  */
5647   if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5648     SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
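  /* Presumably needed because the source and destination functions may be
     compiled with different target options, so the same vector type can
     have a different TYPE_MODE in the new context.  */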
5649 
5650   /* These args would always appear unused, if not for this.  */
5651   TREE_USED (copy) = 1;
5652 
5653   /* Set the context for the new declaration.  */
5654   if (!DECL_CONTEXT (decl))
5655     /* Globals stay global.  */
5656     ;
5657   else if (DECL_CONTEXT (decl) != id->src_fn)
5658     /* Things that weren't in the scope of the function we're inlining
5659        from aren't in the scope we're inlining to, either.  */
5660     ;
5661   else if (TREE_STATIC (decl))
5662     /* Function-scoped static variables should stay in the original
5663        function.  */
5664     ;
5665   else
5666     {
5667       /* Ordinary automatic local variables are now in the scope of the
5668 	 new function.  */
5669       DECL_CONTEXT (copy) = id->dst_fn;
5670       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5671 	{
5672 	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5673 	    DECL_ATTRIBUTES (copy)
5674 	      = tree_cons (get_identifier ("omp simt private"), NULL,
5675 			   DECL_ATTRIBUTES (copy));
5676 	  id->dst_simt_vars->safe_push (copy);
5677 	}
5678     }
5679 
5680   return copy;
5681 }
5682 
5683 static tree
5684 copy_decl_to_var (tree decl, copy_body_data *id)
5685 {
5686   tree copy, type;
5687 
5688   gcc_assert (TREE_CODE (decl) == PARM_DECL
5689 	      || TREE_CODE (decl) == RESULT_DECL);
5690 
5691   type = TREE_TYPE (decl);
5692 
5693   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5694 		     VAR_DECL, DECL_NAME (decl), type);
5695   if (DECL_PT_UID_SET_P (decl))
5696     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5697   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5698   TREE_READONLY (copy) = TREE_READONLY (decl);
5699   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5700   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5701 
5702   return copy_decl_for_dup_finish (id, decl, copy);
5703 }
5704 
5705 /* Like copy_decl_to_var, but create a return slot object instead of a
5706    pointer variable for return by invisible reference.  */
5707 
5708 static tree
5709 copy_result_decl_to_var (tree decl, copy_body_data *id)
5710 {
5711   tree copy, type;
5712 
5713   gcc_assert (TREE_CODE (decl) == PARM_DECL
5714 	      || TREE_CODE (decl) == RESULT_DECL);
5715 
5716   type = TREE_TYPE (decl);
5717   if (DECL_BY_REFERENCE (decl))
5718     type = TREE_TYPE (type);
5719 
5720   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5721 		     VAR_DECL, DECL_NAME (decl), type);
5722   if (DECL_PT_UID_SET_P (decl))
5723     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5724   TREE_READONLY (copy) = TREE_READONLY (decl);
5725   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5726   if (!DECL_BY_REFERENCE (decl))
5727     {
5728       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5729       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5730     }
5731 
5732   return copy_decl_for_dup_finish (id, decl, copy);
5733 }
5734 
5735 tree
5736 copy_decl_no_change (tree decl, copy_body_data *id)
5737 {
5738   tree copy;
5739 
5740   copy = copy_node (decl);
5741 
5742   /* The COPY is not abstract; it will be generated in DST_FN.  */
5743   DECL_ABSTRACT_P (copy) = false;
5744   lang_hooks.dup_lang_specific_decl (copy);
5745 
5746   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5747      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5748   if (TREE_CODE (copy) == LABEL_DECL)
5749     {
5750       TREE_ADDRESSABLE (copy) = 0;
5751       LABEL_DECL_UID (copy) = -1;
5752     }
5753 
5754   return copy_decl_for_dup_finish (id, decl, copy);
5755 }
5756 
5757 static tree
5758 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5759 {
5760   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5761     return copy_decl_to_var (decl, id);
5762   else
5763     return copy_decl_no_change (decl, id);
5764 }
5765 
5766 /* Return a copy of the function's argument tree.  */
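/* If ARGS_TO_SKIP is non-NULL, a set bit I means the I-th parameter is
   dropped from the copy; any remaining uses of it are redirected to a new
   local VAR_DECL that is chained onto *VARS instead.  */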
5767 static tree
5768 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5769 			       bitmap args_to_skip, tree *vars)
5770 {
5771   tree arg, *parg;
5772   tree new_parm = NULL;
5773   int i = 0;
5774 
5775   parg = &new_parm;
5776 
5777   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5778     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5779       {
5780         tree new_tree = remap_decl (arg, id);
5781 	if (TREE_CODE (new_tree) != PARM_DECL)
5782 	  new_tree = id->copy_decl (arg, id);
5783         lang_hooks.dup_lang_specific_decl (new_tree);
5784         *parg = new_tree;
5785 	parg = &DECL_CHAIN (new_tree);
5786       }
5787     else if (!id->decl_map->get (arg))
5788       {
5789 	/* Make an equivalent VAR_DECL.  If the argument is used
5790 	   as a temporary variable later in the function, the uses will be
5791 	   replaced by this local variable.  */
5792 	tree var = copy_decl_to_var (arg, id);
5793 	insert_decl_map (id, arg, var);
5794         /* Declare this new variable.  */
5795         DECL_CHAIN (var) = *vars;
5796         *vars = var;
5797       }
5798   return new_parm;
5799 }
5800 
5801 /* Return a copy of the function's static chain.  */
5802 static tree
5803 copy_static_chain (tree static_chain, copy_body_data * id)
5804 {
5805   tree *chain_copy, *pvar;
5806 
5807   chain_copy = &static_chain;
5808   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5809     {
5810       tree new_tree = remap_decl (*pvar, id);
5811       lang_hooks.dup_lang_specific_decl (new_tree);
5812       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5813       *pvar = new_tree;
5814     }
5815   return static_chain;
5816 }
5817 
5818 /* Return true if the function is allowed to be versioned.
5819    This is a guard for the versioning functionality.  */
5820 
5821 bool
5822 tree_versionable_function_p (tree fndecl)
5823 {
5824   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5825 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5826 }
5827 
5828 /* Update clone info after duplication.  */
5829 
5830 static void
5831 update_clone_info (copy_body_data * id)
5832 {
5833   struct cgraph_node *node;
5834   if (!id->dst_node->clones)
5835     return;
5836   for (node = id->dst_node->clones; node != id->dst_node;)
5837     {
5838       /* First update replace maps to match the new body.  */
5839       if (node->clone.tree_map)
5840         {
5841 	  unsigned int i;
5842           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5843 	    {
5844 	      struct ipa_replace_map *replace_info;
5845 	      replace_info = (*node->clone.tree_map)[i];
5846 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5847 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5848 	    }
5849 	}
5850       if (node->clones)
5851 	node = node->clones;
5852       else if (node->next_sibling_clone)
5853 	node = node->next_sibling_clone;
5854       else
5855 	{
5856 	  while (node != id->dst_node && !node->next_sibling_clone)
5857 	    node = node->clone_of;
5858 	  if (node != id->dst_node)
5859 	    node = node->next_sibling_clone;
5860 	}
5861     }
5862 }
5863 
5864 /* Create a copy of a function's tree.
5865    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5866    of the original function and the new copied function
5867    respectively.  In case we want to replace a DECL
5868    tree with another tree while duplicating the function's
5869    body, TREE_MAP represents the mapping between these
5870    trees. If UPDATE_CLONES is set, the call_stmt fields
5871    of edges of clones of the function will be updated.
5872 
5873    If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5874    from the new version.
5875    If SKIP_RETURN is true, the new version will return void.
5876    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5877    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5878 */
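/* A minimal calling sketch: the cgraph cloning code (for example
   cgraph_node::create_version_clone_with_body, or clone materialization)
   first creates NEW_DECL and its cgraph node, then calls

     tree_function_versioning (old_decl, new_decl, tree_map, update_clones,
			       args_to_skip, skip_return, blocks_to_copy,
			       new_entry);

   to actually populate NEW_DECL's body from OLD_DECL.  */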
5879 void
5880 tree_function_versioning (tree old_decl, tree new_decl,
5881 			  vec<ipa_replace_map *, va_gc> *tree_map,
5882 			  bool update_clones, bitmap args_to_skip,
5883 			  bool skip_return, bitmap blocks_to_copy,
5884 			  basic_block new_entry)
5885 {
5886   struct cgraph_node *old_version_node;
5887   struct cgraph_node *new_version_node;
5888   copy_body_data id;
5889   tree p;
5890   unsigned i;
5891   struct ipa_replace_map *replace_info;
5892   basic_block old_entry_block, bb;
5893   auto_vec<gimple *, 10> init_stmts;
5894   tree vars = NULL_TREE;
5895   bitmap debug_args_to_skip = args_to_skip;
5896 
5897   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5898 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5899   DECL_POSSIBLY_INLINED (old_decl) = 1;
5900 
5901   old_version_node = cgraph_node::get (old_decl);
5902   gcc_checking_assert (old_version_node);
5903   new_version_node = cgraph_node::get (new_decl);
5904   gcc_checking_assert (new_version_node);
5905 
5906   /* Copy over debug args.  */
5907   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5908     {
5909       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5910       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5911       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5912       old_debug_args = decl_debug_args_lookup (old_decl);
5913       if (old_debug_args)
5914 	{
5915 	  new_debug_args = decl_debug_args_insert (new_decl);
5916 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5917 	}
5918     }
5919 
5920   /* Output the inlining info for this abstract function, since it has been
5921      inlined.  If we don't do this now, we can lose the information about the
5922      variables in the function when the blocks get blown away as soon as we
5923      remove the cgraph node.  */
5924   (*debug_hooks->outlining_inline_function) (old_decl);
5925 
5926   DECL_ARTIFICIAL (new_decl) = 1;
5927   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5928   if (DECL_ORIGIN (old_decl) == old_decl)
5929     old_version_node->used_as_abstract_origin = true;
5930   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5931 
5932   /* Prepare the data structures for the tree copy.  */
5933   memset (&id, 0, sizeof (id));
5934 
5935   /* Generate a new name for the new version. */
5936   id.statements_to_fold = new hash_set<gimple *>;
5937 
5938   id.decl_map = new hash_map<tree, tree>;
5939   id.debug_map = NULL;
5940   id.src_fn = old_decl;
5941   id.dst_fn = new_decl;
5942   id.src_node = old_version_node;
5943   id.dst_node = new_version_node;
5944   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5945   id.blocks_to_copy = blocks_to_copy;
5946 
5947   id.copy_decl = copy_decl_no_change;
5948   id.transform_call_graph_edges
5949     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5950   id.transform_new_cfg = true;
5951   id.transform_return_to_modify = false;
5952   id.transform_parameter = false;
5953   id.transform_lang_insert_block = NULL;
5954 
5955   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5956     (DECL_STRUCT_FUNCTION (old_decl));
5957   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5958   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5959   initialize_cfun (new_decl, old_decl,
5960 		   new_entry ? new_entry->count : old_entry_block->count);
5961   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5962     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5963       = id.src_cfun->gimple_df->ipa_pta;
5964 
5965   /* Copy the function's static chain.  */
5966   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5967   if (p)
5968     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5969       = copy_static_chain (p, &id);
5970 
5971   /* If there's a tree_map, prepare for substitution.  */
5972   if (tree_map)
5973     for (i = 0; i < tree_map->length (); i++)
5974       {
5975 	gimple *init;
5976 	replace_info = (*tree_map)[i];
5977 	if (replace_info->replace_p)
5978 	  {
5979 	    int parm_num = -1;
5980 	    if (!replace_info->old_tree)
5981 	      {
5982 		int p = replace_info->parm_num;
5983 		tree parm;
5984 		tree req_type, new_type;
5985 
5986 		for (parm = DECL_ARGUMENTS (old_decl); p;
5987 		     parm = DECL_CHAIN (parm))
5988 		  p--;
5989 		replace_info->old_tree = parm;
5990 		parm_num = replace_info->parm_num;
5991 		req_type = TREE_TYPE (parm);
5992 		new_type = TREE_TYPE (replace_info->new_tree);
5993 		if (!useless_type_conversion_p (req_type, new_type))
5994 		  {
5995 		    if (fold_convertible_p (req_type, replace_info->new_tree))
5996 		      replace_info->new_tree
5997 			= fold_build1 (NOP_EXPR, req_type,
5998 				       replace_info->new_tree);
5999 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6000 		      replace_info->new_tree
6001 			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
6002 				       replace_info->new_tree);
6003 		    else
6004 		      {
6005 			if (dump_file)
6006 			  {
6007 			    fprintf (dump_file, "    const ");
6008 			    print_generic_expr (dump_file,
6009 						replace_info->new_tree);
6010 			    fprintf (dump_file,
6011 				     "  can't be converted to param ");
6012 			    print_generic_expr (dump_file, parm);
6013 			    fprintf (dump_file, "\n");
6014 			  }
6015 			replace_info->old_tree = NULL;
6016 		      }
6017 		  }
6018 	      }
6019 	    else
6020 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6021 	    if (replace_info->old_tree)
6022 	      {
6023 		init = setup_one_parameter (&id, replace_info->old_tree,
6024 					    replace_info->new_tree, id.src_fn,
6025 					    NULL,
6026 					    &vars);
6027 		if (init)
6028 		  init_stmts.safe_push (init);
6029 		if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6030 		  {
6031 		    if (parm_num == -1)
6032 		      {
6033 			tree parm;
6034 			int p;
6035 			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6036 			     parm = DECL_CHAIN (parm), p++)
6037 			  if (parm == replace_info->old_tree)
6038 			    {
6039 			      parm_num = p;
6040 			      break;
6041 			    }
6042 		      }
6043 		    if (parm_num != -1)
6044 		      {
6045 			if (debug_args_to_skip == args_to_skip)
6046 			  {
6047 			    debug_args_to_skip = BITMAP_ALLOC (NULL);
6048 			    bitmap_copy (debug_args_to_skip, args_to_skip);
6049 			  }
6050 			bitmap_clear_bit (debug_args_to_skip, parm_num);
6051 		      }
6052 		  }
6053 	      }
6054 	  }
6055       }
6056   /* Copy the function's arguments.  */
6057   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6058     DECL_ARGUMENTS (new_decl)
6059       = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6060 				       args_to_skip, &vars);
6061 
6062   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6063   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6064 
6065   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6066 
6067   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6068     /* Add local vars.  */
6069     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6070 
6071   if (DECL_RESULT (old_decl) == NULL_TREE)
6072     ;
6073   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6074     {
6075       DECL_RESULT (new_decl)
6076 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6077 		      RESULT_DECL, NULL_TREE, void_type_node);
6078       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6079       cfun->returns_struct = 0;
6080       cfun->returns_pcc_struct = 0;
6081     }
6082   else
6083     {
6084       tree old_name;
6085       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6086       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6087       if (gimple_in_ssa_p (id.src_cfun)
6088 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6089 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6090 	{
6091 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6092 	  insert_decl_map (&id, old_name, new_name);
6093 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6094 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6095 	}
6096     }
6097 
6098   /* Set up the destination function's loop tree.  */
6099   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6100     {
6101       cfun->curr_properties &= ~PROP_loops;
6102       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6103       cfun->curr_properties |= PROP_loops;
6104     }
6105 
6106   /* Copy the Function's body.  */
6107   copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6108 	     new_entry);
6109 
6110   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6111   number_blocks (new_decl);
6112 
6113   /* We want to create the BB unconditionally, so that the addition of
6114      debug stmts doesn't affect BB count, which may in the end cause
6115      codegen differences.  */
6116   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6117   while (init_stmts.length ())
6118     insert_init_stmt (&id, bb, init_stmts.pop ());
6119   update_clone_info (&id);
6120 
6121   /* Remap the nonlocal_goto_save_area, if any.  */
6122   if (cfun->nonlocal_goto_save_area)
6123     {
6124       struct walk_stmt_info wi;
6125 
6126       memset (&wi, 0, sizeof (wi));
6127       wi.info = &id;
6128       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6129     }
6130 
6131   /* Clean up.  */
6132   delete id.decl_map;
6133   if (id.debug_map)
6134     delete id.debug_map;
6135   free_dominance_info (CDI_DOMINATORS);
6136   free_dominance_info (CDI_POST_DOMINATORS);
6137 
6138   update_max_bb_count ();
6139   fold_marked_statements (0, id.statements_to_fold);
6140   delete id.statements_to_fold;
6141   delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6142   if (id.dst_node->definition)
6143     cgraph_edge::rebuild_references ();
6144   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6145     {
6146       calculate_dominance_info (CDI_DOMINATORS);
6147       fix_loop_structure (NULL);
6148     }
6149   update_ssa (TODO_update_ssa);
6150 
6151   /* After partial cloning we need to rescale frequencies, so they are
6152      within proper range in the cloned function.  */
6153   if (new_entry)
6154     {
6155       struct cgraph_edge *e;
6156       rebuild_frequencies ();
6157 
6158       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6159       for (e = new_version_node->callees; e; e = e->next_callee)
6160 	{
6161 	  basic_block bb = gimple_bb (e->call_stmt);
6162 	  e->count = bb->count;
6163 	}
6164       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6165 	{
6166 	  basic_block bb = gimple_bb (e->call_stmt);
6167 	  e->count = bb->count;
6168 	}
6169     }
6170 
6171   if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6172     {
6173       tree parm;
6174       vec<tree, va_gc> **debug_args = NULL;
6175       unsigned int len = 0;
6176       for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6177 	   parm; parm = DECL_CHAIN (parm), i++)
6178 	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6179 	  {
6180 	    tree ddecl;
6181 
6182 	    if (debug_args == NULL)
6183 	      {
6184 		debug_args = decl_debug_args_insert (new_decl);
6185 		len = vec_safe_length (*debug_args);
6186 	      }
6187 	    ddecl = make_node (DEBUG_EXPR_DECL);
6188 	    DECL_ARTIFICIAL (ddecl) = 1;
6189 	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
6190 	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
6191 	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6192 	    vec_safe_push (*debug_args, ddecl);
6193 	  }
6194       if (debug_args != NULL)
6195 	{
6196 	  /* On the callee side, add
6197 	     DEBUG D#Y s=> parm
6198 	     DEBUG var => D#Y
6199 	     stmts to the first bb where var is a VAR_DECL created for the
6200 	     optimized away parameter in DECL_INITIAL block.  This hints
6201 	     in the debug info that var (whose DECL_ORIGIN is the parm
6202 	     PARM_DECL) is optimized away, but could be looked up at the
6203 	     call site as value of D#X there.  */
6204 	  tree var = vars, vexpr;
6205 	  gimple_stmt_iterator cgsi
6206 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6207 	  gimple *def_temp;
6208 	  var = vars;
6209 	  i = vec_safe_length (*debug_args);
6210 	  do
6211 	    {
6212 	      i -= 2;
6213 	      while (var != NULL_TREE
6214 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6215 		var = TREE_CHAIN (var);
6216 	      if (var == NULL_TREE)
6217 		break;
6218 	      vexpr = make_node (DEBUG_EXPR_DECL);
6219 	      parm = (**debug_args)[i];
6220 	      DECL_ARTIFICIAL (vexpr) = 1;
6221 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6222 	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
6223 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6224 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6225 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6226 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6227 	    }
6228 	  while (i > len);
6229 	}
6230     }
6231 
6232   if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6233     BITMAP_FREE (debug_args_to_skip);
6234   free_dominance_info (CDI_DOMINATORS);
6235   free_dominance_info (CDI_POST_DOMINATORS);
6236 
6237   gcc_assert (!id.debug_stmts.exists ());
6238   pop_cfun ();
6239   return;
6240 }
6241 
6242 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6243    the callee and return the inlined body on success.  */
6244 
6245 tree
6246 maybe_inline_call_in_expr (tree exp)
6247 {
6248   tree fn = get_callee_fndecl (exp);
6249 
6250   /* We can only try to inline "const" functions.  */
6251   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6252     {
6253       call_expr_arg_iterator iter;
6254       copy_body_data id;
6255       tree param, arg, t;
6256       hash_map<tree, tree> decl_map;
6257 
6258       /* Remap the parameters.  */
6259       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6260 	   param;
6261 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6262 	decl_map.put (param, arg);
6263 
6264       memset (&id, 0, sizeof (id));
6265       id.src_fn = fn;
6266       id.dst_fn = current_function_decl;
6267       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6268       id.decl_map = &decl_map;
6269 
6270       id.copy_decl = copy_decl_no_change;
6271       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6272       id.transform_new_cfg = false;
6273       id.transform_return_to_modify = true;
6274       id.transform_parameter = true;
6275       id.transform_lang_insert_block = NULL;
6276 
6277       /* Make sure not to unshare trees behind the front-end's back
6278 	 since front-end specific mechanisms may rely on sharing.  */
6279       id.regimplify = false;
6280       id.do_not_unshare = true;
6281 
6282       /* We're not inside any EH region.  */
6283       id.eh_lp_nr = 0;
6284 
6285       t = copy_tree_body (&id);
6286 
6287       /* We can only return something suitable for use in a GENERIC
6288 	 expression tree.  */
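      /* Since transform_return_to_modify was set, a const function whose
	 body is just 'return expr;' is copied as the single MODIFY_EXPR
	 '<retval> = expr', and its second operand is the value we want;
	 anything more complex falls through and is rejected below.  */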
6289       if (TREE_CODE (t) == MODIFY_EXPR)
6290 	return TREE_OPERAND (t, 1);
6291     }
6292 
6293    return NULL_TREE;
6294 }
6295 
6296 /* Duplicate a type, fields and all.  */
6297 
6298 tree
6299 build_duplicate_type (tree type)
6300 {
6301   struct copy_body_data id;
6302 
6303   memset (&id, 0, sizeof (id));
6304   id.src_fn = current_function_decl;
6305   id.dst_fn = current_function_decl;
6306   id.src_cfun = cfun;
6307   id.decl_map = new hash_map<tree, tree>;
6308   id.debug_map = NULL;
6309   id.copy_decl = copy_decl_no_change;
6310 
6311   type = remap_type_1 (type, &id);
6312 
6313   delete id.decl_map;
6314   if (id.debug_map)
6315     delete id.debug_map;
6316 
6317   TYPE_CANONICAL (type) = type;
6318 
6319   return type;
6320 }
6321 
6322 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6323    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6324    evaluation.  */
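/* A minimal usage sketch (hypothetical caller in the C++ front end):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   BODY is an unshared copy of DECL_SAVED_TREE (fndecl); PARMS receives the
   chain of remapped PARM_DECLs and RESULT the remapped RESULT_DECL, ready
   to be bound to argument values during constexpr evaluation.  */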
6325 
6326 tree
6327 copy_fn (tree fn, tree& parms, tree& result)
6328 {
6329   copy_body_data id;
6330   tree param;
6331   hash_map<tree, tree> decl_map;
6332 
6333   tree *p = &parms;
6334   *p = NULL_TREE;
6335 
6336   memset (&id, 0, sizeof (id));
6337   id.src_fn = fn;
6338   id.dst_fn = current_function_decl;
6339   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6340   id.decl_map = &decl_map;
6341 
6342   id.copy_decl = copy_decl_no_change;
6343   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6344   id.transform_new_cfg = false;
6345   id.transform_return_to_modify = false;
6346   id.transform_parameter = true;
6347   id.transform_lang_insert_block = NULL;
6348 
6349   /* Make sure not to unshare trees behind the front-end's back
6350      since front-end specific mechanisms may rely on sharing.  */
6351   id.regimplify = false;
6352   id.do_not_unshare = true;
6353   id.do_not_fold = true;
6354 
6355   /* We're not inside any EH region.  */
6356   id.eh_lp_nr = 0;
6357 
6358   /* Remap the parameters and result and return them to the caller.  */
6359   for (param = DECL_ARGUMENTS (fn);
6360        param;
6361        param = DECL_CHAIN (param))
6362     {
6363       *p = remap_decl (param, &id);
6364       p = &DECL_CHAIN (*p);
6365     }
6366 
6367   if (DECL_RESULT (fn))
6368     result = remap_decl (DECL_RESULT (fn), &id);
6369   else
6370     result = NULL_TREE;
6371 
6372   return copy_tree_body (&id);
6373 }
6374