xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-inline.c (revision afab4e300d3a9fb07dd8c80daf53d0feb3345706)
1 /* Tree inlining.
2    Copyright (C) 2001-2020 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 
65 /* I'm not real happy about this, but we need to handle gimple and
66    non-gimple trees.  */
67 
68 /* Inlining, Cloning, Versioning, Parallelization
69 
70    Inlining: a function body is duplicated, but the PARM_DECLs are
71    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72    MODIFY_EXPRs that store to a dedicated returned-value variable.
73    The duplicated eh_region info of the copy will later be appended
74    to the info for the caller; the eh_region info in copied throwing
75    statements and RESX statements is adjusted accordingly.
76 
77    Cloning: (only in C++) We have one body for a con/de/structor, and
78    multiple function decls, each with a unique parameter list.
79    Duplicate the body, using the given splay tree; some parameters
80    will become constants (like 0 or 1).
81 
82    Versioning: a function body is duplicated, but the copy becomes a new
83    function rather than being merged into the blocks of an existing
84    function as with inlining.  Some parameters will become constants.
85 
86    Parallelization: a region of a function is duplicated resulting in
87    a new function.  Variables may be replaced with complex expressions
88    to enable shared variable semantics.
89 
90    All of these will simultaneously look up any callgraph edges.  If
91    we're going to inline the duplicated function body, and the given
92    function has some cloned callgraph nodes (one for each place this
93    function will be inlined), those callgraph edges will be duplicated.
94    If we're cloning the body, those callgraph edges will be
95    updated to point into the new body.  (Note that the original
96    callgraph node and edge list will not be altered.)
97 
98    See the CALL_EXPR handling case in copy_tree_body_r ().  */
99 
100 /* To Do:
101 
102    o In order to make inlining-on-trees work, we pessimized
103      function-local static constants.  In particular, they are now
104      always output, even when not addressed.  Fix this by treating
105      function-local static constants just like global static
106      constants; the back-end already knows not to output them if they
107      are not needed.
108 
109    o Provide heuristics to clamp inlining of recursive template
110      calls?  */
111 
112 
113 /* Weights that estimate_num_insns uses to estimate the size of the
114    produced code.  */
115 
116 eni_weights eni_size_weights;
117 
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119    to execute the produced code.  */
120 
121 eni_weights eni_time_weights;
122 
123 /* Prototypes.  */
124 
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 				     basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
136 
137 /* Insert a tree->tree mapping for ID.  Although the name suggests
138    that the trees should be variables, it is used for more than that.  */
139 
140 void
141 insert_decl_map (copy_body_data *id, tree key, tree value)
142 {
143   id->decl_map->put (key, value);
144 
145   /* Always insert an identity map as well.  If we see this same new
146      node again, we won't want to duplicate it a second time.  */
147   if (key != value)
148     id->decl_map->put (value, value);
149 }
150 
151 /* Insert a tree->tree mapping for ID.  This is only used for
152    variables.  */
153 
154 static void
155 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 {
157   if (!gimple_in_ssa_p (id->src_cfun))
158     return;
159 
160   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
161     return;
162 
163   if (!target_for_debug_bind (key))
164     return;
165 
166   gcc_assert (TREE_CODE (key) == PARM_DECL);
167   gcc_assert (VAR_P (value));
168 
169   if (!id->debug_map)
170     id->debug_map = new hash_map<tree, tree>;
171 
172   id->debug_map->put (key, value);
173 }
174 
175 /* If nonzero, we're remapping the contents of inlined debug
176    statements.  If negative, an error has occurred, such as a
177    reference to a variable that isn't available in the inlined
178    context.  */
179 static int processing_debug_stmt = 0;
180 
181 /* Construct new SSA name for old NAME. ID is the inline context.  */
182 
183 static tree
184 remap_ssa_name (tree name, copy_body_data *id)
185 {
186   tree new_tree, var;
187   tree *n;
188 
189   gcc_assert (TREE_CODE (name) == SSA_NAME);
190 
191   n = id->decl_map->get (name);
192   if (n)
193     {
194       /* When we perform edge redirection as part of CFG copy, IPA-SRA can
195 	 remove an unused LHS from a call statement.  Such an LHS can however
196 	 still appear in debug statements, but its value is lost in this
197 	 function and we do not want to map it.  */
198       if (id->killed_new_ssa_names
199 	  && id->killed_new_ssa_names->contains (*n))
200 	{
201 	  gcc_assert (processing_debug_stmt);
202 	  processing_debug_stmt = -1;
203 	  return name;
204 	}
205 
206       return unshare_expr (*n);
207     }
208 
209   if (processing_debug_stmt)
210     {
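      /* For the default definition of a PARM_DECL (when not inlining into
	 a specific entry block), create a DEBUG_EXPR_DECL bound to the
	 remapped parameter via a debug source bind at the start of the
	 function, so later debug statements can still refer to the
	 incoming argument value.  */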
211       if (SSA_NAME_IS_DEFAULT_DEF (name)
212 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
213 	  && id->entry_bb == NULL
214 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
215 	{
216 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
217 	  gimple *def_temp;
218 	  gimple_stmt_iterator gsi;
219 	  tree val = SSA_NAME_VAR (name);
220 
221 	  n = id->decl_map->get (val);
222 	  if (n != NULL)
223 	    val = *n;
224 	  if (TREE_CODE (val) != PARM_DECL
225 	      && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
226 	    {
227 	      processing_debug_stmt = -1;
228 	      return name;
229 	    }
230 	  n = id->decl_map->get (val);
231 	  if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
232 	    return *n;
233 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
234 	  DECL_ARTIFICIAL (vexpr) = 1;
235 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
236 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
237 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
238 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
239 	  insert_decl_map (id, val, vexpr);
240 	  return vexpr;
241 	}
242 
243       processing_debug_stmt = -1;
244       return name;
245     }
246 
247   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
248   var = SSA_NAME_VAR (name);
249   if (!var
250       || (!SSA_NAME_IS_DEFAULT_DEF (name)
251 	  && VAR_P (var)
252 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
253 	  && DECL_ARTIFICIAL (var)
254 	  && DECL_IGNORED_P (var)
255 	  && !DECL_NAME (var)))
256     {
257       struct ptr_info_def *pi;
258       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
259       if (!var && SSA_NAME_IDENTIFIER (name))
260 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
261       insert_decl_map (id, name, new_tree);
262       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
263 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
264       /* At least IPA points-to info can be directly transferred.  */
265       if (id->src_cfun->gimple_df
266 	  && id->src_cfun->gimple_df->ipa_pta
267 	  && POINTER_TYPE_P (TREE_TYPE (name))
268 	  && (pi = SSA_NAME_PTR_INFO (name))
269 	  && !pi->pt.anything)
270 	{
271 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
272 	  new_pi->pt = pi->pt;
273 	}
274       /* So can range-info.  */
275       if (!POINTER_TYPE_P (TREE_TYPE (name))
276 	  && SSA_NAME_RANGE_INFO (name))
277 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
278 				       SSA_NAME_RANGE_INFO (name));
279       return new_tree;
280     }
281 
282   /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
283      that in copy_bb.  */
284   new_tree = remap_decl (var, id);
285 
286   /* We might've substituted a constant or another SSA_NAME for
287      the variable.
288 
289      Replace the SSA name representing RESULT_DECL by the variable during
290      inlining: this saves us from the need to introduce a PHI node in the
291      case the return value is only partly initialized.  */
292   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
293       && (!SSA_NAME_VAR (name)
294 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
295 	  || !id->transform_return_to_modify))
296     {
297       struct ptr_info_def *pi;
298       new_tree = make_ssa_name (new_tree);
299       insert_decl_map (id, name, new_tree);
300       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
301 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
302       /* At least IPA points-to info can be directly transferred.  */
303       if (id->src_cfun->gimple_df
304 	  && id->src_cfun->gimple_df->ipa_pta
305 	  && POINTER_TYPE_P (TREE_TYPE (name))
306 	  && (pi = SSA_NAME_PTR_INFO (name))
307 	  && !pi->pt.anything)
308 	{
309 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
310 	  new_pi->pt = pi->pt;
311 	}
312       /* So can range-info.  */
313       if (!POINTER_TYPE_P (TREE_TYPE (name))
314 	  && SSA_NAME_RANGE_INFO (name))
315 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
316 				       SSA_NAME_RANGE_INFO (name));
317       if (SSA_NAME_IS_DEFAULT_DEF (name))
318 	{
319 	  /* By inlining a function having an uninitialized variable, we might
320 	     extend its lifetime (the variable might get reused).  This causes
321 	     an ICE in the case we end up extending the lifetime of an SSA name
322 	     across an abnormal edge, and also increases register pressure.
323 
324 	     We simply initialize all uninitialized vars by 0, except
325 	     for the case we are inlining into the very first BB.  We could
326 	     avoid this for all BBs that are not inside strongly connected
327 	     regions of the CFG, but this is expensive to test.  */
328 	  if (id->entry_bb
329 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
330 	      && (!SSA_NAME_VAR (name)
331 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
332 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
333 					     0)->dest
334 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
335 	    {
336 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
337 	      gimple *init_stmt;
338 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
339 
340 	      init_stmt = gimple_build_assign (new_tree, zero);
341 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
342 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
343 	    }
344 	  else
345 	    {
346 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
347 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
348 	    }
349 	}
350     }
351   else
352     insert_decl_map (id, name, new_tree);
353   return new_tree;
354 }
355 
356 /* Remap DECL during the copying of the BLOCK tree for the function.  */
357 
358 tree
359 remap_decl (tree decl, copy_body_data *id)
360 {
361   tree *n;
362 
363   /* We only remap local variables in the current function.  */
364 
365   /* See if we have remapped this declaration.  */
366 
367   n = id->decl_map->get (decl);
368 
369   if (!n && processing_debug_stmt)
370     {
371       processing_debug_stmt = -1;
372       return decl;
373     }
374 
375   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
376      necessary DECLs have already been remapped and we do not want to duplicate
377      a decl coming from outside of the sequence we are copying.  */
378   if (!n
379       && id->prevent_decl_creation_for_types
380       && id->remapping_type_depth > 0
381       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
382     return decl;
383 
384   /* If we didn't already have an equivalent for this declaration, create one
385      now.  */
386   if (!n)
387     {
388       /* Make a copy of the variable or label.  */
389       tree t = id->copy_decl (decl, id);
390 
391       /* Remember it, so that if we encounter this local entity again
392 	 we can reuse this copy.  Do this early because remap_type may
393 	 need this decl for TYPE_STUB_DECL.  */
394       insert_decl_map (id, decl, t);
395 
396       if (!DECL_P (t))
397 	return t;
398 
399       /* Remap types, if necessary.  */
400       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
401       if (TREE_CODE (t) == TYPE_DECL)
402 	{
403 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
404 
405 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
406 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
407 	     is not set on the TYPE_DECL, for example in LTO mode.  */
408 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
409 	    {
410 	      tree x = build_variant_type_copy (TREE_TYPE (t));
411 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
412 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
413 	      DECL_ORIGINAL_TYPE (t) = x;
414 	    }
415 	}
416 
417       /* Remap sizes as necessary.  */
418       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
419       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
420 
421       /* If fields, do likewise for offset and qualifier.  */
422       if (TREE_CODE (t) == FIELD_DECL)
423 	{
424 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
425 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
426 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
427 	}
428 
429       return t;
430     }
431 
432   if (id->do_not_unshare)
433     return *n;
434   else
435     return unshare_expr (*n);
436 }
437 
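/* Helper for remap_type.  Build a remapped copy of TYPE, register the
   mapping in ID, and recursively remap the pieces of the type that may
   refer to local variables (pointed-to types, bounds, fields, domains
   and sizes).  */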
438 static tree
439 remap_type_1 (tree type, copy_body_data *id)
440 {
441   tree new_tree, t;
442 
443   /* We do need a copy.  Build and register it now.  If this is a pointer or
444      reference type, remap the designated type and make a new pointer or
445      reference type.  */
446   if (TREE_CODE (type) == POINTER_TYPE)
447     {
448       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
449 					 TYPE_MODE (type),
450 					 TYPE_REF_CAN_ALIAS_ALL (type));
451       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 	new_tree = build_type_attribute_qual_variant (new_tree,
453 						      TYPE_ATTRIBUTES (type),
454 						      TYPE_QUALS (type));
455       insert_decl_map (id, type, new_tree);
456       return new_tree;
457     }
458   else if (TREE_CODE (type) == REFERENCE_TYPE)
459     {
460       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
461 					    TYPE_MODE (type),
462 					    TYPE_REF_CAN_ALIAS_ALL (type));
463       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
464 	new_tree = build_type_attribute_qual_variant (new_tree,
465 						      TYPE_ATTRIBUTES (type),
466 						      TYPE_QUALS (type));
467       insert_decl_map (id, type, new_tree);
468       return new_tree;
469     }
470   else
471     new_tree = copy_node (type);
472 
473   insert_decl_map (id, type, new_tree);
474 
475   /* This is a new type, not a copy of an old type.  Need to reassociate
476      variants.  We can handle everything except the main variant lazily.  */
477   t = TYPE_MAIN_VARIANT (type);
478   if (type != t)
479     {
480       t = remap_type (t, id);
481       TYPE_MAIN_VARIANT (new_tree) = t;
482       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
483       TYPE_NEXT_VARIANT (t) = new_tree;
484     }
485   else
486     {
487       TYPE_MAIN_VARIANT (new_tree) = new_tree;
488       TYPE_NEXT_VARIANT (new_tree) = NULL;
489     }
490 
491   if (TYPE_STUB_DECL (type))
492     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
493 
494   /* Lazily create pointer and reference types.  */
495   TYPE_POINTER_TO (new_tree) = NULL;
496   TYPE_REFERENCE_TO (new_tree) = NULL;
497 
498   /* Copy all types that may contain references to local variables; be sure to
499      preserve sharing between the type and its main variant when possible.  */
500   switch (TREE_CODE (new_tree))
501     {
502     case INTEGER_TYPE:
503     case REAL_TYPE:
504     case FIXED_POINT_TYPE:
505     case ENUMERAL_TYPE:
506     case BOOLEAN_TYPE:
507       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
508 	{
509 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
510 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
511 
512 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
513 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
514 	}
515       else
516 	{
517 	  t = TYPE_MIN_VALUE (new_tree);
518 	  if (t && TREE_CODE (t) != INTEGER_CST)
519 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
520 
521 	  t = TYPE_MAX_VALUE (new_tree);
522 	  if (t && TREE_CODE (t) != INTEGER_CST)
523 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
524 	}
525       return new_tree;
526 
527     case FUNCTION_TYPE:
528       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
529 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
530 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
531       else
532         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
533       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
534 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
535 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
536       else
537         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
538       return new_tree;
539 
540     case ARRAY_TYPE:
541       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
542 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
543 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
544       else
545 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
546 
547       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
548 	{
549 	  gcc_checking_assert (TYPE_DOMAIN (type)
550 			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
551 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
552 	}
553       else
554         {
555 	  TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
556 	  /* For array bounds where we have decided not to copy over the bounds
557 	     variable that isn't used in the OpenMP/OpenACC region, change them
558 	     to an uninitialized VAR_DECL temporary.  */
559 	  if (id->adjust_array_error_bounds
560 	      && TYPE_DOMAIN (new_tree)
561 	      && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
562 	      && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
563 	    {
564 	      tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
565 	      DECL_ATTRIBUTES (v)
566 		= tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
567 			     DECL_ATTRIBUTES (v));
568 	      TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
569 	    }
570         }
571       break;
572 
573     case RECORD_TYPE:
574     case UNION_TYPE:
575     case QUAL_UNION_TYPE:
576       if (TYPE_MAIN_VARIANT (type) != type
577 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
578 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
579       else
580 	{
581 	  tree f, nf = NULL;
582 
583 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
584 	    {
585 	      t = remap_decl (f, id);
586 	      DECL_CONTEXT (t) = new_tree;
587 	      DECL_CHAIN (t) = nf;
588 	      nf = t;
589 	    }
590 	  TYPE_FIELDS (new_tree) = nreverse (nf);
591 	}
592       break;
593 
594     case OFFSET_TYPE:
595     default:
596       /* Shouldn't have been thought variable sized.  */
597       gcc_unreachable ();
598     }
599 
600   /* All variants of the type share the same size, so use the already remapped data.  */
601   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
602     {
603       tree s = TYPE_SIZE (type);
604       tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
605       tree su = TYPE_SIZE_UNIT (type);
606       tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
607       gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
608 			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
609 			   || s == mvs);
610       gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
611 			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
612 			   || su == mvsu);
613       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
614       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
615     }
616   else
617     {
618       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
619       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
620     }
621 
622   return new_tree;
623 }
624 
625 /* Helper function for remap_type_2, called through walk_tree.  */
626 
627 static tree
628 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
629 {
630   copy_body_data *id = (copy_body_data *) data;
631 
632   if (TYPE_P (*tp))
633     *walk_subtrees = 0;
634 
635   else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
636     return *tp;
637 
638   return NULL_TREE;
639 }
640 
641 /* Return true if TYPE needs to be remapped because remap_decl on any
642    needed embedded decl returns something other than that decl.  */
643 
644 static bool
645 remap_type_2 (tree type, copy_body_data *id)
646 {
647   tree t;
648 
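  /* Return true from the enclosing function if T is a decl that
     remap_decl maps to a different decl, or if walking T (when the
     type's sizes are not yet gimplified) finds such a decl.  */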
649 #define RETURN_TRUE_IF_VAR(T) \
650   do								\
651     {								\
652       tree _t = (T);						\
653       if (_t)							\
654 	{							\
655 	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
656 	    return true;					\
657 	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
658 	      && walk_tree (&_t, remap_type_3, id, NULL))	\
659 	    return true;					\
660 	}							\
661     }								\
662   while (0)
663 
664   switch (TREE_CODE (type))
665     {
666     case POINTER_TYPE:
667     case REFERENCE_TYPE:
668     case FUNCTION_TYPE:
669     case METHOD_TYPE:
670       return remap_type_2 (TREE_TYPE (type), id);
671 
672     case INTEGER_TYPE:
673     case REAL_TYPE:
674     case FIXED_POINT_TYPE:
675     case ENUMERAL_TYPE:
676     case BOOLEAN_TYPE:
677       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
678       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
679       return false;
680 
681     case ARRAY_TYPE:
682       if (remap_type_2 (TREE_TYPE (type), id)
683 	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
684 	return true;
685       break;
686 
687     case RECORD_TYPE:
688     case UNION_TYPE:
689     case QUAL_UNION_TYPE:
690       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
691 	if (TREE_CODE (t) == FIELD_DECL)
692 	  {
693 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
694 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
695 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
696 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
697 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
698 	  }
699       break;
700 
701     default:
702       return false;
703     }
704 
705   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
706   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
707   return false;
708 #undef RETURN_TRUE_IF_VAR
709 }
710 
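/* Remap TYPE in the context of ID.  Return the cached mapping if there
   is one; types that are not variably modified (or that need no change)
   map to themselves, otherwise a fresh copy is built by remap_type_1.  */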
711 tree
712 remap_type (tree type, copy_body_data *id)
713 {
714   tree *node;
715   tree tmp;
716 
717   if (type == NULL)
718     return type;
719 
720   /* See if we have remapped this type.  */
721   node = id->decl_map->get (type);
722   if (node)
723     return *node;
724 
725   /* The type only needs remapping if it's variably modified.  */
726   if (! variably_modified_type_p (type, id->src_fn)
727       /* Don't remap if the copy_decl method doesn't always return a new
728 	 decl and, for all embedded decls, returns the passed-in decl.  */
729       || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
730     {
731       insert_decl_map (id, type, type);
732       return type;
733     }
734 
735   id->remapping_type_depth++;
736   tmp = remap_type_1 (type, id);
737   id->remapping_type_depth--;
738 
739   return tmp;
740 }
741 
742 /* Decide if DECL can be put into BLOCK_NONLOCALIZED_VARS.  */
743 
744 static bool
745 can_be_nonlocal (tree decl, copy_body_data *id)
746 {
747   /* We cannot duplicate function decls.  */
748   if (TREE_CODE (decl) == FUNCTION_DECL)
749     return true;
750 
751   /* Local static vars must be non-local or we get multiple declaration
752      problems.  */
753   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
754     return true;
755 
756   return false;
757 }
758 
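/* Remap the chain of declarations DECLS using ID and return the new
   chain.  Declarations that are not duplicated (see can_be_nonlocal) or
   that map to nothing are recorded in *NONLOCALIZED_LIST when debug
   info requires it.  */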
759 static tree
760 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
761 	     copy_body_data *id)
762 {
763   tree old_var;
764   tree new_decls = NULL_TREE;
765 
766   /* Remap its variables.  */
767   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
768     {
769       tree new_var;
770 
771       if (can_be_nonlocal (old_var, id))
772 	{
773 	  /* We need to add this variable to the local decls as otherwise
774 	     nothing else will do so.  */
775 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
776 	    add_local_decl (cfun, old_var);
777 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
778 	      && !DECL_IGNORED_P (old_var)
779 	      && nonlocalized_list)
780 	    vec_safe_push (*nonlocalized_list, old_var);
781 	  continue;
782 	}
783 
784       /* Remap the variable.  */
785       new_var = remap_decl (old_var, id);
786 
787       /* If we didn't remap this variable, we can't mess with its
788 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
789 	 already declared somewhere else, so don't declare it here.  */
790 
791       if (new_var == id->retvar)
792 	;
793       else if (!new_var)
794         {
795 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
796 	      && !DECL_IGNORED_P (old_var)
797 	      && nonlocalized_list)
798 	    vec_safe_push (*nonlocalized_list, old_var);
799 	}
800       else
801 	{
802 	  gcc_assert (DECL_P (new_var));
803 	  DECL_CHAIN (new_var) = new_decls;
804 	  new_decls = new_var;
805 
806 	  /* Also copy value-expressions.  */
807 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
808 	    {
809 	      tree tem = DECL_VALUE_EXPR (new_var);
810 	      bool old_regimplify = id->regimplify;
811 	      id->remapping_type_depth++;
812 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
813 	      id->remapping_type_depth--;
814 	      id->regimplify = old_regimplify;
815 	      SET_DECL_VALUE_EXPR (new_var, tem);
816 	    }
817 	}
818     }
819 
820   return nreverse (new_decls);
821 }
822 
823 /* Copy the BLOCK to contain remapped versions of the variables
824    therein.  And hook the new block into the block-tree.  */
825 
826 static void
827 remap_block (tree *block, copy_body_data *id)
828 {
829   tree old_block;
830   tree new_block;
831 
832   /* Make the new block.  */
833   old_block = *block;
834   new_block = make_node (BLOCK);
835   TREE_USED (new_block) = TREE_USED (old_block);
836   BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
837   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
838   BLOCK_NONLOCALIZED_VARS (new_block)
839     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
840   *block = new_block;
841 
842   /* Remap its variables.  */
843   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
844   					&BLOCK_NONLOCALIZED_VARS (new_block),
845 					id);
846 
847   if (id->transform_lang_insert_block)
848     id->transform_lang_insert_block (new_block);
849 
850   /* Remember the remapped block.  */
851   insert_decl_map (id, old_block, new_block);
852 }
853 
854 /* Copy the whole block tree and root it in id->block.  */
855 
856 static tree
857 remap_blocks (tree block, copy_body_data *id)
858 {
859   tree t;
860   tree new_tree = block;
861 
862   if (!block)
863     return NULL;
864 
865   remap_block (&new_tree, id);
866   gcc_assert (new_tree != block);
867   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
868     prepend_lexical_block (new_tree, remap_blocks (t, id));
869   /* Blocks are in arbitrary order, but make things slightly prettier and do
870      not swap order when producing a copy.  */
871   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
872   return new_tree;
873 }
874 
875 /* Remap the block tree rooted at BLOCK to nothing.  */
876 
877 static void
878 remap_blocks_to_null (tree block, copy_body_data *id)
879 {
880   tree t;
881   insert_decl_map (id, block, NULL_TREE);
882   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
883     remap_blocks_to_null (t, id);
884 }
885 
886 /* Remap the location info pointed to by LOCUS.  */
887 
888 static location_t
889 remap_location (location_t locus, copy_body_data *id)
890 {
891   if (LOCATION_BLOCK (locus))
892     {
893       tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
894       gcc_assert (n);
895       if (*n)
896 	return set_block (locus, *n);
897     }
898 
899   locus = LOCATION_LOCUS (locus);
900 
901   if (locus != UNKNOWN_LOCATION && id->block)
902     return set_block (locus, id->block);
903 
904   return locus;
905 }
906 
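/* Replace the STATEMENT_LIST pointed to by *TP with a deep copy of
   itself; nested STATEMENT_LISTs are copied as well so that the
   original list is left untouched.  */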
907 static void
908 copy_statement_list (tree *tp)
909 {
910   tree_stmt_iterator oi, ni;
911   tree new_tree;
912 
913   new_tree = alloc_stmt_list ();
914   ni = tsi_start (new_tree);
915   oi = tsi_start (*tp);
916   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
917   *tp = new_tree;
918 
919   for (; !tsi_end_p (oi); tsi_next (&oi))
920     {
921       tree stmt = tsi_stmt (oi);
922       if (TREE_CODE (stmt) == STATEMENT_LIST)
923 	/* This copy is not redundant; tsi_link_after will smash this
924 	   STATEMENT_LIST into the end of the one we're building, and we
925 	   don't want to do that with the original.  */
926 	copy_statement_list (&stmt);
927       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
928     }
929 }
930 
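/* Copy the BIND_EXPR pointed to by *TP on behalf of copy_tree_body_r,
   remapping its BLOCK and its variables using ID.  */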
931 static void
932 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
933 {
934   tree block = BIND_EXPR_BLOCK (*tp);
935   /* Copy (and replace) the statement.  */
936   copy_tree_r (tp, walk_subtrees, NULL);
937   if (block)
938     {
939       remap_block (&block, id);
940       BIND_EXPR_BLOCK (*tp) = block;
941     }
942 
943   if (BIND_EXPR_VARS (*tp))
944     /* This will remap a lot of the same decls again, but this should be
945        harmless.  */
946     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
947 }
948 
949 
950 /* Create a new gimple_seq by remapping all the statements in BODY
951    using the inlining information in ID.  */
952 
953 static gimple_seq
954 remap_gimple_seq (gimple_seq body, copy_body_data *id)
955 {
956   gimple_stmt_iterator si;
957   gimple_seq new_body = NULL;
958 
959   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
960     {
961       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
962       gimple_seq_add_seq (&new_body, new_stmts);
963     }
964 
965   return new_body;
966 }
967 
968 
969 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
970    block using the mapping information in ID.  */
971 
972 static gimple *
973 copy_gimple_bind (gbind *stmt, copy_body_data *id)
974 {
975   gimple *new_bind;
976   tree new_block, new_vars;
977   gimple_seq body, new_body;
978 
979   /* Copy the statement.  Note that we purposely don't use copy_stmt
980      here because we need to remap statements as we copy.  */
981   body = gimple_bind_body (stmt);
982   new_body = remap_gimple_seq (body, id);
983 
984   new_block = gimple_bind_block (stmt);
985   if (new_block)
986     remap_block (&new_block, id);
987 
988   /* This will remap a lot of the same decls again, but this should be
989      harmless.  */
990   new_vars = gimple_bind_vars (stmt);
991   if (new_vars)
992     new_vars = remap_decls (new_vars, NULL, id);
993 
994   new_bind = gimple_build_bind (new_vars, new_body, new_block);
995 
996   return new_bind;
997 }
998 
999 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
1000 
1001 static bool
1002 is_parm (tree decl)
1003 {
1004   if (TREE_CODE (decl) == SSA_NAME)
1005     {
1006       decl = SSA_NAME_VAR (decl);
1007       if (!decl)
1008 	return false;
1009     }
1010 
1011   return (TREE_CODE (decl) == PARM_DECL);
1012 }
1013 
1014 /* Remap the dependence CLIQUE from the source to the destination function
1015    as specified in ID.  */
1016 
1017 static unsigned short
1018 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1019 {
1020   if (clique == 0 || processing_debug_stmt)
1021     return 0;
1022   if (!id->dependence_map)
1023     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1024   bool existed;
1025   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1026   if (!existed)
1027     {
1028       /* Clique 1 is reserved for local ones set by PTA.  */
1029       if (cfun->last_clique == 0)
1030 	cfun->last_clique = 1;
1031       newc = ++cfun->last_clique;
1032     }
1033   return newc;
1034 }
1035 
1036 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
1037    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
1038    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1039    recursing into the child nodes of *TP.  */
1040 
1041 static tree
1042 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1043 {
1044   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1045   copy_body_data *id = (copy_body_data *) wi_p->info;
1046   tree fn = id->src_fn;
1047 
1048   /* For recursive invocations this is no longer the LHS itself.  */
1049   bool is_lhs = wi_p->is_lhs;
1050   wi_p->is_lhs = false;
1051 
1052   if (TREE_CODE (*tp) == SSA_NAME)
1053     {
1054       *tp = remap_ssa_name (*tp, id);
1055       *walk_subtrees = 0;
1056       if (is_lhs)
1057 	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1058       return NULL;
1059     }
1060   else if (auto_var_in_fn_p (*tp, fn))
1061     {
1062       /* Local variables and labels need to be replaced by equivalent
1063 	 variables.  We don't want to copy static variables; there's
1064 	 only one of those, no matter how many times we inline the
1065 	 containing function.  Similarly for globals from an outer
1066 	 function.  */
1067       tree new_decl;
1068 
1069       /* Remap the declaration.  */
1070       new_decl = remap_decl (*tp, id);
1071       gcc_assert (new_decl);
1072       /* Replace this variable with the copy.  */
1073       STRIP_TYPE_NOPS (new_decl);
1074       /* ???  The C++ frontend uses void * pointer zero to initialize
1075          any other type.  This confuses the middle-end type verification.
1076 	 As cloned bodies do not go through gimplification again the fixup
1077 	 there doesn't trigger.  */
1078       if (TREE_CODE (new_decl) == INTEGER_CST
1079 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1080 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1081       *tp = new_decl;
1082       *walk_subtrees = 0;
1083     }
1084   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1085     gcc_unreachable ();
1086   else if (TREE_CODE (*tp) == SAVE_EXPR)
1087     gcc_unreachable ();
1088   else if (TREE_CODE (*tp) == LABEL_DECL
1089 	   && (!DECL_CONTEXT (*tp)
1090 	       || decl_function_context (*tp) == id->src_fn))
1091     /* These may need to be remapped for EH handling.  */
1092     *tp = remap_decl (*tp, id);
1093   else if (TREE_CODE (*tp) == FIELD_DECL)
1094     {
1095       /* If the enclosing record type is variably_modified_type_p, the field
1096 	 has already been remapped.  Otherwise, it need not be.  */
1097       tree *n = id->decl_map->get (*tp);
1098       if (n)
1099 	*tp = *n;
1100       *walk_subtrees = 0;
1101     }
1102   else if (TYPE_P (*tp))
1103     /* Types may need remapping as well.  */
1104     *tp = remap_type (*tp, id);
1105   else if (CONSTANT_CLASS_P (*tp))
1106     {
1107       /* If this is a constant, we have to copy the node iff the type
1108 	 will be remapped.  copy_tree_r will not copy a constant.  */
1109       tree new_type = remap_type (TREE_TYPE (*tp), id);
1110 
1111       if (new_type == TREE_TYPE (*tp))
1112 	*walk_subtrees = 0;
1113 
1114       else if (TREE_CODE (*tp) == INTEGER_CST)
1115 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1116       else
1117 	{
1118 	  *tp = copy_node (*tp);
1119 	  TREE_TYPE (*tp) = new_type;
1120 	}
1121     }
1122   else
1123     {
1124       /* Otherwise, just copy the node.  Note that copy_tree_r already
1125 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
1126 
1127       if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1128 	{
1129 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1130 	     that can happen when a pointer argument is an ADDR_EXPR.
1131 	     Recurse here manually to allow that.  */
1132 	  tree ptr = TREE_OPERAND (*tp, 0);
1133 	  tree type = remap_type (TREE_TYPE (*tp), id);
1134 	  tree old = *tp;
1135 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1136 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1137 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1138 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1139 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1140 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1141 	    {
1142 	      MR_DEPENDENCE_CLIQUE (*tp)
1143 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1144 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1145 	    }
1146 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1147 	     remapped a parameter as the property might be valid only
1148 	     for the parameter itself.  */
1149 	  if (TREE_THIS_NOTRAP (old)
1150 	      && (!is_parm (TREE_OPERAND (old, 0))
1151 		  || (!id->transform_parameter && is_parm (ptr))))
1152 	    TREE_THIS_NOTRAP (*tp) = 1;
1153 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1154 	  *walk_subtrees = 0;
1155 	  return NULL;
1156 	}
1157 
1158       /* Here is the "usual case".  Copy this tree node, and then
1159 	 tweak some special cases.  */
1160       copy_tree_r (tp, walk_subtrees, NULL);
1161 
1162       if (TREE_CODE (*tp) != OMP_CLAUSE)
1163 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1164 
1165       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1166 	{
1167 	  /* The copied TARGET_EXPR has never been expanded, even if the
1168 	     original node was expanded already.  */
1169 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1170 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1171 	}
1172       else if (TREE_CODE (*tp) == ADDR_EXPR)
1173 	{
1174 	  /* Variable substitution need not be simple.  In particular,
1175 	     the MEM_REF substitution above.  Make sure that
1176 	     TREE_CONSTANT and friends are up-to-date.  */
1177 	  int invariant = is_gimple_min_invariant (*tp);
1178 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1179 	  recompute_tree_invariant_for_addr_expr (*tp);
1180 
1181 	  /* If this used to be invariant, but is not any longer,
1182 	     then regimplification is probably needed.  */
1183 	  if (invariant && !is_gimple_min_invariant (*tp))
1184 	    id->regimplify = true;
1185 
1186 	  *walk_subtrees = 0;
1187 	}
1188     }
1189 
1190   /* Update the TREE_BLOCK for the cloned expr.  */
1191   if (EXPR_P (*tp))
1192     {
1193       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1194       tree old_block = TREE_BLOCK (*tp);
1195       if (old_block)
1196 	{
1197 	  tree *n;
1198 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1199 	  if (n)
1200 	    new_block = *n;
1201 	}
1202       TREE_SET_BLOCK (*tp, new_block);
1203     }
1204 
1205   /* Keep iterating.  */
1206   return NULL_TREE;
1207 }
1208 
1209 
1210 /* Called from copy_tree_body and friends via walk_tree.  DATA is really a
1211    `copy_body_data *'.  */
1212 
1213 tree
1214 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1215 {
1216   copy_body_data *id = (copy_body_data *) data;
1217   tree fn = id->src_fn;
1218   tree new_block;
1219 
1220   /* Begin by recognizing trees that we'll completely rewrite for the
1221      inlining context.  Our output for these trees is completely
1222      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1223      into an edge).  Further down, we'll handle trees that get
1224      duplicated and/or tweaked.  */
1225 
1226   /* When requested, RETURN_EXPRs should be transformed to just the
1227      contained MODIFY_EXPR.  The branch semantics of the return will
1228      be handled elsewhere by manipulating the CFG rather than a statement.  */
1229   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1230     {
1231       tree assignment = TREE_OPERAND (*tp, 0);
1232 
1233       /* If we're returning something, just turn that into an
1234 	 assignment to the equivalent of the original RESULT_DECL.
1235 	 If the "assignment" is just the result decl, the result
1236 	 decl has already been set (e.g. a recent "foo (&result_decl,
1237 	 ...)"); just toss the entire RETURN_EXPR.  */
1238       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1239 	{
1240 	  /* Replace the RETURN_EXPR with (a copy of) the
1241 	     MODIFY_EXPR hanging underneath.  */
1242 	  *tp = copy_node (assignment);
1243 	}
1244       else /* Else the RETURN_EXPR returns no value.  */
1245 	{
1246 	  *tp = NULL;
1247 	  return (tree) (void *)1;
1248 	}
1249     }
1250   else if (TREE_CODE (*tp) == SSA_NAME)
1251     {
1252       *tp = remap_ssa_name (*tp, id);
1253       *walk_subtrees = 0;
1254       return NULL;
1255     }
1256 
1257   /* Local variables and labels need to be replaced by equivalent
1258      variables.  We don't want to copy static variables; there's only
1259      one of those, no matter how many times we inline the containing
1260      function.  Similarly for globals from an outer function.  */
1261   else if (auto_var_in_fn_p (*tp, fn))
1262     {
1263       tree new_decl;
1264 
1265       /* Remap the declaration.  */
1266       new_decl = remap_decl (*tp, id);
1267       gcc_assert (new_decl);
1268       /* Replace this variable with the copy.  */
1269       STRIP_TYPE_NOPS (new_decl);
1270       *tp = new_decl;
1271       *walk_subtrees = 0;
1272     }
1273   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1274     copy_statement_list (tp);
1275   else if (TREE_CODE (*tp) == SAVE_EXPR
1276 	   || TREE_CODE (*tp) == TARGET_EXPR)
1277     remap_save_expr (tp, id->decl_map, walk_subtrees);
1278   else if (TREE_CODE (*tp) == LABEL_DECL
1279 	   && (! DECL_CONTEXT (*tp)
1280 	       || decl_function_context (*tp) == id->src_fn))
1281     /* These may need to be remapped for EH handling.  */
1282     *tp = remap_decl (*tp, id);
1283   else if (TREE_CODE (*tp) == BIND_EXPR)
1284     copy_bind_expr (tp, walk_subtrees, id);
1285   /* Types may need remapping as well.  */
1286   else if (TYPE_P (*tp))
1287     *tp = remap_type (*tp, id);
1288 
1289   /* If this is a constant, we have to copy the node iff the type will be
1290      remapped.  copy_tree_r will not copy a constant.  */
1291   else if (CONSTANT_CLASS_P (*tp))
1292     {
1293       tree new_type = remap_type (TREE_TYPE (*tp), id);
1294 
1295       if (new_type == TREE_TYPE (*tp))
1296 	*walk_subtrees = 0;
1297 
1298       else if (TREE_CODE (*tp) == INTEGER_CST)
1299 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1300       else
1301 	{
1302 	  *tp = copy_node (*tp);
1303 	  TREE_TYPE (*tp) = new_type;
1304 	}
1305     }
1306 
1307   /* Otherwise, just copy the node.  Note that copy_tree_r already
1308      knows not to copy VAR_DECLs, etc., so this is safe.  */
1309   else
1310     {
1311       /* Here we handle trees that are not completely rewritten.
1312 	 First we detect some inlining-induced bogosities for
1313 	 discarding.  */
1314       if (TREE_CODE (*tp) == MODIFY_EXPR
1315 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1316 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1317 	{
1318 	  /* Some assignments VAR = VAR; don't generate any rtl code
1319 	     and thus don't count as variable modification.  Avoid
1320 	     keeping bogosities like 0 = 0.  */
1321 	  tree decl = TREE_OPERAND (*tp, 0), value;
1322 	  tree *n;
1323 
1324 	  n = id->decl_map->get (decl);
1325 	  if (n)
1326 	    {
1327 	      value = *n;
1328 	      STRIP_TYPE_NOPS (value);
1329 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1330 		{
1331 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1332 		  return copy_tree_body_r (tp, walk_subtrees, data);
1333 		}
1334 	    }
1335 	}
1336       else if (TREE_CODE (*tp) == INDIRECT_REF)
1337 	{
1338 	  /* Get rid of *& from inline substitutions that can happen when a
1339 	     pointer argument is an ADDR_EXPR.  */
1340 	  tree decl = TREE_OPERAND (*tp, 0);
1341 	  tree *n = id->decl_map->get (decl);
1342 	  if (n)
1343 	    {
1344 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1345 	         it manually here as we'll eventually get ADDR_EXPRs
1346 		 which lie about their types pointed to.  In this case
1347 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1348 		 but we absolutely rely on that.  As fold_indirect_ref
1349 	         does other useful transformations, try that first, though.  */
1350 	      tree type = TREE_TYPE (*tp);
1351 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1352 	      tree old = *tp;
1353 	      *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1354 	      if (! *tp)
1355 	        {
1356 		  type = remap_type (type, id);
1357 		  if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1358 		    {
1359 		      *tp
1360 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1361 		      /* ???  We should either assert here or build
1362 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1363 			 incompatible types to our IL.  */
1364 		      if (! *tp)
1365 			*tp = TREE_OPERAND (ptr, 0);
1366 		    }
1367 	          else
1368 		    {
1369 	              *tp = build1 (INDIRECT_REF, type, ptr);
1370 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1371 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1372 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1373 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1374 			 have remapped a parameter as the property might be
1375 			 valid only for the parameter itself.  */
1376 		      if (TREE_THIS_NOTRAP (old)
1377 			  && (!is_parm (TREE_OPERAND (old, 0))
1378 			      || (!id->transform_parameter && is_parm (ptr))))
1379 		        TREE_THIS_NOTRAP (*tp) = 1;
1380 		    }
1381 		}
1382 	      *walk_subtrees = 0;
1383 	      return NULL;
1384 	    }
1385 	}
1386       else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1387 	{
1388 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1389 	     that can happen when a pointer argument is an ADDR_EXPR.
1390 	     Recurse here manually to allow that.  */
1391 	  tree ptr = TREE_OPERAND (*tp, 0);
1392 	  tree type = remap_type (TREE_TYPE (*tp), id);
1393 	  tree old = *tp;
1394 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1395 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1396 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1397 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1398 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1399 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1400 	    {
1401 	      MR_DEPENDENCE_CLIQUE (*tp)
1402 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1403 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1404 	    }
1405 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1406 	     remapped a parameter as the property might be valid only
1407 	     for the parameter itself.  */
1408 	  if (TREE_THIS_NOTRAP (old)
1409 	      && (!is_parm (TREE_OPERAND (old, 0))
1410 		  || (!id->transform_parameter && is_parm (ptr))))
1411 	    TREE_THIS_NOTRAP (*tp) = 1;
1412 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1413 	  *walk_subtrees = 0;
1414 	  return NULL;
1415 	}
1416 
1417       /* Here is the "usual case".  Copy this tree node, and then
1418 	 tweak some special cases.  */
1419       copy_tree_r (tp, walk_subtrees, NULL);
1420 
1421       /* If EXPR has a block defined, map it to the newly constructed block.
1422          When inlining we want EXPRs without a block to appear in the block
1423 	 of the function call if we are not remapping a type.  */
1424       if (EXPR_P (*tp))
1425 	{
1426 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1427 	  if (TREE_BLOCK (*tp))
1428 	    {
1429 	      tree *n;
1430 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1431 	      if (n)
1432 		new_block = *n;
1433 	    }
1434 	  TREE_SET_BLOCK (*tp, new_block);
1435 	}
1436 
1437       if (TREE_CODE (*tp) != OMP_CLAUSE)
1438 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1439 
1440       /* The copied TARGET_EXPR has never been expanded, even if the
1441 	 original node was expanded already.  */
1442       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1443 	{
1444 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1445 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1446 	}
1447 
1448       /* Variable substitution need not be simple.  In particular, the
1449 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1450 	 and friends are up-to-date.  */
1451       else if (TREE_CODE (*tp) == ADDR_EXPR)
1452 	{
1453 	  int invariant = is_gimple_min_invariant (*tp);
1454 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1455 
1456 	  /* Handle the case where we substituted an INDIRECT_REF
1457 	     into the operand of the ADDR_EXPR.  */
1458 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1459 	      && !id->do_not_fold)
1460 	    {
1461 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1462 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1463 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1464 	      *tp = t;
1465 	    }
1466 	  else
1467 	    recompute_tree_invariant_for_addr_expr (*tp);
1468 
1469 	  /* If this used to be invariant, but is not any longer,
1470 	     then regimplification is probably needed.  */
1471 	  if (invariant && !is_gimple_min_invariant (*tp))
1472 	    id->regimplify = true;
1473 
1474 	  *walk_subtrees = 0;
1475 	}
1476     }
1477 
1478   /* Keep iterating.  */
1479   return NULL_TREE;
1480 }
1481 
1482 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1483    source function, map that to the duplicate EH region number in
1484    the destination function.  */
1485 
1486 static int
1487 remap_eh_region_nr (int old_nr, copy_body_data *id)
1488 {
1489   eh_region old_r, new_r;
1490 
1491   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1492   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1493 
1494   return new_r->index;
1495 }
1496 
1497 /* Similar, but operate on INTEGER_CSTs.  */
1498 
1499 static tree
1500 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1501 {
1502   int old_nr, new_nr;
1503 
1504   old_nr = tree_to_shwi (old_t_nr);
1505   new_nr = remap_eh_region_nr (old_nr, id);
1506 
1507   return build_int_cst (integer_type_node, new_nr);
1508 }
1509 
1510 /* Helper for copy_bb.  Remap statement STMT using the inlining
1511    information in ID.  Return the copied statement(s) as a sequence.  */
1512 
1513 static gimple_seq
1514 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1515 {
1516   gimple *copy = NULL;
1517   struct walk_stmt_info wi;
1518   bool skip_first = false;
1519   gimple_seq stmts = NULL;
1520 
1521   if (is_gimple_debug (stmt)
1522       && (gimple_debug_nonbind_marker_p (stmt)
1523 	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1524 	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1525     return NULL;
1526 
1527   /* Begin by recognizing trees that we'll completely rewrite for the
1528      inlining context.  Our output for these trees is completely
1529      different from our input (e.g. RETURN_EXPR is deleted and morphs
1530      into an edge).  Further down, we'll handle trees that get
1531      duplicated and/or tweaked.  */
1532 
1533   /* When requested, GIMPLE_RETURN should be transformed to just the
1534      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1535      be handled elsewhere by manipulating the CFG rather than the
1536      statement.  */
1537   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1538     {
1539       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1540 
1541       /* If we're returning something, just turn that into an
1542 	 assignment to the equivalent of the original RESULT_DECL.
1543 	 If RETVAL is just the result decl, the result decl has
1544 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1545 	 just toss the entire GIMPLE_RETURN.  Likewise for when the
1546 	 call doesn't want the return value.  */
1547       if (retval
1548 	  && (TREE_CODE (retval) != RESULT_DECL
1549 	      && (!id->call_stmt
1550 		  || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1551 	      && (TREE_CODE (retval) != SSA_NAME
1552 		  || ! SSA_NAME_VAR (retval)
1553 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1554         {
1555 	  copy = gimple_build_assign (id->do_not_unshare
1556 				      ? id->retvar : unshare_expr (id->retvar),
1557 				      retval);
1558 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1559 	  skip_first = true;
1560 	}
1561       else
1562 	return NULL;
1563     }
1564   else if (gimple_has_substatements (stmt))
1565     {
1566       gimple_seq s1, s2;
1567 
1568       /* When cloning bodies from the C++ front end, we will be handed bodies
1569 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1570 	 have embedded statements.  */
1571       switch (gimple_code (stmt))
1572 	{
1573 	case GIMPLE_BIND:
1574 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1575 	  break;
1576 
1577 	case GIMPLE_CATCH:
1578 	  {
1579 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1580 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1581 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1582 	  }
1583 	  break;
1584 
1585 	case GIMPLE_EH_FILTER:
1586 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1587 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1588 	  break;
1589 
1590 	case GIMPLE_TRY:
1591 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1592 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1593 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1594 	  break;
1595 
1596 	case GIMPLE_WITH_CLEANUP_EXPR:
1597 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1598 	  copy = gimple_build_wce (s1);
1599 	  break;
1600 
1601 	case GIMPLE_OMP_PARALLEL:
1602 	  {
1603 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1604 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1605 	    copy = gimple_build_omp_parallel
1606 	             (s1,
1607 		      gimple_omp_parallel_clauses (omp_par_stmt),
1608 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1609 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1610 	  }
1611 	  break;
1612 
1613 	case GIMPLE_OMP_TASK:
1614 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1615 	  copy = gimple_build_omp_task
1616 	           (s1,
1617 		    gimple_omp_task_clauses (stmt),
1618 		    gimple_omp_task_child_fn (stmt),
1619 		    gimple_omp_task_data_arg (stmt),
1620 		    gimple_omp_task_copy_fn (stmt),
1621 		    gimple_omp_task_arg_size (stmt),
1622 		    gimple_omp_task_arg_align (stmt));
1623 	  break;
1624 
1625 	case GIMPLE_OMP_FOR:
1626 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1627 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1628 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1629 				       gimple_omp_for_clauses (stmt),
1630 				       gimple_omp_for_collapse (stmt), s2);
1631 	  {
1632 	    size_t i;
1633 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1634 	      {
1635 		gimple_omp_for_set_index (copy, i,
1636 					  gimple_omp_for_index (stmt, i));
1637 		gimple_omp_for_set_initial (copy, i,
1638 					    gimple_omp_for_initial (stmt, i));
1639 		gimple_omp_for_set_final (copy, i,
1640 					  gimple_omp_for_final (stmt, i));
1641 		gimple_omp_for_set_incr (copy, i,
1642 					 gimple_omp_for_incr (stmt, i));
1643 		gimple_omp_for_set_cond (copy, i,
1644 					 gimple_omp_for_cond (stmt, i));
1645 	      }
1646 	  }
1647 	  break;
1648 
1649 	case GIMPLE_OMP_MASTER:
1650 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1651 	  copy = gimple_build_omp_master (s1);
1652 	  break;
1653 
1654 	case GIMPLE_OMP_TASKGROUP:
1655 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1656 	  copy = gimple_build_omp_taskgroup
1657 		   (s1, gimple_omp_taskgroup_clauses (stmt));
1658 	  break;
1659 
1660 	case GIMPLE_OMP_ORDERED:
1661 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1662 	  copy = gimple_build_omp_ordered
1663 		   (s1,
1664 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1665 	  break;
1666 
1667 	case GIMPLE_OMP_SCAN:
1668 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1669 	  copy = gimple_build_omp_scan
1670 		   (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1671 	  break;
1672 
1673 	case GIMPLE_OMP_SECTION:
1674 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1675 	  copy = gimple_build_omp_section (s1);
1676 	  break;
1677 
1678 	case GIMPLE_OMP_SECTIONS:
1679 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1680 	  copy = gimple_build_omp_sections
1681 	           (s1, gimple_omp_sections_clauses (stmt));
1682 	  break;
1683 
1684 	case GIMPLE_OMP_SINGLE:
1685 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1686 	  copy = gimple_build_omp_single
1687 	           (s1, gimple_omp_single_clauses (stmt));
1688 	  break;
1689 
1690 	case GIMPLE_OMP_TARGET:
1691 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1692 	  copy = gimple_build_omp_target
1693 		   (s1, gimple_omp_target_kind (stmt),
1694 		    gimple_omp_target_clauses (stmt));
1695 	  break;
1696 
1697 	case GIMPLE_OMP_TEAMS:
1698 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1699 	  copy = gimple_build_omp_teams
1700 		   (s1, gimple_omp_teams_clauses (stmt));
1701 	  break;
1702 
1703 	case GIMPLE_OMP_CRITICAL:
1704 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1705 	  copy = gimple_build_omp_critical (s1,
1706 					    gimple_omp_critical_name
1707 					      (as_a <gomp_critical *> (stmt)),
1708 					    gimple_omp_critical_clauses
1709 					      (as_a <gomp_critical *> (stmt)));
1710 	  break;
1711 
1712 	case GIMPLE_TRANSACTION:
1713 	  {
1714 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1715 	    gtransaction *new_trans_stmt;
1716 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1717 				   id);
1718 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1719 	    gimple_transaction_set_subcode (new_trans_stmt,
1720 	      gimple_transaction_subcode (old_trans_stmt));
1721 	    gimple_transaction_set_label_norm (new_trans_stmt,
1722 	      gimple_transaction_label_norm (old_trans_stmt));
1723 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1724 	      gimple_transaction_label_uninst (old_trans_stmt));
1725 	    gimple_transaction_set_label_over (new_trans_stmt,
1726 	      gimple_transaction_label_over (old_trans_stmt));
1727 	  }
1728 	  break;
1729 
1730 	default:
1731 	  gcc_unreachable ();
1732 	}
1733     }
1734   else
1735     {
1736       if (gimple_assign_copy_p (stmt)
1737 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1738 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1739 	{
1740 	  /* Here we handle statements that are not completely rewritten.
1741 	     First we detect some inlining-induced bogosities for
1742 	     discarding.  */
1743 
1744 	  /* Some assignments VAR = VAR; don't generate any rtl code
1745 	     and thus don't count as variable modification.  Avoid
1746 	     keeping bogosities like 0 = 0.  */
1747 	  tree decl = gimple_assign_lhs (stmt), value;
1748 	  tree *n;
1749 
1750 	  n = id->decl_map->get (decl);
1751 	  if (n)
1752 	    {
1753 	      value = *n;
1754 	      STRIP_TYPE_NOPS (value);
1755 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1756 		return NULL;
1757 	    }
1758 	}
1759 
1760       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1761 	 in a block that we aren't copying during tree_function_versioning,
1762 	 just drop the clobber stmt.  */
1763       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1764 	{
1765 	  tree lhs = gimple_assign_lhs (stmt);
1766 	  if (TREE_CODE (lhs) == MEM_REF
1767 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1768 	    {
1769 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1770 	      if (gimple_bb (def_stmt)
1771 		  && !bitmap_bit_p (id->blocks_to_copy,
1772 				    gimple_bb (def_stmt)->index))
1773 		return NULL;
1774 	    }
1775 	}
1776 
1777       /* We do not allow CLOBBERs of handled components.  In case the
1778 	 returned value is stored via such a handled component, remove
1779 	 the clobber so the stmt verifier is happy.  */
1780       if (gimple_clobber_p (stmt)
1781 	  && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1782 	{
1783 	  tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1784 	  if (!DECL_P (remapped)
1785 	      && TREE_CODE (remapped) != MEM_REF)
1786 	    return NULL;
1787 	}
1788 
1789       if (gimple_debug_bind_p (stmt))
1790 	{
1791 	  gdebug *copy
1792 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1793 				       gimple_debug_bind_get_value (stmt),
1794 				       stmt);
1795 	  if (id->reset_location)
1796 	    gimple_set_location (copy, input_location);
1797 	  id->debug_stmts.safe_push (copy);
1798 	  gimple_seq_add_stmt (&stmts, copy);
1799 	  return stmts;
1800 	}
1801       if (gimple_debug_source_bind_p (stmt))
1802 	{
1803 	  gdebug *copy = gimple_build_debug_source_bind
1804 	                   (gimple_debug_source_bind_get_var (stmt),
1805 			    gimple_debug_source_bind_get_value (stmt),
1806 			    stmt);
1807 	  if (id->reset_location)
1808 	    gimple_set_location (copy, input_location);
1809 	  id->debug_stmts.safe_push (copy);
1810 	  gimple_seq_add_stmt (&stmts, copy);
1811 	  return stmts;
1812 	}
1813       if (gimple_debug_nonbind_marker_p (stmt))
1814 	{
1815 	  /* If the inlined function has too many debug markers,
1816 	     don't copy them.  */
1817 	  if (id->src_cfun->debug_marker_count
1818 	      > param_max_debug_marker_count)
1819 	    return stmts;
1820 
1821 	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1822 	  if (id->reset_location)
1823 	    gimple_set_location (copy, input_location);
1824 	  id->debug_stmts.safe_push (copy);
1825 	  gimple_seq_add_stmt (&stmts, copy);
1826 	  return stmts;
1827 	}
1828 
1829       /* Create a new deep copy of the statement.  */
1830       copy = gimple_copy (stmt);
1831 
1832       /* Clear flags that need revisiting.  */
1833       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1834         {
1835 	  if (gimple_call_tail_p (call_stmt))
1836 	    gimple_call_set_tail (call_stmt, false);
1837 	  if (gimple_call_from_thunk_p (call_stmt))
1838 	    gimple_call_set_from_thunk (call_stmt, false);
1839 	  if (gimple_call_internal_p (call_stmt))
1840 	    switch (gimple_call_internal_fn (call_stmt))
1841 	      {
1842 	      case IFN_GOMP_SIMD_LANE:
1843 	      case IFN_GOMP_SIMD_VF:
1844 	      case IFN_GOMP_SIMD_LAST_LANE:
1845 	      case IFN_GOMP_SIMD_ORDERED_START:
1846 	      case IFN_GOMP_SIMD_ORDERED_END:
1847 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1848 	        break;
1849 	      default:
1850 		break;
1851 	      }
1852 	}
1853 
1854       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1855 	 RESX and EH_DISPATCH.  */
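      /* Illustrative example (region numbers invented): if the callee's EH
	 region 2 was duplicated into the caller as region 7, a copied
	 __builtin_eh_pointer (2) has its argument rewritten to 7, and a
	 copied "resx 2" becomes "resx 7".  */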
1856       if (id->eh_map)
1857 	switch (gimple_code (copy))
1858 	  {
1859 	  case GIMPLE_CALL:
1860 	    {
1861 	      tree r, fndecl = gimple_call_fndecl (copy);
1862 	      if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1863 		switch (DECL_FUNCTION_CODE (fndecl))
1864 		  {
1865 		  case BUILT_IN_EH_COPY_VALUES:
1866 		    r = gimple_call_arg (copy, 1);
1867 		    r = remap_eh_region_tree_nr (r, id);
1868 		    gimple_call_set_arg (copy, 1, r);
1869 		    /* FALLTHRU */
1870 
1871 		  case BUILT_IN_EH_POINTER:
1872 		  case BUILT_IN_EH_FILTER:
1873 		    r = gimple_call_arg (copy, 0);
1874 		    r = remap_eh_region_tree_nr (r, id);
1875 		    gimple_call_set_arg (copy, 0, r);
1876 		    break;
1877 
1878 		  default:
1879 		    break;
1880 		  }
1881 
1882 	      /* Reset alias info if we didn't take measures to keep it
1883 		 valid across inlining by setting DECL_PT_UID.  */
1884 	      if (!id->src_cfun->gimple_df
1885 		  || !id->src_cfun->gimple_df->ipa_pta)
1886 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1887 	    }
1888 	    break;
1889 
1890 	  case GIMPLE_RESX:
1891 	    {
1892 	      gresx *resx_stmt = as_a <gresx *> (copy);
1893 	      int r = gimple_resx_region (resx_stmt);
1894 	      r = remap_eh_region_nr (r, id);
1895 	      gimple_resx_set_region (resx_stmt, r);
1896 	    }
1897 	    break;
1898 
1899 	  case GIMPLE_EH_DISPATCH:
1900 	    {
1901 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1902 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1903 	      r = remap_eh_region_nr (r, id);
1904 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1905 	    }
1906 	    break;
1907 
1908 	  default:
1909 	    break;
1910 	  }
1911     }
1912 
1913   /* If STMT has a block defined, map it to the newly constructed block.  */
1914   if (tree block = gimple_block (copy))
1915     {
1916       tree *n;
1917       n = id->decl_map->get (block);
1918       gcc_assert (n);
1919       gimple_set_block (copy, *n);
1920     }
1921   if (id->param_body_adjs)
1922     {
1923       gimple_seq extra_stmts = NULL;
1924       id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1925       if (!gimple_seq_empty_p (extra_stmts))
1926 	{
1927 	  memset (&wi, 0, sizeof (wi));
1928 	  wi.info = id;
1929 	  for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1930 	       !gsi_end_p (egsi);
1931 	       gsi_next (&egsi))
1932 	    walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1933 	  gimple_seq_add_seq (&stmts, extra_stmts);
1934 	}
1935     }
1936 
1937   if (id->reset_location)
1938     gimple_set_location (copy, input_location);
1939 
1940   /* Debug statements ought to be rebuilt and not copied.  */
1941   gcc_checking_assert (!is_gimple_debug (copy));
1942 
1943   /* Remap all the operands in COPY.  */
1944   memset (&wi, 0, sizeof (wi));
1945   wi.info = id;
1946   if (skip_first)
1947     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1948   else
1949     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1950 
1951   /* Clear the copied virtual operands.  We are not remapping them here
1952      but are going to recreate them from scratch.  */
1953   if (gimple_has_mem_ops (copy))
1954     {
1955       gimple_set_vdef (copy, NULL_TREE);
1956       gimple_set_vuse (copy, NULL_TREE);
1957     }
1958 
1959   if (cfun->can_throw_non_call_exceptions)
1960     {
1961       /* When inlining a function which does not have non-call exceptions
1962 	 enabled into a function that does (which only happens with
1963 	 always-inline), we have to fix up stmts that cannot throw.  */
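      /* Sketch with invented SSA names: a potentially trapping compare
	 "if (x_1 < y_2)" is rewritten as "_t = x_1 < y_2; if (_t != 0)" so
	 the trapping operation sits in an assignment that can get EH edges,
	 and similarly for trapping COND_EXPR/VEC_COND_EXPR right-hand sides.  */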
1964       if (gcond *cond = dyn_cast <gcond *> (copy))
1965 	if (gimple_could_trap_p (cond))
1966 	  {
1967 	    gassign *cmp
1968 	      = gimple_build_assign (make_ssa_name (boolean_type_node),
1969 				     gimple_cond_code (cond),
1970 				     gimple_cond_lhs (cond),
1971 				     gimple_cond_rhs (cond));
1972 	    gimple_seq_add_stmt (&stmts, cmp);
1973 	    gimple_cond_set_code (cond, NE_EXPR);
1974 	    gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1975 	    gimple_cond_set_rhs (cond, boolean_false_node);
1976 	  }
1977       if (gassign *ass = dyn_cast <gassign *> (copy))
1978 	if ((gimple_assign_rhs_code (ass) == COND_EXPR
1979 	     || gimple_assign_rhs_code (ass) == VEC_COND_EXPR)
1980 	    && gimple_could_trap_p (ass))
1981 	  {
1982 	    gassign *cmp
1983 	      = gimple_build_assign (make_ssa_name (boolean_type_node),
1984 				     gimple_assign_rhs1 (ass));
1985 	    gimple_seq_add_stmt (&stmts, cmp);
1986 	    gimple_assign_set_rhs1 (ass, gimple_assign_lhs (cmp));
1987 	  }
1988     }
1989 
1990   gimple_seq_add_stmt (&stmts, copy);
1991   return stmts;
1992 }
1993 
1994 
1995 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
1996    later.  */
1997 
1998 static basic_block
1999 copy_bb (copy_body_data *id, basic_block bb,
2000          profile_count num, profile_count den)
2001 {
2002   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2003   basic_block copy_basic_block;
2004   tree decl;
2005   basic_block prev;
2006 
2007   profile_count::adjust_for_ipa_scaling (&num, &den);
2008 
2009   /* Search for previous copied basic block.  */
2010   prev = bb->prev_bb;
2011   while (!prev->aux)
2012     prev = prev->prev_bb;
2013 
2014   /* create_basic_block() will append every new block to
2015      basic_block_info automatically.  */
2016   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
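  /* The next assignment scales the block count by num/den.  With invented
     numbers: a block with count 400 in a callee whose entry count (den) is
     1000, inlined at a call site with count (num) 250, gets count
     400 * 250 / 1000 = 100 in the copy.  */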
2017   copy_basic_block->count = bb->count.apply_scale (num, den);
2018 
2019   copy_gsi = gsi_start_bb (copy_basic_block);
2020 
2021   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2022     {
2023       gimple_seq stmts;
2024       gimple *stmt = gsi_stmt (gsi);
2025       gimple *orig_stmt = stmt;
2026       gimple_stmt_iterator stmts_gsi;
2027       bool stmt_added = false;
2028 
2029       id->regimplify = false;
2030       stmts = remap_gimple_stmt (stmt, id);
2031 
2032       if (gimple_seq_empty_p (stmts))
2033 	continue;
2034 
2035       seq_gsi = copy_gsi;
2036 
2037       for (stmts_gsi = gsi_start (stmts);
2038 	   !gsi_end_p (stmts_gsi); )
2039 	{
2040 	  stmt = gsi_stmt (stmts_gsi);
2041 
2042 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
2043 	  gsi_next (&stmts_gsi);
2044 
2045 	  if (gimple_nop_p (stmt))
2046 	      continue;
2047 
2048 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2049 					    orig_stmt);
2050 
2051 	  /* With return slot optimization we can end up with
2052 	     non-gimple (foo *)&this->m, fix that here.  */
2053 	  if (is_gimple_assign (stmt)
2054 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2055 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2056 	    {
2057 	      tree new_rhs;
2058 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
2059 						  gimple_assign_rhs1 (stmt),
2060 						  true, NULL, false,
2061 						  GSI_CONTINUE_LINKING);
2062 	      gimple_assign_set_rhs1 (stmt, new_rhs);
2063 	      id->regimplify = false;
2064 	    }
2065 
2066 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2067 
2068 	  if (id->regimplify)
2069 	    gimple_regimplify_operands (stmt, &seq_gsi);
2070 
2071 	  stmt_added = true;
2072 	}
2073 
2074       if (!stmt_added)
2075 	continue;
2076 
2077       /* If copy_basic_block was empty at the start of this iteration,
2078 	 call gsi_start_bb again to get at the newly added statements.  */
2079       if (gsi_end_p (copy_gsi))
2080 	copy_gsi = gsi_start_bb (copy_basic_block);
2081       else
2082 	gsi_next (&copy_gsi);
2083 
2084       /* Process the new statement.  The call to gimple_regimplify_operands
2085 	 possibly turned the statement into multiple statements; we
2086 	 need to process all of them.  */
2087       do
2088 	{
2089 	  tree fn;
2090 	  gcall *call_stmt;
2091 
2092 	  stmt = gsi_stmt (copy_gsi);
2093 	  call_stmt = dyn_cast <gcall *> (stmt);
2094 	  if (call_stmt
2095 	      && gimple_call_va_arg_pack_p (call_stmt)
2096 	      && id->call_stmt
2097 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
2098 	    {
2099 	      /* __builtin_va_arg_pack () should be replaced by
2100 		 all arguments corresponding to ... in the caller.  */
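	      /* Hypothetical example: if the caller does f (x, 1, 2, 3) and
		 the inlined f (one named parameter) contains
		 g (y, __builtin_va_arg_pack ()), the copied call becomes
		 g (y, 1, 2, 3).  */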
2101 	      tree p;
2102 	      gcall *new_call;
2103 	      vec<tree> argarray;
2104 	      size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2105 	      size_t nargs = nargs_caller;
2106 
2107 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2108 		{
2109 		  /* Avoid crashing on invalid IL where the callee is not
2110 		     actually varargs or too few arguments are passed.  */
2111 		  if (nargs == 0)
2112 		    break;
2113 		  nargs--;
2114 		}
2115 
2116 	      /* Create the new array of arguments.  */
2117 	      size_t nargs_callee = gimple_call_num_args (call_stmt);
2118 	      size_t n = nargs + nargs_callee;
2119 	      argarray.create (n);
2120 	      argarray.safe_grow_cleared (n);
2121 
2122 	      /* Copy all the arguments before '...'  */
2123 	      if (nargs_callee)
2124 		memcpy (argarray.address (),
2125 			gimple_call_arg_ptr (call_stmt, 0),
2126 			nargs_callee * sizeof (tree));
2127 
2128 	      /* Append the arguments passed in '...'  */
2129 	      if (nargs)
2130 		memcpy (argarray.address () + nargs_callee,
2131 			gimple_call_arg_ptr (id->call_stmt, 0)
2132 			+ (nargs_caller - nargs), nargs * sizeof (tree));
2133 
2134 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2135 						argarray);
2136 
2137 	      argarray.release ();
2138 
2139 	      /* Copy all GIMPLE_CALL flags, location and block, except
2140 		 GF_CALL_VA_ARG_PACK.  */
2141 	      gimple_call_copy_flags (new_call, call_stmt);
2142 	      gimple_call_set_va_arg_pack (new_call, false);
2143 	      gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2144 	      /* location includes block.  */
2145 	      gimple_set_location (new_call, gimple_location (stmt));
2146 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2147 
2148 	      gsi_replace (&copy_gsi, new_call, false);
2149 	      stmt = new_call;
2150 	    }
2151 	  else if (call_stmt
2152 		   && id->call_stmt
2153 		   && (decl = gimple_call_fndecl (stmt))
2154 		   && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2155 	    {
2156 	      /* __builtin_va_arg_pack_len () should be replaced by
2157 		 the number of anonymous arguments.  */
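	      /* Hypothetical example: for a caller doing f (x, 1, 2, 3)
		 where f has one named parameter,
		 __builtin_va_arg_pack_len () evaluates to 3.  */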
2158 	      size_t nargs = gimple_call_num_args (id->call_stmt);
2159 	      tree count, p;
2160 	      gimple *new_stmt;
2161 
2162 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2163 		nargs--;
2164 
2165 	      if (!gimple_call_lhs (stmt))
2166 		{
2167 		  /* Drop unused calls.  */
2168 		  gsi_remove (&copy_gsi, false);
2169 		  continue;
2170 		}
2171 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2172 		{
2173 		  count = build_int_cst (integer_type_node, nargs);
2174 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2175 		  gsi_replace (&copy_gsi, new_stmt, false);
2176 		  stmt = new_stmt;
2177 		}
2178 	      else if (nargs != 0)
2179 		{
2180 		  tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2181 		  count = build_int_cst (integer_type_node, nargs);
2182 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2183 						  PLUS_EXPR, newlhs, count);
2184 		  gimple_call_set_lhs (stmt, newlhs);
2185 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2186 		}
2187 	    }
2188 	  else if (call_stmt
2189 		   && id->call_stmt
2190 		   && gimple_call_internal_p (stmt)
2191 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2192 	    {
2193 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
2194 	      gsi_remove (&copy_gsi, false);
2195 	      continue;
2196 	    }
2197 
2198 	  /* Statements produced by inlining can be unfolded, especially
2199 	     when we have constant propagated some operands.  We can't fold
2200 	     them right now for two reasons:
2201 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
2202 	     2) we can't change function calls to builtins.
2203 	     So we just mark the statement for later folding.  We mark
2204 	     all new statements, instead of just the statements that changed
2205 	     by some nontrivial substitution, so that even statements made
2206 	     foldable indirectly are updated.  If this turns out to be
2207 	     expensive, copy_body can be told to watch for nontrivial
2208 	     changes.  */
2209 	  if (id->statements_to_fold)
2210 	    id->statements_to_fold->add (stmt);
2211 
2212 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2213 	     callgraph edges and update or duplicate them.  */
2214 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2215 	    {
2216 	      struct cgraph_edge *edge;
2217 
2218 	      switch (id->transform_call_graph_edges)
2219 		{
2220 		case CB_CGE_DUPLICATE:
2221 		  edge = id->src_node->get_edge (orig_stmt);
2222 		  if (edge)
2223 		    {
2224 		      struct cgraph_edge *old_edge = edge;
2225 
2226 		      /* A speculative call consists of multiple
2227 			 edges - an indirect edge and one or more direct edges.
2228 			 Duplicate the whole thing and distribute frequencies
2229 			 accordingly.  */
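		      /* Sketch with invented counts: two direct targets with
			 counts 60 and 30 plus an indirect remainder of 10
			 yield probabilities of 60%, 30% and 10%, which are
			 applied below to the copied block's count.  */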
2230 		      if (edge->speculative)
2231 			{
2232 			  int n = 0;
2233 			  profile_count direct_cnt
2234 				 = profile_count::zero ();
2235 
2236 			  /* First figure out the distribution of counts
2237 			     so we can re-scale BB profile accordingly.  */
2238 			  for (cgraph_edge *e = old_edge; e;
2239 			       e = e->next_speculative_call_target ())
2240 			    direct_cnt = direct_cnt + e->count;
2241 
2242 			  cgraph_edge *indirect
2243 				 = old_edge->speculative_call_indirect_edge ();
2244 			  profile_count indir_cnt = indirect->count;
2245 
2246 			  /* Next iterate over all direct edges, clone each one
2247 			     and its corresponding reference, and update the profile.  */
2248 			  for (cgraph_edge *e = old_edge;
2249 			       e;
2250 			       e = e->next_speculative_call_target ())
2251 			    {
2252 			      profile_count cnt = e->count;
2253 
2254 			      id->dst_node->clone_reference
2255 				 (e->speculative_call_target_ref (), stmt);
2256 			      edge = e->clone (id->dst_node, call_stmt,
2257 					       gimple_uid (stmt), num, den,
2258 					       true);
2259 			      profile_probability prob
2260 				 = cnt.probability_in (direct_cnt
2261 						       + indir_cnt);
2262 			      edge->count
2263 				 = copy_basic_block->count.apply_probability
2264 					 (prob);
2265 			      n++;
2266 			    }
2267 			  gcc_checking_assert
2268 				 (indirect->num_speculative_call_targets_p ()
2269 				  == n);
2270 
2271 			  /* Duplicate the indirect edge after all direct edges
2272 			     have been cloned.  */
2273 			  indirect = indirect->clone (id->dst_node, call_stmt,
2274 						      gimple_uid (stmt),
2275 						      num, den,
2276 						      true);
2277 
2278 			  profile_probability prob
2279 			     = indir_cnt.probability_in (direct_cnt
2280 							 + indir_cnt);
2281 			  indirect->count
2282 			     = copy_basic_block->count.apply_probability (prob);
2283 			}
2284 		      else
2285 			{
2286 			  edge = edge->clone (id->dst_node, call_stmt,
2287 					      gimple_uid (stmt),
2288 					      num, den,
2289 					      true);
2290 			  edge->count = copy_basic_block->count;
2291 			}
2292 		    }
2293 		  break;
2294 
2295 		case CB_CGE_MOVE_CLONES:
2296 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2297 								call_stmt);
2298 		  edge = id->dst_node->get_edge (stmt);
2299 		  break;
2300 
2301 		case CB_CGE_MOVE:
2302 		  edge = id->dst_node->get_edge (orig_stmt);
2303 		  if (edge)
2304 		    edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2305 		  break;
2306 
2307 		default:
2308 		  gcc_unreachable ();
2309 		}
2310 
2311 	      /* Constant propagation on arguments done during inlining
2312 		 may create a new direct call.  Produce an edge for it.  */
2313 	      if ((!edge
2314 		   || (edge->indirect_inlining_edge
2315 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2316 		  && id->dst_node->definition
2317 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2318 		{
2319 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2320 
2321 		  /* We have a missing edge in the callgraph.  This can happen
2322 		     when previous inlining turned an indirect call into a
2323 		     direct call by constant propagating arguments or when we
2324 		     are producing a dead clone (for further cloning).  In all
2325 		     other cases we hit a bug (incorrect node sharing is the
2326 		     most common reason for missing edges).  */
2327 		  gcc_assert (!dest->definition
2328 			      || dest->address_taken
2329 		  	      || !id->src_node->definition
2330 			      || !id->dst_node->definition);
2331 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2332 		    id->dst_node->create_edge_including_clones
2333 		      (dest, orig_stmt, call_stmt, bb->count,
2334 		       CIF_ORIGINALLY_INDIRECT_CALL);
2335 		  else
2336 		    id->dst_node->create_edge (dest, call_stmt,
2337 					bb->count)->inline_failed
2338 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2339 		  if (dump_file)
2340 		    {
2341 		      fprintf (dump_file, "Created new direct edge to %s\n",
2342 			       dest->dump_name ());
2343 		    }
2344 		}
2345 
2346 	      notice_special_calls (as_a <gcall *> (stmt));
2347 	    }
2348 
2349 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2350 				      id->eh_map, id->eh_lp_nr);
2351 
2352 	  gsi_next (&copy_gsi);
2353 	}
2354       while (!gsi_end_p (copy_gsi));
2355 
2356       copy_gsi = gsi_last_bb (copy_basic_block);
2357     }
2358 
2359   return copy_basic_block;
2360 }
2361 
2362 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2363    SSA form is quite easy, since the dominator relationship for the old
2364    basic blocks does not change.
2365 
2366    There is however an exception: inlining might change the dominator
2367    relation across EH edges from basic blocks within the inlined function
2368    to landing pads in the function we inline into.
2369 
2370    The function fills in PHI_RESULTs of such PHI nodes if they refer
2371    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2372    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2373    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2374    set, and this means that there will be no overlapping live ranges
2375    for the underlying symbol.
2376 
2377    This might change in the future if we allow redirecting of EH edges and
2378    we might then want to change the way we build the CFG pre-inlining to
2379    include all the possible edges.  */
2380 static void
2381 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2382 				  bool can_throw, bool nonlocal_goto)
2383 {
2384   edge e;
2385   edge_iterator ei;
2386 
2387   FOR_EACH_EDGE (e, ei, bb->succs)
2388     if (!e->dest->aux
2389 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2390       {
2391 	gphi *phi;
2392 	gphi_iterator si;
2393 
2394 	if (!nonlocal_goto)
2395 	  gcc_assert (e->flags & EDGE_EH);
2396 
2397 	if (!can_throw)
2398 	  gcc_assert (!(e->flags & EDGE_EH));
2399 
2400 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2401 	  {
2402 	    edge re;
2403 
2404 	    phi = si.phi ();
2405 
2406 	    /* For abnormal goto/call edges the receiver can be the
2407 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2408 
2409 	    gcc_assert ((e->flags & EDGE_EH)
2410 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2411 
2412 	    re = find_edge (ret_bb, e->dest);
2413 	    gcc_checking_assert (re);
2414 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2415 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2416 
2417 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2418 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2419 	  }
2420       }
2421 }
2422 
2423 /* Insert clobbers for automatic variables of the inlined ID->src_fn
2424    function at the start of basic block ID->eh_landing_pad_dest.  */
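/* In GIMPLE dumps such a clobber appears as e.g. "var ={v} {CLOBBER};"; it
   tells later passes that the storage of the inlined variable is dead once
   an exception leaves the inlined body.  (Descriptive note; the statement is
   built below with build_clobber and gimple_build_assign.)  */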
2425 
2426 static void
2427 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2428 {
2429   tree var;
2430   basic_block bb = id->eh_landing_pad_dest;
2431   live_vars_map *vars = NULL;
2432   unsigned int cnt = 0;
2433   unsigned int i;
2434   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2435     if (VAR_P (var)
2436 	&& !DECL_HARD_REGISTER (var)
2437 	&& !TREE_THIS_VOLATILE (var)
2438 	&& !DECL_HAS_VALUE_EXPR_P (var)
2439 	&& !is_gimple_reg (var)
2440 	&& auto_var_in_fn_p (var, id->src_fn)
2441 	&& !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2442       {
2443 	tree *t = id->decl_map->get (var);
2444 	if (!t)
2445 	  continue;
2446 	tree new_var = *t;
2447 	if (VAR_P (new_var)
2448 	    && !DECL_HARD_REGISTER (new_var)
2449 	    && !TREE_THIS_VOLATILE (new_var)
2450 	    && !DECL_HAS_VALUE_EXPR_P (new_var)
2451 	    && !is_gimple_reg (new_var)
2452 	    && auto_var_in_fn_p (new_var, id->dst_fn))
2453 	  {
2454 	    if (vars == NULL)
2455 	      vars = new live_vars_map;
2456             vars->put (DECL_UID (var), cnt++);
2457 	  }
2458       }
2459   if (vars == NULL)
2460     return;
2461 
2462   vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2463   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2464     if (VAR_P (var))
2465       {
2466 	edge e;
2467 	edge_iterator ei;
2468 	bool needed = false;
2469 	unsigned int *v = vars->get (DECL_UID (var));
2470 	if (v == NULL)
2471 	  continue;
2472 	FOR_EACH_EDGE (e, ei, bb->preds)
2473 	  if ((e->flags & EDGE_EH) != 0
2474 	      && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2475 	    {
2476 	      basic_block src_bb = (basic_block) e->src->aux;
2477 
2478 	      if (bitmap_bit_p (&live[src_bb->index], *v))
2479 		{
2480 		  needed = true;
2481 		  break;
2482 		}
2483 	    }
2484 	if (needed)
2485 	  {
2486 	    tree new_var = *id->decl_map->get (var);
2487 	    gimple_stmt_iterator gsi = gsi_after_labels (bb);
2488 	    tree clobber = build_clobber (TREE_TYPE (new_var));
2489 	    gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2490 	    gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2491 	  }
2492       }
2493   destroy_live_vars (live);
2494   delete vars;
2495 }
2496 
2497 /* Copy edges from BB into its copy constructed earlier, scale profile
2498    accordingly.  Edges will be taken care of later.  Assume the aux
2499    pointers point to the copies of each BB.  Return true if any
2500    debug stmts are left after a statement that must end the basic block.  */
2501 
2502 static bool
2503 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2504 		   basic_block ret_bb, basic_block abnormal_goto_dest,
2505 		   copy_body_data *id)
2506 {
2507   basic_block new_bb = (basic_block) bb->aux;
2508   edge_iterator ei;
2509   edge old_edge;
2510   gimple_stmt_iterator si;
2511   bool need_debug_cleanup = false;
2512 
2513   /* Use the indices from the original blocks to create edges for the
2514      new ones.  */
2515   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2516     if (!(old_edge->flags & EDGE_EH))
2517       {
2518 	edge new_edge;
2519 	int flags = old_edge->flags;
2520 	location_t locus = old_edge->goto_locus;
2521 
2522 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2523 	if (old_edge->dest->index == EXIT_BLOCK
2524 	    && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2525 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2526 	  flags |= EDGE_FALLTHRU;
2527 
2528 	new_edge
2529 	  = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2530 	new_edge->probability = old_edge->probability;
2531 	if (!id->reset_location)
2532 	  new_edge->goto_locus = remap_location (locus, id);
2533       }
2534 
2535   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2536     return false;
2537 
2538   /* When doing function splitting, we must decrease the count of the return
2539      block which was previously reachable from a block we did not copy.  */
2540   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2541     FOR_EACH_EDGE (old_edge, ei, bb->preds)
2542       if (old_edge->src->index != ENTRY_BLOCK
2543 	  && !old_edge->src->aux)
2544 	new_bb->count -= old_edge->count ().apply_scale (num, den);
2545 
2546   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2547     {
2548       gimple *copy_stmt;
2549       bool can_throw, nonlocal_goto;
2550 
2551       copy_stmt = gsi_stmt (si);
2552       if (!is_gimple_debug (copy_stmt))
2553 	update_stmt (copy_stmt);
2554 
2555       /* Do this before the possible split_block.  */
2556       gsi_next (&si);
2557 
2558       /* If this tree could throw an exception, there are two
2559          cases where we need to add abnormal edge(s): the
2560          tree wasn't in a region and there is a "current
2561          region" in the caller; or the original tree had
2562          EH edges.  In both cases split the block after the tree,
2563          and add abnormal edge(s) as needed; we need both
2564          those from the callee and the caller.
2565          We check whether the copy can throw, because the const
2566          propagation can change an INDIRECT_REF which throws
2567          into a COMPONENT_REF which doesn't.  If the copy
2568          can throw, the original could also throw.  */
2569       can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2570       nonlocal_goto
2571 	= (stmt_can_make_abnormal_goto (copy_stmt)
2572 	   && !computed_goto_p (copy_stmt));
2573 
2574       if (can_throw || nonlocal_goto)
2575 	{
2576 	  if (!gsi_end_p (si))
2577 	    {
2578 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2579 		gsi_next (&si);
2580 	      if (gsi_end_p (si))
2581 		need_debug_cleanup = true;
2582 	    }
2583 	  if (!gsi_end_p (si))
2584 	    /* Note that bb's predecessor edges aren't necessarily
2585 	       right at this point; split_block doesn't care.  */
2586 	    {
2587 	      edge e = split_block (new_bb, copy_stmt);
2588 
2589 	      new_bb = e->dest;
2590 	      new_bb->aux = e->src->aux;
2591 	      si = gsi_start_bb (new_bb);
2592 	    }
2593 	}
2594 
2595       bool update_probs = false;
2596 
2597       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2598 	{
2599 	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2600 	  update_probs = true;
2601 	}
2602       else if (can_throw)
2603 	{
2604 	  make_eh_edges (copy_stmt);
2605 	  update_probs = true;
2606 	}
2607 
2608       /* EH edges may not match old edges.  Copy as much as possible.  */
2609       if (update_probs)
2610 	{
2611           edge e;
2612           edge_iterator ei;
2613 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2614 
2615           FOR_EACH_EDGE (old_edge, ei, bb->succs)
2616             if ((old_edge->flags & EDGE_EH)
2617 		&& (e = find_edge (copy_stmt_bb,
2618 				   (basic_block) old_edge->dest->aux))
2619 		&& (e->flags & EDGE_EH))
2620 	      e->probability = old_edge->probability;
2621 
2622           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2623 	    if (e->flags & EDGE_EH)
2624 	      {
2625 		if (!e->probability.initialized_p ())
2626 		  e->probability = profile_probability::never ();
2627 		if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2628 		  {
2629 		    if (id->eh_landing_pad_dest == NULL)
2630 		      id->eh_landing_pad_dest = e->dest;
2631 		    else
2632 		      gcc_assert (id->eh_landing_pad_dest == e->dest);
2633 		  }
2634 	      }
2635         }
2636 
2637 
2638       /* If the call we inline cannot make an abnormal goto, do not add
2639          additional abnormal edges but only retain those already present
2640 	 in the original function body.  */
2641       if (abnormal_goto_dest == NULL)
2642 	nonlocal_goto = false;
2643       if (nonlocal_goto)
2644 	{
2645 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2646 
2647 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2648 	    nonlocal_goto = false;
2649 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2650 	     in OpenMP regions which aren't allowed to be left abnormally.
2651 	     So, no need to add abnormal edge in that case.  */
2652 	  else if (is_gimple_call (copy_stmt)
2653 		   && gimple_call_internal_p (copy_stmt)
2654 		   && (gimple_call_internal_fn (copy_stmt)
2655 		       == IFN_ABNORMAL_DISPATCHER)
2656 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2657 	    nonlocal_goto = false;
2658 	  else
2659 	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2660 				   EDGE_ABNORMAL);
2661 	}
2662 
2663       if ((can_throw || nonlocal_goto)
2664 	  && gimple_in_ssa_p (cfun))
2665 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2666 					  can_throw, nonlocal_goto);
2667     }
2668   return need_debug_cleanup;
2669 }
2670 
2671 /* Copy the PHIs.  All blocks and edges are copied, some blocks
2672    were possibly split and new outgoing EH edges inserted.
2673    BB points to the block of the original function and AUX pointers link
2674    the original and newly copied blocks.  */
2675 
2676 static void
2677 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2678 {
2679   basic_block const new_bb = (basic_block) bb->aux;
2680   edge_iterator ei;
2681   gphi *phi;
2682   gphi_iterator si;
2683   edge new_edge;
2684   bool inserted = false;
2685 
2686   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2687     {
2688       tree res, new_res;
2689       gphi *new_phi;
2690 
2691       phi = si.phi ();
2692       res = PHI_RESULT (phi);
2693       new_res = res;
2694       if (!virtual_operand_p (res))
2695 	{
2696 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2697 	  if (EDGE_COUNT (new_bb->preds) == 0)
2698 	    {
2699 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2700 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2701 	    }
2702 	  else
2703 	    {
2704 	      new_phi = create_phi_node (new_res, new_bb);
2705 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2706 		{
2707 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2708 					     bb);
2709 		  tree arg;
2710 		  tree new_arg;
2711 		  edge_iterator ei2;
2712 		  location_t locus;
2713 
2714 		  /* When doing partial cloning, we allow PHIs on the entry
2715 		     block as long as all the arguments are the same.
2716 		     Find any input edge to see the argument to copy.  */
2717 		  if (!old_edge)
2718 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2719 		      if (!old_edge->src->aux)
2720 			break;
2721 
2722 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2723 		  new_arg = arg;
2724 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2725 		  gcc_assert (new_arg);
2726 		  /* With return slot optimization we can end up with
2727 		     non-gimple (foo *)&this->m, fix that here.  */
2728 		  if (TREE_CODE (new_arg) != SSA_NAME
2729 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2730 		      && !is_gimple_val (new_arg))
2731 		    {
2732 		      gimple_seq stmts = NULL;
2733 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2734 						      NULL);
2735 		      gsi_insert_seq_on_edge (new_edge, stmts);
2736 		      inserted = true;
2737 		    }
2738 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2739 		  if (id->reset_location)
2740 		    locus = input_location;
2741 		  else
2742 		    locus = remap_location (locus, id);
2743 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2744 		}
2745 	    }
2746 	}
2747     }
2748 
2749   /* Commit the delayed edge insertions.  */
2750   if (inserted)
2751     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2752       gsi_commit_one_edge_insert (new_edge, NULL);
2753 }
2754 
2755 
2756 /* Wrapper for remap_decl so it can be used as a callback.  */
2757 
2758 static tree
2759 remap_decl_1 (tree decl, void *data)
2760 {
2761   return remap_decl (decl, (copy_body_data *) data);
2762 }
2763 
2764 /* Build struct function and associated data structures for the new clone
2765    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2766    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2767 
2768 static void
2769 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2770 {
2771   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2772 
2773   if (!DECL_ARGUMENTS (new_fndecl))
2774     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2775   if (!DECL_RESULT (new_fndecl))
2776     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2777 
2778   /* Register specific tree functions.  */
2779   gimple_register_cfg_hooks ();
2780 
2781   /* Get clean struct function.  */
2782   push_struct_function (new_fndecl);
2783 
2784   /* We will rebuild these, so just sanity check that they are empty.  */
2785   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2786   gcc_assert (cfun->local_decls == NULL);
2787   gcc_assert (cfun->cfg == NULL);
2788   gcc_assert (cfun->decl == new_fndecl);
2789 
2790   /* Copy items we preserve during cloning.  */
2791   cfun->static_chain_decl = src_cfun->static_chain_decl;
2792   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2793   cfun->function_end_locus = src_cfun->function_end_locus;
2794   cfun->curr_properties = src_cfun->curr_properties;
2795   cfun->last_verified = src_cfun->last_verified;
2796   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2797   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2798   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2799   cfun->calls_eh_return = src_cfun->calls_eh_return;
2800   cfun->stdarg = src_cfun->stdarg;
2801   cfun->after_inlining = src_cfun->after_inlining;
2802   cfun->can_throw_non_call_exceptions
2803     = src_cfun->can_throw_non_call_exceptions;
2804   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2805   cfun->returns_struct = src_cfun->returns_struct;
2806   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2807 
2808   init_empty_tree_cfg ();
2809 
2810   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2811 
2812   profile_count num = count;
2813   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2814   profile_count::adjust_for_ipa_scaling (&num, &den);
2815 
2816   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2817     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2818 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2819   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2820     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2821 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2822   if (src_cfun->eh)
2823     init_eh_for_function ();
2824 
2825   if (src_cfun->gimple_df)
2826     {
2827       init_tree_ssa (cfun);
2828       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2829       if (cfun->gimple_df->in_ssa_p)
2830 	init_ssa_operands (cfun);
2831     }
2832 }
2833 
2834 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2835    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2836    successor has multiple predecessors, reset them; otherwise keep
2837    their value.  */
2838 
2839 static void
2840 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2841 {
2842   edge e;
2843   edge_iterator ei;
2844   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2845 
2846   if (gsi_end_p (si)
2847       || gsi_one_before_end_p (si)
2848       || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2849 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2850     return;
2851 
2852   FOR_EACH_EDGE (e, ei, new_bb->succs)
2853     {
2854       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2855       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2856       while (is_gimple_debug (gsi_stmt (ssi)))
2857 	{
2858 	  gimple *stmt = gsi_stmt (ssi);
2859 	  gdebug *new_stmt;
2860 	  tree var;
2861 	  tree value;
2862 
2863 	  /* For the last edge move the debug stmts instead of copying
2864 	     them.  */
2865 	  if (ei_one_before_end_p (ei))
2866 	    {
2867 	      si = ssi;
2868 	      gsi_prev (&ssi);
2869 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2870 		{
2871 		  gimple_debug_bind_reset_value (stmt);
2872 		  gimple_set_location (stmt, UNKNOWN_LOCATION);
2873 		}
2874 	      gsi_remove (&si, false);
2875 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2876 	      continue;
2877 	    }
2878 
2879 	  if (gimple_debug_bind_p (stmt))
2880 	    {
2881 	      var = gimple_debug_bind_get_var (stmt);
2882 	      if (single_pred_p (e->dest))
2883 		{
2884 		  value = gimple_debug_bind_get_value (stmt);
2885 		  value = unshare_expr (value);
2886 		  new_stmt = gimple_build_debug_bind (var, value, stmt);
2887 		}
2888 	      else
2889 		new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2890 	    }
2891 	  else if (gimple_debug_source_bind_p (stmt))
2892 	    {
2893 	      var = gimple_debug_source_bind_get_var (stmt);
2894 	      value = gimple_debug_source_bind_get_value (stmt);
2895 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2896 	    }
2897 	  else if (gimple_debug_nonbind_marker_p (stmt))
2898 	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2899 	  else
2900 	    gcc_unreachable ();
2901 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2902 	  id->debug_stmts.safe_push (new_stmt);
2903 	  gsi_prev (&ssi);
2904 	}
2905     }
2906 }
2907 
2908 /* Make a copy of the sub-loops of SRC_PARENT and place them
2909    as sub-loops of DEST_PARENT.  */
2910 
2911 static void
2912 copy_loops (copy_body_data *id,
2913 	    class loop *dest_parent, class loop *src_parent)
2914 {
2915   class loop *src_loop = src_parent->inner;
2916   while (src_loop)
2917     {
2918       if (!id->blocks_to_copy
2919 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2920 	{
2921 	  class loop *dest_loop = alloc_loop ();
2922 
2923 	  /* Assign the new loop its header and latch and associate
2924 	     those with the new loop.  */
2925 	  dest_loop->header = (basic_block)src_loop->header->aux;
2926 	  dest_loop->header->loop_father = dest_loop;
2927 	  if (src_loop->latch != NULL)
2928 	    {
2929 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2930 	      dest_loop->latch->loop_father = dest_loop;
2931 	    }
2932 
2933 	  /* Copy loop meta-data.  */
2934 	  copy_loop_info (src_loop, dest_loop);
2935 	  if (dest_loop->unroll)
2936 	    cfun->has_unroll = true;
2937 	  if (dest_loop->force_vectorize)
2938 	    cfun->has_force_vectorize_loops = true;
2939 	  if (id->src_cfun->last_clique != 0)
2940 	    dest_loop->owned_clique
2941 	      = remap_dependence_clique (id,
2942 					 src_loop->owned_clique
2943 					 ? src_loop->owned_clique : 1);
2944 
2945 	  /* Finally place it into the loop array and the loop tree.  */
2946 	  place_new_loop (cfun, dest_loop);
2947 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2948 
2949 	  if (src_loop->simduid)
2950 	    {
2951 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2952 	      cfun->has_simduid_loops = true;
2953 	    }
2954 
2955 	  /* Recurse.  */
2956 	  copy_loops (id, dest_loop, src_loop);
2957 	}
2958       src_loop = src_loop->next;
2959     }
2960 }
2961 
2962 /* Call redirect_call_stmt_to_callee on all calls in BB.  */
2963 
2964 void
2965 redirect_all_calls (copy_body_data * id, basic_block bb)
2966 {
2967   gimple_stmt_iterator si;
2968   gimple *last = last_stmt (bb);
2969   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2970     {
2971       gimple *stmt = gsi_stmt (si);
2972       if (is_gimple_call (stmt))
2973 	{
2974 	  tree old_lhs = gimple_call_lhs (stmt);
2975 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2976 	  if (edge)
2977 	    {
2978 	      gimple *new_stmt
2979 		= cgraph_edge::redirect_call_stmt_to_callee (edge);
2980 	      /* If IPA-SRA transformation, run as part of edge redirection,
2981 		 removed the LHS because it is unused, save it to
2982 		 killed_new_ssa_names so that we can prune it from debug
2983 		 statements.  */
2984 	      if (old_lhs
2985 		  && TREE_CODE (old_lhs) == SSA_NAME
2986 		  && !gimple_call_lhs (new_stmt))
2987 		{
2988 		  if (!id->killed_new_ssa_names)
2989 		    id->killed_new_ssa_names = new hash_set<tree> (16);
2990 		  id->killed_new_ssa_names->add (old_lhs);
2991 		}
2992 
2993 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2994 		gimple_purge_dead_eh_edges (bb);
2995 	    }
2996 	}
2997     }
2998 }
2999 
3000 /* Make a copy of the body of FN so that it can be inserted inline in
3001    another function.  Walks FN via CFG, returns new fndecl.  */
3002 
3003 static tree
3004 copy_cfg_body (copy_body_data * id,
3005 	       basic_block entry_block_map, basic_block exit_block_map,
3006 	       basic_block new_entry)
3007 {
3008   tree callee_fndecl = id->src_fn;
3009   /* Original cfun for the callee, doesn't change.  */
3010   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3011   struct function *cfun_to_copy;
3012   basic_block bb;
3013   tree new_fndecl = NULL;
3014   bool need_debug_cleanup = false;
3015   int last;
3016   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3017   profile_count num = entry_block_map->count;
3018 
3019   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3020 
3021   /* Register specific tree functions.  */
3022   gimple_register_cfg_hooks ();
3023 
3024   /* If we are inlining just a region of the function, make sure to connect
3025      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
3026      be part of a loop, we must compute the frequency and probability of
3027      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3028      probabilities of edges incoming from the nonduplicated region.  */
3029   if (new_entry)
3030     {
3031       edge e;
3032       edge_iterator ei;
3033       den = profile_count::zero ();
3034 
3035       FOR_EACH_EDGE (e, ei, new_entry->preds)
3036 	if (!e->src->aux)
3037 	  den += e->count ();
3038       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3039     }
3040 
3041   profile_count::adjust_for_ipa_scaling (&num, &den);
3042 
3043   /* Must have a CFG here at this point.  */
3044   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3045 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
3046 
3047 
3048   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3049   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3050   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3051   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3052 
3053   /* Duplicate any exception-handling regions.  */
3054   if (cfun->eh)
3055     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3056 				       remap_decl_1, id);
3057 
3058   /* Use aux pointers to map the original blocks to copy.  */
3059   FOR_EACH_BB_FN (bb, cfun_to_copy)
3060     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3061       {
3062 	basic_block new_bb = copy_bb (id, bb, num, den);
3063 	bb->aux = new_bb;
3064 	new_bb->aux = bb;
3065 	new_bb->loop_father = entry_block_map->loop_father;
3066       }
3067 
3068   last = last_basic_block_for_fn (cfun);
3069 
3070   /* Now that we've duplicated the blocks, duplicate their edges.  */
3071   basic_block abnormal_goto_dest = NULL;
3072   if (id->call_stmt
3073       && stmt_can_make_abnormal_goto (id->call_stmt))
3074     {
3075       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3076 
3077       bb = gimple_bb (id->call_stmt);
3078       gsi_next (&gsi);
3079       if (gsi_end_p (gsi))
3080 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3081     }
3082   FOR_ALL_BB_FN (bb, cfun_to_copy)
3083     if (!id->blocks_to_copy
3084 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3085       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3086 					       abnormal_goto_dest, id);
3087 
3088   if (id->eh_landing_pad_dest)
3089     {
3090       add_clobbers_to_eh_landing_pad (id);
3091       id->eh_landing_pad_dest = NULL;
3092     }
3093 
3094   if (new_entry)
3095     {
3096       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3097 			  EDGE_FALLTHRU);
3098       e->probability = profile_probability::always ();
3099     }
3100 
3101   /* Duplicate the loop tree, if available and wanted.  */
3102   if (loops_for_fn (src_cfun) != NULL
3103       && current_loops != NULL)
3104     {
3105       copy_loops (id, entry_block_map->loop_father,
3106 		  get_loop (src_cfun, 0));
3107       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
3108       loops_state_set (LOOPS_NEED_FIXUP);
3109     }
3110 
3111   /* If the loop tree in the source function needed fixup, mark the
3112      destination loop tree for fixup, too.  */
3113   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3114     loops_state_set (LOOPS_NEED_FIXUP);
3115 
3116   if (gimple_in_ssa_p (cfun))
3117     FOR_ALL_BB_FN (bb, cfun_to_copy)
3118       if (!id->blocks_to_copy
3119 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3120 	copy_phis_for_bb (bb, id);
3121 
3122   FOR_ALL_BB_FN (bb, cfun_to_copy)
3123     if (bb->aux)
3124       {
3125 	if (need_debug_cleanup
3126 	    && bb->index != ENTRY_BLOCK
3127 	    && bb->index != EXIT_BLOCK)
3128 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3129 	/* Update call edge destinations.  This cannot be done before loop
3130 	   info is updated, because we may split basic blocks.  */
3131 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3132 	    && bb->index != ENTRY_BLOCK
3133 	    && bb->index != EXIT_BLOCK)
3134 	  redirect_all_calls (id, (basic_block)bb->aux);
3135 	((basic_block)bb->aux)->aux = NULL;
3136 	bb->aux = NULL;
3137       }
3138 
3139   /* Zero out AUX fields of newly created blocks during EH edge
3140      insertion.  */
3141   for (; last < last_basic_block_for_fn (cfun); last++)
3142     {
3143       if (need_debug_cleanup)
3144 	maybe_move_debug_stmts_to_successors (id,
3145 					      BASIC_BLOCK_FOR_FN (cfun, last));
3146       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3147       /* Update call edge destinations.  This cannot be done before loop
3148 	 info is updated, because we may split basic blocks.  */
3149       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3150 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3151     }
3152   entry_block_map->aux = NULL;
3153   exit_block_map->aux = NULL;
3154 
3155   if (id->eh_map)
3156     {
3157       delete id->eh_map;
3158       id->eh_map = NULL;
3159     }
3160   if (id->dependence_map)
3161     {
3162       delete id->dependence_map;
3163       id->dependence_map = NULL;
3164     }
3165 
3166   return new_fndecl;
3167 }
3168 
3169 /* Copy the debug STMT using ID.  We deal with these statements in a
3170    special way: if any variable in their VALUE expression wasn't
3171    remapped yet, we won't remap it, because that would get decl uids
3172    out of sync, causing codegen differences between -g and -g0.  If
3173    this arises, we drop the VALUE expression altogether.  */
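/* For example (a purely illustrative dump, not taken from any particular
   testcase), a bind such as
     # DEBUG d => a_1 + 1
   whose operand a_1 could not be remapped is reset below to
     # DEBUG d => NULL
   so that -g and -g0 compilations keep producing the same code.  */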
3174 
3175 static void
3176 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3177 {
3178   tree t, *n;
3179   struct walk_stmt_info wi;
3180 
3181   if (tree block = gimple_block (stmt))
3182     {
3183       n = id->decl_map->get (block);
3184       gimple_set_block (stmt, n ? *n : id->block);
3185     }
3186 
3187   if (gimple_debug_nonbind_marker_p (stmt))
3188     return;
3189 
3190   /* Remap all the operands of STMT.  */
3191   memset (&wi, 0, sizeof (wi));
3192   wi.info = id;
3193 
3194   processing_debug_stmt = 1;
3195 
3196   if (gimple_debug_source_bind_p (stmt))
3197     t = gimple_debug_source_bind_get_var (stmt);
3198   else if (gimple_debug_bind_p (stmt))
3199     t = gimple_debug_bind_get_var (stmt);
3200   else
3201     gcc_unreachable ();
3202 
3203   if (TREE_CODE (t) == PARM_DECL && id->debug_map
3204       && (n = id->debug_map->get (t)))
3205     {
3206       gcc_assert (VAR_P (*n));
3207       t = *n;
3208     }
3209   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3210     /* T is a non-localized variable.  */;
3211   else
3212     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3213 
3214   if (gimple_debug_bind_p (stmt))
3215     {
3216       gimple_debug_bind_set_var (stmt, t);
3217 
3218       if (gimple_debug_bind_has_value_p (stmt))
3219 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3220 		   remap_gimple_op_r, &wi, NULL);
3221 
3222       /* Punt if any decl couldn't be remapped.  */
3223       if (processing_debug_stmt < 0)
3224 	gimple_debug_bind_reset_value (stmt);
3225     }
3226   else if (gimple_debug_source_bind_p (stmt))
3227     {
3228       gimple_debug_source_bind_set_var (stmt, t);
3229       /* When inlining, if the source bind refers to one of the optimized
3230 	 away parameters, change the source bind into a normal debug bind
3231 	 referring to the corresponding DEBUG_EXPR_DECL that should have
3232 	 been bound before the call stmt.  */
3233       t = gimple_debug_source_bind_get_value (stmt);
3234       if (t != NULL_TREE
3235 	  && TREE_CODE (t) == PARM_DECL
3236 	  && id->call_stmt)
3237 	{
3238 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3239 	  unsigned int i;
3240 	  if (debug_args != NULL)
3241 	    {
3242 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3243 		if ((**debug_args)[i] == DECL_ORIGIN (t)
3244 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3245 		  {
3246 		    t = (**debug_args)[i + 1];
3247 		    stmt->subcode = GIMPLE_DEBUG_BIND;
3248 		    gimple_debug_bind_set_value (stmt, t);
3249 		    break;
3250 		  }
3251 	    }
3252 	}
3253       if (gimple_debug_source_bind_p (stmt))
3254 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3255 		   remap_gimple_op_r, &wi, NULL);
3256     }
3257 
3258   processing_debug_stmt = 0;
3259 
3260   update_stmt (stmt);
3261 }
3262 
3263 /* Process deferred debug stmts.  In order to give values better odds
3264    of being successfully remapped, we delay the processing of debug
3265    stmts until all other stmts that might require remapping are
3266    processed.  */
3267 
3268 static void
3269 copy_debug_stmts (copy_body_data *id)
3270 {
3271   size_t i;
3272   gdebug *stmt;
3273 
3274   if (!id->debug_stmts.exists ())
3275     return;
3276 
3277   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3278     copy_debug_stmt (stmt, id);
3279 
3280   id->debug_stmts.release ();
3281 }
3282 
3283 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3284    another function.  */
3285 
3286 static tree
3287 copy_tree_body (copy_body_data *id)
3288 {
3289   tree fndecl = id->src_fn;
3290   tree body = DECL_SAVED_TREE (fndecl);
3291 
3292   walk_tree (&body, copy_tree_body_r, id, NULL);
3293 
3294   return body;
3295 }
3296 
3297 /* Make a copy of the body of FN so that it can be inserted inline in
3298    another function.  */
3299 
3300 static tree
3301 copy_body (copy_body_data *id,
3302 	   basic_block entry_block_map, basic_block exit_block_map,
3303 	   basic_block new_entry)
3304 {
3305   tree fndecl = id->src_fn;
3306   tree body;
3307 
3308   /* If this body has a CFG, walk CFG and copy.  */
3309   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3310   body = copy_cfg_body (id, entry_block_map, exit_block_map,
3311 			new_entry);
3312   copy_debug_stmts (id);
3313   delete id->killed_new_ssa_names;
3314   id->killed_new_ssa_names = NULL;
3315 
3316   return body;
3317 }
3318 
3319 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3320    defined in function FN, or of a data member thereof.  */
3321 
3322 static bool
3323 self_inlining_addr_expr (tree value, tree fn)
3324 {
3325   tree var;
3326 
3327   if (TREE_CODE (value) != ADDR_EXPR)
3328     return false;
3329 
3330   var = get_base_address (TREE_OPERAND (value, 0));
3331 
3332   return var && auto_var_in_fn_p (var, fn);
3333 }
3334 
3335 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3336    lexical block and line number information from BASE_STMT, if given,
3337    or from the last stmt of the block otherwise.  */
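/* In GIMPLE dumps the generated annotation appears as, e.g.,
     # DEBUG x => 42
   (an illustrative example; the actual variable and value depend on the
   caller).  */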
3338 
3339 static gimple *
3340 insert_init_debug_bind (copy_body_data *id,
3341 			basic_block bb, tree var, tree value,
3342 			gimple *base_stmt)
3343 {
3344   gimple *note;
3345   gimple_stmt_iterator gsi;
3346   tree tracked_var;
3347 
3348   if (!gimple_in_ssa_p (id->src_cfun))
3349     return NULL;
3350 
3351   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3352     return NULL;
3353 
3354   tracked_var = target_for_debug_bind (var);
3355   if (!tracked_var)
3356     return NULL;
3357 
3358   if (bb)
3359     {
3360       gsi = gsi_last_bb (bb);
3361       if (!base_stmt && !gsi_end_p (gsi))
3362 	base_stmt = gsi_stmt (gsi);
3363     }
3364 
3365   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3366 
3367   if (bb)
3368     {
3369       if (!gsi_end_p (gsi))
3370 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3371       else
3372 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3373     }
3374 
3375   return note;
3376 }
3377 
3378 static void
3379 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3380 {
3381   /* If the variable being initialized is zero-sized, it is possible that
3382      the assignment statement results in no gimple statements.  */
3383   if (init_stmt)
3384     {
3385       gimple_stmt_iterator si = gsi_last_bb (bb);
3386 
3387       /* We can end up with init statements that store to a non-register
3388          from a rhs with a conversion.  Handle that here by forcing the
3389 	 rhs into a temporary.  gimple_regimplify_operands is not
3390 	 prepared to do this for us.  */
3391       if (!is_gimple_debug (init_stmt)
3392 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3393 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3394 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3395 	{
3396 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3397 			     gimple_expr_type (init_stmt),
3398 			     gimple_assign_rhs1 (init_stmt));
3399 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3400 					  GSI_NEW_STMT);
3401 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3402 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3403 	}
3404       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3405       if (!is_gimple_debug (init_stmt))
3406 	{
3407 	  gimple_regimplify_operands (init_stmt, &si);
3408 
3409 	  tree def = gimple_assign_lhs (init_stmt);
3410 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3411 	}
3412     }
3413 }
3414 
3415 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3416    if need be (which should only be necessary for invalid programs).  Attempt
3417    to convert VALUE to TYPE and return the result if it is possible; just return
3418    a zero constant of the given type if it fails.  */
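/* As an illustration (a hypothetical invalid-code scenario): if an actual
   argument of an aggregate type is passed where an integer is expected, the
   result is VIEW_CONVERT_EXPR <int> (arg); if instead the argument has a
   register type whose size differs from TYPE, a plain zero constant of TYPE
   is returned.  */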
3419 
3420 tree
3421 force_value_to_type (tree type, tree value)
3422 {
3423   /* If we can match up types by promotion/demotion do so.  */
3424   if (fold_convertible_p (type, value))
3425     return fold_convert (type, value);
3426 
3427   /* ???  For valid programs we should not end up here.
3428      Still if we end up with truly mismatched types here, fall back
3429      to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3430      GIMPLE to the following passes.  */
3431   if (!is_gimple_reg_type (TREE_TYPE (value))
3432       || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3433     return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3434   else
3435     return build_zero_cst (type);
3436 }
3437 
3438 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3439    at the end of BB.  When BB is NULL, we return the init statement to be
3440    output later.  */
3441 static gimple *
3442 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3443 		     basic_block bb, tree *vars)
3444 {
3445   gimple *init_stmt = NULL;
3446   tree var;
3447   tree rhs = value;
3448   tree def = (gimple_in_ssa_p (cfun)
3449 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3450 
3451   if (value
3452       && value != error_mark_node
3453       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3454     rhs = force_value_to_type (TREE_TYPE (p), value);
3455 
3456   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3457      here since the type of this decl must be visible to the calling
3458      function.  */
3459   var = copy_decl_to_var (p, id);
3460 
3461   /* Declare this new variable.  */
3462   DECL_CHAIN (var) = *vars;
3463   *vars = var;
3464 
3465   /* Make gimplifier happy about this variable.  */
3466   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3467 
3468   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3469      we would not need to create a new variable here at all, if it
3470      weren't for debug info.  Still, we can just use the argument
3471      value.  */
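  /* E.g. when inlining a call foo (42) whose parameter P is const, never
     written and never address-taken, P is simply mapped to 42 and only a
     debug bind is emitted (an illustrative case, assuming the conditions
     checked just below hold).  */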
3472   if (TREE_READONLY (p)
3473       && !TREE_ADDRESSABLE (p)
3474       && value && !TREE_SIDE_EFFECTS (value)
3475       && !def)
3476     {
3477       /* We may produce non-gimple trees by adding NOPs or introduce
3478 	 invalid sharing when the operand is not really constant.
3479 	 It is no big deal to prohibit constant propagation here, as
3480 	 we will constant-propagate in the DOM1 pass anyway.  */
3481       if (is_gimple_min_invariant (value)
3482 	  && useless_type_conversion_p (TREE_TYPE (p),
3483 						 TREE_TYPE (value))
3484 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3485 	     the base variable isn't a local variable of the inlined
3486 	     function, e.g., when doing recursive inlining, direct or
3487 	     mutually-recursive or whatever, which is why we don't
3488 	     just test whether fn == current_function_decl.  */
3489 	  && ! self_inlining_addr_expr (value, fn))
3490 	{
3491 	  insert_decl_map (id, p, value);
3492 	  insert_debug_decl_map (id, p, var);
3493 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3494 	}
3495     }
3496 
3497   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3498      that way, when the PARM_DECL is encountered, it will be
3499      automatically replaced by the VAR_DECL.  */
3500   insert_decl_map (id, p, var);
3501 
3502   /* Even if P was TREE_READONLY, the new VAR should not be.
3503      In the original code, we would have constructed a
3504      temporary, and then the function body would have never
3505      changed the value of P.  However, now, we will be
3506      constructing VAR directly.  The constructor body may
3507      change its value multiple times as it is being
3508      constructed.  Therefore, it must not be TREE_READONLY;
3509      the back-end assumes that TREE_READONLY variable is
3510      assigned to only once.  */
3511   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3512     TREE_READONLY (var) = 0;
3513 
3514   /* If there is no setup required and we are in SSA, take the easy route,
3515      replacing all SSA names representing the function parameter by the
3516      SSA name passed to the function.
3517 
3518      We need to construct a map for the variable anyway, as it might be used
3519      in different SSA names when the parameter is set in the function.
3520 
3521      Do the replacement at -O0 for const arguments replaced by a constant.
3522      This is important for builtin_constant_p and other constructs requiring
3523      the constant argument to be visible in the inlined function body.  */
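  /* For instance (illustrative SSA names), when inlining foo (x_3) where the
     default definition of parameter P is p_1(D), and the conditions below
     hold, we simply map p_1(D) -> x_3 instead of materializing an assignment
     p = x_3.  */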
3524   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3525       && (optimize
3526           || (TREE_READONLY (p)
3527 	      && is_gimple_min_invariant (rhs)))
3528       && (TREE_CODE (rhs) == SSA_NAME
3529 	  || is_gimple_min_invariant (rhs))
3530       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3531     {
3532       insert_decl_map (id, def, rhs);
3533       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3534     }
3535 
3536   /* If the value of the argument is never used, we don't care about
3537      initializing it.  */
3538   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3539     {
3540       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3541       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3542     }
3543 
3544   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3545      the argument to the proper type in case it was promoted.  */
3546   if (value)
3547     {
3548       if (rhs == error_mark_node)
3549 	{
3550 	  insert_decl_map (id, p, var);
3551 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3552 	}
3553 
3554       STRIP_USELESS_TYPE_CONVERSION (rhs);
3555 
3556       /* If we are in SSA form, properly remap the default definition,
3557          or assign to a dummy SSA name if the parameter is unused and
3558 	 we are not optimizing.  */
3559       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3560 	{
3561 	  if (def)
3562 	    {
3563 	      def = remap_ssa_name (def, id);
3564 	      init_stmt = gimple_build_assign (def, rhs);
3565 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3566 	      set_ssa_default_def (cfun, var, NULL);
3567 	    }
3568 	  else if (!optimize)
3569 	    {
3570 	      def = make_ssa_name (var);
3571 	      init_stmt = gimple_build_assign (def, rhs);
3572 	    }
3573 	}
3574       else
3575         init_stmt = gimple_build_assign (var, rhs);
3576 
3577       if (bb && init_stmt)
3578         insert_init_stmt (id, bb, init_stmt);
3579     }
3580   return init_stmt;
3581 }
3582 
3583 /* Generate code to initialize the parameters of the function at the
3584    top of the stack in ID from the GIMPLE_CALL STMT.  */
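/* Conceptually (an illustrative sketch), inlining a call foo (a, b + 1)
   creates initializations of the parameter replacements, roughly
     p1 = a;
     p2 = b + 1;
   although setup_one_parameter may avoid emitting some of them, e.g. by
   mapping SSA default definitions or constants directly.  */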
3585 
3586 static void
3587 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3588 			       tree fn, basic_block bb)
3589 {
3590   tree parms;
3591   size_t i;
3592   tree p;
3593   tree vars = NULL_TREE;
3594   tree static_chain = gimple_call_chain (stmt);
3595 
3596   /* Figure out what the parameters are.  */
3597   parms = DECL_ARGUMENTS (fn);
3598 
3599   /* Loop through the parameter declarations, replacing each with an
3600      equivalent VAR_DECL, appropriately initialized.  */
3601   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3602     {
3603       tree val;
3604       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3605       setup_one_parameter (id, p, val, fn, bb, &vars);
3606     }
3607   /* After remapping parameters remap their types.  This has to be done
3608      in a second loop over all parameters to appropriately remap
3609      variable sized arrays when the size is specified in a
3610      parameter following the array.  */
3611   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3612     {
3613       tree *varp = id->decl_map->get (p);
3614       if (varp && VAR_P (*varp))
3615 	{
3616 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3617 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3618 	  tree var = *varp;
3619 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3620 	  /* Also remap the default definition if it was remapped
3621 	     to the default definition of the parameter replacement
3622 	     by the parameter setup.  */
3623 	  if (def)
3624 	    {
3625 	      tree *defp = id->decl_map->get (def);
3626 	      if (defp
3627 		  && TREE_CODE (*defp) == SSA_NAME
3628 		  && SSA_NAME_VAR (*defp) == var)
3629 		TREE_TYPE (*defp) = TREE_TYPE (var);
3630 	    }
3631 	}
3632     }
3633 
3634   /* Initialize the static chain.  */
3635   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3636   gcc_assert (fn != current_function_decl);
3637   if (p)
3638     {
3639       /* No static chain?  Seems like a bug in tree-nested.c.  */
3640       gcc_assert (static_chain);
3641 
3642       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3643     }
3644 
3645   declare_inline_vars (id->block, vars);
3646 }
3647 
3648 
3649 /* Declare a return variable to replace the RESULT_DECL for the
3650    function we are calling.  The new variable is registered as the
3651    replacement for the RESULT_DECL, and a use of it that stands for
3652    the return value of the function is built and returned.
3653 
3654    RETURN_SLOT, if non-null, is the place where to store the result.  It
3655    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3656    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3657 
3658    The return value is a (possibly null) value that holds the result
3659    as seen by the caller.  */
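/* For a call such as x = foo (); (illustrative), MODIFY_DEST is x; when the
   types agree and reusing x is safe, x itself serves as the return variable,
   otherwise a new VAR_DECL is created, and the returned use is what the
   caller substitutes for the call's LHS.  */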
3660 
3661 static tree
3662 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3663 			 basic_block entry_bb)
3664 {
3665   tree callee = id->src_fn;
3666   tree result = DECL_RESULT (callee);
3667   tree callee_type = TREE_TYPE (result);
3668   tree caller_type;
3669   tree var, use;
3670 
3671   /* Handle type-mismatches in the function declaration return type
3672      vs. the call expression.  */
3673   if (modify_dest)
3674     caller_type = TREE_TYPE (modify_dest);
3675   else if (return_slot)
3676     caller_type = TREE_TYPE (return_slot);
3677   else /* No LHS on the call.  */
3678     caller_type = TREE_TYPE (TREE_TYPE (callee));
3679 
3680   /* We don't need to do anything for functions that don't return anything.  */
3681   if (VOID_TYPE_P (callee_type))
3682     return NULL_TREE;
3683 
3684   /* If there was a return slot, then the return value is the
3685      dereferenced address of that object.  */
3686   if (return_slot)
3687     {
3688       /* The front end shouldn't have used both return_slot and
3689 	 a modify expression.  */
3690       gcc_assert (!modify_dest);
3691       if (DECL_BY_REFERENCE (result))
3692 	{
3693 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3694 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3695 
3696 	  /* We are going to construct *&return_slot and we can't do that
3697 	     for variables believed not to be addressable.
3698 
3699 	     FIXME: This check can possibly trigger, because values returned
3700 	     via the return slot optimization are not believed to have their
3701 	     address taken by alias analysis.  */
3702 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3703 	  var = return_slot_addr;
3704 	  mark_addressable (return_slot);
3705 	}
3706       else
3707 	{
3708 	  var = return_slot;
3709 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3710 	  if (TREE_ADDRESSABLE (result))
3711 	    mark_addressable (var);
3712 	}
3713       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3714            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3715 	  && !DECL_GIMPLE_REG_P (result)
3716 	  && DECL_P (var))
3717 	DECL_GIMPLE_REG_P (var) = 0;
3718 
3719       if (!useless_type_conversion_p (callee_type, caller_type))
3720 	var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3721 
3722       use = NULL;
3723       goto done;
3724     }
3725 
3726   /* All types requiring non-trivial constructors should have been handled.  */
3727   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3728 
3729   /* Attempt to avoid creating a new temporary variable.  */
3730   if (modify_dest
3731       && TREE_CODE (modify_dest) != SSA_NAME)
3732     {
3733       bool use_it = false;
3734 
3735       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3736       if (!useless_type_conversion_p (callee_type, caller_type))
3737 	use_it = false;
3738 
3739       /* ??? If we're assigning to a variable sized type, then we must
3740 	 reuse the destination variable, because we've no good way to
3741 	 create variable sized temporaries at this point.  */
3742       else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3743 	use_it = true;
3744 
3745       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3746 	 reuse it as the result of the call directly.  Don't do this if
3747 	 it would promote MODIFY_DEST to addressable.  */
3748       else if (TREE_ADDRESSABLE (result))
3749 	use_it = false;
3750       else
3751 	{
3752 	  tree base_m = get_base_address (modify_dest);
3753 
3754 	  /* If the base isn't a decl, then it's a pointer, and we don't
3755 	     know where that's going to go.  */
3756 	  if (!DECL_P (base_m))
3757 	    use_it = false;
3758 	  else if (is_global_var (base_m))
3759 	    use_it = false;
3760 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3761 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3762 		   && !DECL_GIMPLE_REG_P (result)
3763 		   && DECL_GIMPLE_REG_P (base_m))
3764 	    use_it = false;
3765 	  else if (!TREE_ADDRESSABLE (base_m))
3766 	    use_it = true;
3767 	}
3768 
3769       if (use_it)
3770 	{
3771 	  var = modify_dest;
3772 	  use = NULL;
3773 	  goto done;
3774 	}
3775     }
3776 
3777   gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3778 
3779   var = copy_result_decl_to_var (result, id);
3780   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3781 
3782   /* Do not have the rest of GCC warn about this variable as it should
3783      not be visible to the user.  */
3784   TREE_NO_WARNING (var) = 1;
3785 
3786   declare_inline_vars (id->block, var);
3787 
3788   /* Build the use expr.  If the return type of the function was
3789      promoted, convert it back to the expected type.  */
3790   use = var;
3791   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3792     {
3793       /* If we can match up types by promotion/demotion do so.  */
3794       if (fold_convertible_p (caller_type, var))
3795 	use = fold_convert (caller_type, var);
3796       else
3797 	{
3798 	  /* ???  For valid programs we should not end up here.
3799 	     Still if we end up with truly mismatched types here, fall back
3800 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3801 	     passes.  */
3802 	  /* Prevent var from being written into SSA form.  */
3803 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3804 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3805 	    DECL_GIMPLE_REG_P (var) = false;
3806 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3807 	    TREE_ADDRESSABLE (var) = true;
3808 	  use = fold_build2 (MEM_REF, caller_type,
3809 			     build_fold_addr_expr (var),
3810 			     build_int_cst (ptr_type_node, 0));
3811 	}
3812     }
3813 
3814   STRIP_USELESS_TYPE_CONVERSION (use);
3815 
3816   if (DECL_BY_REFERENCE (result))
3817     {
3818       TREE_ADDRESSABLE (var) = 1;
3819       var = build_fold_addr_expr (var);
3820     }
3821 
3822  done:
3823   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3824      way, when the RESULT_DECL is encountered, it will be
3825      automatically replaced by the VAR_DECL.
3826 
3827      When returning by reference, ensure that RESULT_DECL remaps to
3828      gimple_val.  */
3829   if (DECL_BY_REFERENCE (result)
3830       && !is_gimple_val (var))
3831     {
3832       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3833       insert_decl_map (id, result, temp);
3834       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3835 	 its default_def SSA_NAME.  */
3836       if (gimple_in_ssa_p (id->src_cfun)
3837 	  && is_gimple_reg (result))
3838 	{
3839 	  temp = make_ssa_name (temp);
3840 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3841 	}
3842       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3843     }
3844   else
3845     insert_decl_map (id, result, var);
3846 
3847   /* Remember this so we can ignore it in remap_decls.  */
3848   id->retvar = var;
3849   return use;
3850 }
3851 
3852 /* Determine if the function can be copied.  If so return NULL.  If
3853    not return a string describing the reason for failure.  */
3854 
3855 const char *
3856 copy_forbidden (struct function *fun)
3857 {
3858   const char *reason = fun->cannot_be_copied_reason;
3859 
3860   /* Only examine the function once.  */
3861   if (fun->cannot_be_copied_set)
3862     return reason;
3863 
3864   /* We cannot copy a function that receives a non-local goto
3865      because we cannot remap the destination label used in the
3866      function that is performing the non-local goto.  */
3867   /* ??? Actually, this should be possible, if we work at it.
3868      No doubt there's just a handful of places that simply
3869      assume it doesn't happen and don't substitute properly.  */
3870   if (fun->has_nonlocal_label)
3871     {
3872       reason = G_("function %q+F can never be copied "
3873 		  "because it receives a non-local goto");
3874       goto fail;
3875     }
3876 
3877   if (fun->has_forced_label_in_static)
3878     {
3879       reason = G_("function %q+F can never be copied because it saves "
3880 		  "address of local label in a static variable");
3881       goto fail;
3882     }
3883 
3884  fail:
3885   fun->cannot_be_copied_reason = reason;
3886   fun->cannot_be_copied_set = true;
3887   return reason;
3888 }
3889 
3890 
3891 static const char *inline_forbidden_reason;
3892 
3893 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3894    iff a function cannot be inlined.  Also sets the reason why. */
3895 
3896 static tree
3897 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3898 			 struct walk_stmt_info *wip)
3899 {
3900   tree fn = (tree) wip->info;
3901   tree t;
3902   gimple *stmt = gsi_stmt (*gsi);
3903 
3904   switch (gimple_code (stmt))
3905     {
3906     case GIMPLE_CALL:
3907       /* Refuse to inline an alloca call unless the user explicitly forced it,
3908 	 as this may change the program's memory overhead drastically when the
3909 	 function using alloca is called in a loop.  In the GCC code present in
3910 	 SPEC2000, inlining into schedule_block caused it to require 2GB of
3911 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3912 	 VLA objects, as those can't cause unbounded growth (they're always
3913 	 wrapped inside stack_save/stack_restore regions).  */
3914       if (gimple_maybe_alloca_call_p (stmt)
3915 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3916 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3917 	{
3918 	  inline_forbidden_reason
3919 	    = G_("function %q+F can never be inlined because it uses "
3920 		 "alloca (override using the always_inline attribute)");
3921 	  *handled_ops_p = true;
3922 	  return fn;
3923 	}
3924 
3925       t = gimple_call_fndecl (stmt);
3926       if (t == NULL_TREE)
3927 	break;
3928 
3929       /* We cannot inline functions that call setjmp.  */
3930       if (setjmp_call_p (t))
3931 	{
3932 	  inline_forbidden_reason
3933 	    = G_("function %q+F can never be inlined because it uses setjmp");
3934 	  *handled_ops_p = true;
3935 	  return t;
3936 	}
3937 
3938       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3939 	switch (DECL_FUNCTION_CODE (t))
3940 	  {
3941 	    /* We cannot inline functions that take a variable number of
3942 	       arguments.  */
3943 	  case BUILT_IN_VA_START:
3944 	  case BUILT_IN_NEXT_ARG:
3945 	  case BUILT_IN_VA_END:
3946 	    inline_forbidden_reason
3947 	      = G_("function %q+F can never be inlined because it "
3948 		   "uses variable argument lists");
3949 	    *handled_ops_p = true;
3950 	    return t;
3951 
3952 	  case BUILT_IN_LONGJMP:
3953 	    /* We can't inline functions that call __builtin_longjmp at
3954 	       all.  The non-local goto machinery really requires the
3955 	       destination be in a different function.  If we allow the
3956 	       function calling __builtin_longjmp to be inlined into the
3957 	       function calling __builtin_setjmp, Things will Go Awry.  */
3958 	    inline_forbidden_reason
3959 	      = G_("function %q+F can never be inlined because "
3960 		   "it uses setjmp-longjmp exception handling");
3961 	    *handled_ops_p = true;
3962 	    return t;
3963 
3964 	  case BUILT_IN_NONLOCAL_GOTO:
3965 	    /* Similarly.  */
3966 	    inline_forbidden_reason
3967 	      = G_("function %q+F can never be inlined because "
3968 		   "it uses non-local goto");
3969 	    *handled_ops_p = true;
3970 	    return t;
3971 
3972 	  case BUILT_IN_RETURN:
3973 	  case BUILT_IN_APPLY_ARGS:
3974 	    /* If a __builtin_apply_args caller would be inlined,
3975 	       it would be saving arguments of the function it has
3976 	       been inlined into.  Similarly, __builtin_return would
3977 	       return from the function the call has been inlined into.  */
3978 	    inline_forbidden_reason
3979 	      = G_("function %q+F can never be inlined because "
3980 		   "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3981 	    *handled_ops_p = true;
3982 	    return t;
3983 
3984 	  default:
3985 	    break;
3986 	  }
3987       break;
3988 
3989     case GIMPLE_GOTO:
3990       t = gimple_goto_dest (stmt);
3991 
3992       /* We will not inline a function which uses computed goto.  The
3993 	 addresses of its local labels, which may be tucked into
3994 	 global storage, are of course not constant across
3995 	 instantiations, which causes unexpected behavior.  */
3996       if (TREE_CODE (t) != LABEL_DECL)
3997 	{
3998 	  inline_forbidden_reason
3999 	    = G_("function %q+F can never be inlined "
4000 		 "because it contains a computed goto");
4001 	  *handled_ops_p = true;
4002 	  return t;
4003 	}
4004       break;
4005 
4006     default:
4007       break;
4008     }
4009 
4010   *handled_ops_p = false;
4011   return NULL_TREE;
4012 }
4013 
4014 /* Return true if FNDECL is a function that cannot be inlined into
4015    another one.  */
4016 
4017 static bool
4018 inline_forbidden_p (tree fndecl)
4019 {
4020   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4021   struct walk_stmt_info wi;
4022   basic_block bb;
4023   bool forbidden_p = false;
4024 
4025   /* First check for shared reasons not to copy the code.  */
4026   inline_forbidden_reason = copy_forbidden (fun);
4027   if (inline_forbidden_reason != NULL)
4028     return true;
4029 
4030   /* Next, walk the statements of the function looking for
4031      constructs we can't handle, or that are non-optimal for inlining.  */
4032   hash_set<tree> visited_nodes;
4033   memset (&wi, 0, sizeof (wi));
4034   wi.info = (void *) fndecl;
4035   wi.pset = &visited_nodes;
4036 
4037   FOR_EACH_BB_FN (bb, fun)
4038     {
4039       gimple *ret;
4040       gimple_seq seq = bb_seq (bb);
4041       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4042       forbidden_p = (ret != NULL);
4043       if (forbidden_p)
4044 	break;
4045     }
4046 
4047   return forbidden_p;
4048 }
4049 
4050 /* Return false if the function FNDECL cannot be inlined on account of its
4051    attributes, true otherwise.  */
4052 static bool
4053 function_attribute_inlinable_p (const_tree fndecl)
4054 {
4055   if (targetm.attribute_table)
4056     {
4057       const_tree a;
4058 
4059       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4060 	{
4061 	  const_tree name = get_attribute_name (a);
4062 	  int i;
4063 
4064 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4065 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
4066 	      return targetm.function_attribute_inlinable_p (fndecl);
4067 	}
4068     }
4069 
4070   return true;
4071 }
4072 
4073 /* Returns nonzero if FN is a function that does not have any
4074    fundamental inline blocking properties.  */
4075 
4076 bool
4077 tree_inlinable_function_p (tree fn)
4078 {
4079   bool inlinable = true;
4080   bool do_warning;
4081   tree always_inline;
4082 
4083   /* If we've already decided this function shouldn't be inlined,
4084      there's no need to check again.  */
4085   if (DECL_UNINLINABLE (fn))
4086     return false;
4087 
4088   /* We only warn for functions declared `inline' by the user.  */
4089   do_warning = (opt_for_fn (fn, warn_inline)
4090 		&& DECL_DECLARED_INLINE_P (fn)
4091 		&& !DECL_NO_INLINE_WARNING_P (fn)
4092 		&& !DECL_IN_SYSTEM_HEADER (fn));
4093 
4094   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4095 
4096   if (flag_no_inline
4097       && always_inline == NULL)
4098     {
4099       if (do_warning)
4100 	warning (OPT_Winline, "function %q+F can never be inlined because it "
4101 		 "is suppressed using %<-fno-inline%>", fn);
4102       inlinable = false;
4103     }
4104 
4105   else if (!function_attribute_inlinable_p (fn))
4106     {
4107       if (do_warning)
4108         warning (OPT_Winline, "function %q+F can never be inlined because it "
4109                  "uses attributes conflicting with inlining", fn);
4110       inlinable = false;
4111     }
4112 
4113   else if (inline_forbidden_p (fn))
4114     {
4115       /* See if we should warn about uninlinable functions.  Previously,
4116 	 some of these warnings would be issued while trying to expand
4117 	 the function inline, but that would cause multiple warnings
4118 	 about functions that would for example call alloca.  But since
4119 	 this is a property of the function, just one warning is enough.
4120 	 As a bonus we can now give more details about the reason why a
4121 	 function is not inlinable.  */
4122       if (always_inline)
4123 	error (inline_forbidden_reason, fn);
4124       else if (do_warning)
4125 	warning (OPT_Winline, inline_forbidden_reason, fn);
4126 
4127       inlinable = false;
4128     }
4129 
4130   /* Squirrel away the result so that we don't have to check again.  */
4131   DECL_UNINLINABLE (fn) = !inlinable;
4132 
4133   return inlinable;
4134 }
4135 
4136 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
4137    word size, take a possible memcpy call into account, and return the
4138    cost based on whether we optimize for size or speed according to SPEED_P.  */
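/* For example, on a target with MOVE_MAX_PIECES == 8 (an assumed value for
   illustration), a 24-byte structure is charged 3 piecewise moves, whereas a
   structure too large for piecewise copying is charged 4, the approximate
   cost of a memcpy call (3 arguments plus the call itself).  */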
4139 
4140 int
4141 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4142 {
4143   HOST_WIDE_INT size;
4144 
4145   gcc_assert (!VOID_TYPE_P (type));
4146 
4147   if (TREE_CODE (type) == VECTOR_TYPE)
4148     {
4149       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4150       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4151       int orig_mode_size
4152 	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4153       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4154       return ((orig_mode_size + simd_mode_size - 1)
4155 	      / simd_mode_size);
4156     }
4157 
4158   size = int_size_in_bytes (type);
4159 
4160   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4161     /* Cost of a memcpy call, 3 arguments and the call.  */
4162     return 4;
4163   else
4164     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4165 }
4166 
4167 /* Returns the cost of operation CODE, according to WEIGHTS.  */
4168 
4169 static int
4170 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4171 			tree op1 ATTRIBUTE_UNUSED, tree op2)
4172 {
4173   switch (code)
4174     {
4175     /* These are "free" conversions, or their presumed cost
4176        is folded into other operations.  */
4177     case RANGE_EXPR:
4178     CASE_CONVERT:
4179     case COMPLEX_EXPR:
4180     case PAREN_EXPR:
4181     case VIEW_CONVERT_EXPR:
4182       return 0;
4183 
4184     /* Assign cost of 1 to usual operations.
4185        ??? We may consider mapping RTL costs to this.  */
4186     case COND_EXPR:
4187     case VEC_COND_EXPR:
4188     case VEC_PERM_EXPR:
4189 
4190     case PLUS_EXPR:
4191     case POINTER_PLUS_EXPR:
4192     case POINTER_DIFF_EXPR:
4193     case MINUS_EXPR:
4194     case MULT_EXPR:
4195     case MULT_HIGHPART_EXPR:
4196 
4197     case ADDR_SPACE_CONVERT_EXPR:
4198     case FIXED_CONVERT_EXPR:
4199     case FIX_TRUNC_EXPR:
4200 
4201     case NEGATE_EXPR:
4202     case FLOAT_EXPR:
4203     case MIN_EXPR:
4204     case MAX_EXPR:
4205     case ABS_EXPR:
4206     case ABSU_EXPR:
4207 
4208     case LSHIFT_EXPR:
4209     case RSHIFT_EXPR:
4210     case LROTATE_EXPR:
4211     case RROTATE_EXPR:
4212 
4213     case BIT_IOR_EXPR:
4214     case BIT_XOR_EXPR:
4215     case BIT_AND_EXPR:
4216     case BIT_NOT_EXPR:
4217 
4218     case TRUTH_ANDIF_EXPR:
4219     case TRUTH_ORIF_EXPR:
4220     case TRUTH_AND_EXPR:
4221     case TRUTH_OR_EXPR:
4222     case TRUTH_XOR_EXPR:
4223     case TRUTH_NOT_EXPR:
4224 
4225     case LT_EXPR:
4226     case LE_EXPR:
4227     case GT_EXPR:
4228     case GE_EXPR:
4229     case EQ_EXPR:
4230     case NE_EXPR:
4231     case ORDERED_EXPR:
4232     case UNORDERED_EXPR:
4233 
4234     case UNLT_EXPR:
4235     case UNLE_EXPR:
4236     case UNGT_EXPR:
4237     case UNGE_EXPR:
4238     case UNEQ_EXPR:
4239     case LTGT_EXPR:
4240 
4241     case CONJ_EXPR:
4242 
4243     case PREDECREMENT_EXPR:
4244     case PREINCREMENT_EXPR:
4245     case POSTDECREMENT_EXPR:
4246     case POSTINCREMENT_EXPR:
4247 
4248     case REALIGN_LOAD_EXPR:
4249 
4250     case WIDEN_SUM_EXPR:
4251     case WIDEN_MULT_EXPR:
4252     case DOT_PROD_EXPR:
4253     case SAD_EXPR:
4254     case WIDEN_MULT_PLUS_EXPR:
4255     case WIDEN_MULT_MINUS_EXPR:
4256     case WIDEN_LSHIFT_EXPR:
4257 
4258     case VEC_WIDEN_MULT_HI_EXPR:
4259     case VEC_WIDEN_MULT_LO_EXPR:
4260     case VEC_WIDEN_MULT_EVEN_EXPR:
4261     case VEC_WIDEN_MULT_ODD_EXPR:
4262     case VEC_UNPACK_HI_EXPR:
4263     case VEC_UNPACK_LO_EXPR:
4264     case VEC_UNPACK_FLOAT_HI_EXPR:
4265     case VEC_UNPACK_FLOAT_LO_EXPR:
4266     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4267     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4268     case VEC_PACK_TRUNC_EXPR:
4269     case VEC_PACK_SAT_EXPR:
4270     case VEC_PACK_FIX_TRUNC_EXPR:
4271     case VEC_PACK_FLOAT_EXPR:
4272     case VEC_WIDEN_LSHIFT_HI_EXPR:
4273     case VEC_WIDEN_LSHIFT_LO_EXPR:
4274     case VEC_DUPLICATE_EXPR:
4275     case VEC_SERIES_EXPR:
4276 
4277       return 1;
4278 
4279     /* A few special cases of expensive operations.  This is useful
4280        to avoid inlining functions having too many of these.  */
4281     case TRUNC_DIV_EXPR:
4282     case CEIL_DIV_EXPR:
4283     case FLOOR_DIV_EXPR:
4284     case ROUND_DIV_EXPR:
4285     case EXACT_DIV_EXPR:
4286     case TRUNC_MOD_EXPR:
4287     case CEIL_MOD_EXPR:
4288     case FLOOR_MOD_EXPR:
4289     case ROUND_MOD_EXPR:
4290     case RDIV_EXPR:
4291       if (TREE_CODE (op2) != INTEGER_CST)
4292         return weights->div_mod_cost;
4293       return 1;
4294 
4295     /* Bit-field insertion needs several shift and mask operations.  */
4296     case BIT_INSERT_EXPR:
4297       return 3;
4298 
4299     default:
4300       /* We expect a copy assignment with no operator.  */
4301       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4302       return 0;
4303     }
4304 }
4305 
4306 
4307 /* Estimate number of instructions that will be created by expanding
4308    the statements in the statement sequence STMTS.
4309    WEIGHTS contains weights attributed to various constructs.  */
4310 
4311 int
4312 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4313 {
4314   int cost;
4315   gimple_stmt_iterator gsi;
4316 
4317   cost = 0;
4318   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4319     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4320 
4321   return cost;
4322 }
4323 
4324 
4325 /* Estimate number of instructions that will be created by expanding STMT.
4326    WEIGHTS contains weights attributed to various constructs.  */
4327 
4328 int
4329 estimate_num_insns (gimple *stmt, eni_weights *weights)
4330 {
4331   unsigned cost, i;
4332   enum gimple_code code = gimple_code (stmt);
4333   tree lhs;
4334   tree rhs;
4335 
4336   switch (code)
4337     {
4338     case GIMPLE_ASSIGN:
4339       /* Try to estimate the cost of assignments.  We have two cases to
4340 	 deal with:
4341 	 1) Simple assignments to registers;
4342 	 2) Stores to things that must live in memory.  This includes
4343 	    "normal" stores to scalars, but also assignments of large
4344 	    structures, or constructors of big arrays;
4345 
4346 	 Let us look at the first two cases, assuming we have "a = b + C":
4347 	 <GIMPLE_ASSIGN <var_decl "a">
4348 	        <plus_expr <var_decl "b"> <constant C>>
4349 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4350 	 any target, because "a" usually ends up in a real register.  Hence
4351 	 the only cost of this expression comes from the PLUS_EXPR, and we
4352 	 can ignore the GIMPLE_ASSIGN.
4353 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4354 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4355 	 of moving something into "a", which we compute using the function
4356 	 estimate_move_cost.  */
4357       if (gimple_clobber_p (stmt))
4358 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4359 
4360       lhs = gimple_assign_lhs (stmt);
4361       rhs = gimple_assign_rhs1 (stmt);
4362 
4363       cost = 0;
4364 
4365       /* Account for the cost of moving to / from memory.  */
4366       if (gimple_store_p (stmt))
4367 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4368       if (gimple_assign_load_p (stmt))
4369 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4370 
4371       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4372       				      gimple_assign_rhs1 (stmt),
4373 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4374 				      == GIMPLE_BINARY_RHS
4375 				      ? gimple_assign_rhs2 (stmt) : NULL);
4376       break;
4377 
4378     case GIMPLE_COND:
4379       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4380       				         gimple_op (stmt, 0),
4381 				         gimple_op (stmt, 1));
4382       break;
4383 
4384     case GIMPLE_SWITCH:
4385       {
4386 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4387 	/* Take into account cost of the switch + guess 2 conditional jumps for
4388 	   each case label.
4389 
4390 	   TODO: once the switch expansion logic is sufficiently separated, we can
4391 	   do a better job of estimating the cost of the switch.  */
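	/* E.g. (illustrative count): a switch with 8 labels is charged
	   floor_log2 (8) * 2 == 6 when estimating time, and 8 * 2 == 16
	   when estimating size.  */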
4392 	if (weights->time_based)
4393 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4394 	else
4395 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4396       }
4397       break;
4398 
4399     case GIMPLE_CALL:
4400       {
4401 	tree decl;
4402 
4403 	if (gimple_call_internal_p (stmt))
4404 	  return 0;
4405 	else if ((decl = gimple_call_fndecl (stmt))
4406 		 && fndecl_built_in_p (decl))
4407 	  {
4408 	    /* Do not special-case builtins where we see the body.
4409 	       This just confuses the inliner.  */
4410 	    struct cgraph_node *node;
4411 	    if (!(node = cgraph_node::get (decl))
4412 		|| node->definition)
4413 	      ;
4414 	    /* For builtins that are likely expanded to nothing or
4415 	       inlined, do not account operand costs.  */
4416 	    else if (is_simple_builtin (decl))
4417 	      return 0;
4418 	    else if (is_inexpensive_builtin (decl))
4419 	      return weights->target_builtin_call_cost;
4420 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4421 	      {
4422 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4423 		   specialize the cheap expansion we do here.
4424 		   ???  This asks for a more general solution.  */
4425 		switch (DECL_FUNCTION_CODE (decl))
4426 		  {
4427 		    case BUILT_IN_POW:
4428 		    case BUILT_IN_POWF:
4429 		    case BUILT_IN_POWL:
4430 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4431 			  && (real_equal
4432 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4433 			       &dconst2)))
4434 			return estimate_operator_cost
4435 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4436 			     gimple_call_arg (stmt, 0));
4437 		      break;
4438 
4439 		    default:
4440 		      break;
4441 		  }
4442 	      }
4443 	  }
4444 
4445 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4446 	if (gimple_call_lhs (stmt))
4447 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4448 				      weights->time_based);
4449 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4450 	  {
4451 	    tree arg = gimple_call_arg (stmt, i);
4452 	    cost += estimate_move_cost (TREE_TYPE (arg),
4453 					weights->time_based);
4454 	  }
4455 	break;
4456       }
4457 
4458     case GIMPLE_RETURN:
4459       return weights->return_cost;
4460 
4461     case GIMPLE_GOTO:
4462     case GIMPLE_LABEL:
4463     case GIMPLE_NOP:
4464     case GIMPLE_PHI:
4465     case GIMPLE_PREDICT:
4466     case GIMPLE_DEBUG:
4467       return 0;
4468 
4469     case GIMPLE_ASM:
4470       {
4471 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4472 	/* 1000 means infinity. This avoids overflows later
4473 	   with very long asm statements.  */
4474 	if (count > 1000)
4475 	  count = 1000;
4476 	/* If this asm is asm inline, count anything as minimum size.  */
4477 	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4478 	  count = MIN (1, count);
4479 	return MAX (1, count);
4480       }
4481 
4482     case GIMPLE_RESX:
4483       /* This is either going to be an external function call with one
4484 	 argument, or two register copy statements plus a goto.  */
4485       return 2;
4486 
4487     case GIMPLE_EH_DISPATCH:
4488       /* ??? This is going to turn into a switch statement.  Ideally
4489 	 we'd have a look at the eh region and estimate the number of
4490 	 edges involved.  */
4491       return 10;
4492 
4493     case GIMPLE_BIND:
4494       return estimate_num_insns_seq (
4495 	       gimple_bind_body (as_a <gbind *> (stmt)),
4496 	       weights);
4497 
4498     case GIMPLE_EH_FILTER:
4499       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4500 
4501     case GIMPLE_CATCH:
4502       return estimate_num_insns_seq (gimple_catch_handler (
4503 				       as_a <gcatch *> (stmt)),
4504 				     weights);
4505 
4506     case GIMPLE_TRY:
4507       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4508               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4509 
4510     /* OMP directives are generally very expensive.  */
4511 
4512     case GIMPLE_OMP_RETURN:
4513     case GIMPLE_OMP_SECTIONS_SWITCH:
4514     case GIMPLE_OMP_ATOMIC_STORE:
4515     case GIMPLE_OMP_CONTINUE:
4516       /* ...except these, which are cheap.  */
4517       return 0;
4518 
4519     case GIMPLE_OMP_ATOMIC_LOAD:
4520       return weights->omp_cost;
4521 
4522     case GIMPLE_OMP_FOR:
4523       return (weights->omp_cost
4524               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4525               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4526 
4527     case GIMPLE_OMP_PARALLEL:
4528     case GIMPLE_OMP_TASK:
4529     case GIMPLE_OMP_CRITICAL:
4530     case GIMPLE_OMP_MASTER:
4531     case GIMPLE_OMP_TASKGROUP:
4532     case GIMPLE_OMP_ORDERED:
4533     case GIMPLE_OMP_SCAN:
4534     case GIMPLE_OMP_SECTION:
4535     case GIMPLE_OMP_SECTIONS:
4536     case GIMPLE_OMP_SINGLE:
4537     case GIMPLE_OMP_TARGET:
4538     case GIMPLE_OMP_TEAMS:
4539       return (weights->omp_cost
4540               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4541 
4542     case GIMPLE_TRANSACTION:
4543       return (weights->tm_cost
4544 	      + estimate_num_insns_seq (gimple_transaction_body (
4545 					  as_a <gtransaction *> (stmt)),
4546 					weights));
4547 
4548     default:
4549       gcc_unreachable ();
4550     }
4551 
4552   return cost;
4553 }
4554 
4555 /* Estimate number of instructions that will be created by expanding
4556    function FNDECL.  WEIGHTS contains weights attributed to various
4557    constructs.  */
4558 
4559 int
4560 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4561 {
4562   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4563   gimple_stmt_iterator bsi;
4564   basic_block bb;
4565   int n = 0;
4566 
4567   gcc_assert (my_function && my_function->cfg);
4568   FOR_EACH_BB_FN (bb, my_function)
4569     {
4570       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4571 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4572     }
4573 
4574   return n;
4575 }
4576 
4577 
4578 /* Initializes weights used by estimate_num_insns.  */
4579 
4580 void
4581 init_inline_once (void)
4582 {
4583   eni_size_weights.call_cost = 1;
4584   eni_size_weights.indirect_call_cost = 3;
4585   eni_size_weights.target_builtin_call_cost = 1;
4586   eni_size_weights.div_mod_cost = 1;
4587   eni_size_weights.omp_cost = 40;
4588   eni_size_weights.tm_cost = 10;
4589   eni_size_weights.time_based = false;
4590   eni_size_weights.return_cost = 1;
4591 
4592   /* Estimating time for call is difficult, since we have no idea what the
4593      called function does.  In the current uses of eni_time_weights,
4594      underestimating the cost does less harm than overestimating it, so
4595      we choose a rather small value here.  */
4596   eni_time_weights.call_cost = 10;
4597   eni_time_weights.indirect_call_cost = 15;
4598   eni_time_weights.target_builtin_call_cost = 1;
4599   eni_time_weights.div_mod_cost = 10;
4600   eni_time_weights.omp_cost = 40;
4601   eni_time_weights.tm_cost = 40;
4602   eni_time_weights.time_based = true;
4603   eni_time_weights.return_cost = 2;
4604 }
4605 
4606 
4607 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4608 
4609 static void
4610 prepend_lexical_block (tree current_block, tree new_block)
4611 {
4612   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4613   BLOCK_SUBBLOCKS (current_block) = new_block;
4614   BLOCK_SUPERCONTEXT (new_block) = current_block;
4615 }
4616 
4617 /* Add local variables from CALLEE to CALLER.  */
4618 
4619 static inline void
4620 add_local_variables (struct function *callee, struct function *caller,
4621 		     copy_body_data *id)
4622 {
4623   tree var;
4624   unsigned ix;
4625 
4626   FOR_EACH_LOCAL_DECL (callee, ix, var)
4627     if (!can_be_nonlocal (var, id))
4628       {
4629         tree new_var = remap_decl (var, id);
4630 
4631         /* Remap debug-expressions.  */
4632 	if (VAR_P (new_var)
4633 	    && DECL_HAS_DEBUG_EXPR_P (var)
4634 	    && new_var != var)
4635 	  {
4636 	    tree tem = DECL_DEBUG_EXPR (var);
4637 	    bool old_regimplify = id->regimplify;
4638 	    id->remapping_type_depth++;
4639 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4640 	    id->remapping_type_depth--;
4641 	    id->regimplify = old_regimplify;
4642 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4643 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4644 	  }
4645 	add_local_decl (caller, new_var);
4646       }
4647 }
4648 
4649 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4650    have brought in or introduced any debug stmts for SRCVAR.  */
4651 
4652 static inline void
4653 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4654 {
4655   tree *remappedvarp = id->decl_map->get (srcvar);
4656 
4657   if (!remappedvarp)
4658     return;
4659 
4660   if (!VAR_P (*remappedvarp))
4661     return;
4662 
4663   if (*remappedvarp == id->retvar)
4664     return;
4665 
4666   tree tvar = target_for_debug_bind (*remappedvarp);
4667   if (!tvar)
4668     return;
4669 
4670   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4671 					  id->call_stmt);
4672   gimple_seq_add_stmt (bindings, stmt);
4673 }
4674 
4675 /* For each inlined variable for which we may have debug bind stmts,
4676    add before GSI a final debug stmt resetting it, marking the end of
4677    its life, so that var-tracking knows it doesn't have to compute
4678    further locations for it.  */
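/* In dumps this shows up as a trailing
     # DEBUG i => NULL
   for each such inlined variable i (illustrative), emitted just before
   GSI.  */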
4679 
4680 static inline void
4681 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4682 {
4683   tree var;
4684   unsigned ix;
4685   gimple_seq bindings = NULL;
4686 
4687   if (!gimple_in_ssa_p (id->src_cfun))
4688     return;
4689 
4690   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4691     return;
4692 
4693   for (var = DECL_ARGUMENTS (id->src_fn);
4694        var; var = DECL_CHAIN (var))
4695     reset_debug_binding (id, var, &bindings);
4696 
4697   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4698     reset_debug_binding (id, var, &bindings);
4699 
4700   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4701 }
4702 
4703 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4704 
4705 static bool
4706 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4707 		    bitmap to_purge)
4708 {
4709   tree use_retvar;
4710   tree fn;
4711   hash_map<tree, tree> *dst;
4712   hash_map<tree, tree> *st = NULL;
4713   tree return_slot;
4714   tree modify_dest;
4715   struct cgraph_edge *cg_edge;
4716   cgraph_inline_failed_t reason;
4717   basic_block return_block;
4718   edge e;
4719   gimple_stmt_iterator gsi, stmt_gsi;
4720   bool successfully_inlined = false;
4721   bool purge_dead_abnormal_edges;
4722   gcall *call_stmt;
4723   unsigned int prop_mask, src_properties;
4724   struct function *dst_cfun;
4725   tree simduid;
4726   use_operand_p use;
4727   gimple *simtenter_stmt = NULL;
4728   vec<tree> *simtvars_save;
4729 
4730   /* The gimplifier uses input_location in too many places, such as
4731      internal_get_tmp_var ().  */
4732   location_t saved_location = input_location;
4733   input_location = gimple_location (stmt);
4734 
4735   /* From here on, we're only interested in CALL_EXPRs.  */
4736   call_stmt = dyn_cast <gcall *> (stmt);
4737   if (!call_stmt)
4738     goto egress;
4739 
4740   cg_edge = id->dst_node->get_edge (stmt);
4741   gcc_checking_assert (cg_edge);
4742   /* First, see if we can figure out what function is being called.
4743      If we cannot, then there is no hope of inlining the function.  */
4744   if (cg_edge->indirect_unknown_callee)
4745     goto egress;
4746   fn = cg_edge->callee->decl;
4747   gcc_checking_assert (fn);
4748 
4749   /* If FN is a declaration of a function in a nested scope that was
4750      globally declared inline, we don't set its DECL_INITIAL.
4751      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4752      C++ front-end uses it for cdtors to refer to their internal
4753      declarations, that are not real functions.  Fortunately those
4754      don't have trees to be saved, so we can tell by checking their
4755      gimple_body.  */
4756   if (!DECL_INITIAL (fn)
4757       && DECL_ABSTRACT_ORIGIN (fn)
4758       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4759     fn = DECL_ABSTRACT_ORIGIN (fn);
4760 
4761   /* Don't try to inline functions that are not well-suited to inlining.  */
4762   if (cg_edge->inline_failed)
4763     {
4764       reason = cg_edge->inline_failed;
4765       /* If this call was originally indirect, we do not want to emit any
4766 	 inlining-related warnings or sorry messages because there are no
4767 	 guarantees regarding those.  */
4768       if (cg_edge->indirect_inlining_edge)
4769 	goto egress;
4770 
4771       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4772           /* For extern inline functions that get redefined we always
4773 	     silently ignore the always_inline flag.  Better behavior would
4774 	     be to keep both bodies and use the extern inline body
4775 	     for inlining, but we can't do that because frontends overwrite
4776 	     the body.  */
4777 	  && !cg_edge->callee->redefined_extern_inline
4778 	  /* During early inline pass, report only when optimization is
4779 	     not turned on.  */
4780 	  && (symtab->global_info_ready
4781 	      || !optimize
4782 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4783 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4784 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4785 	{
4786 	  error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4787 		 cgraph_inline_failed_string (reason));
4788 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4789 	    inform (gimple_location (stmt), "called from here");
4790 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4791 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4792                    "called from this function");
4793 	}
4794       else if (opt_for_fn (fn, warn_inline)
4795 	       && DECL_DECLARED_INLINE_P (fn)
4796 	       && !DECL_NO_INLINE_WARNING_P (fn)
4797 	       && !DECL_IN_SYSTEM_HEADER (fn)
4798 	       && reason != CIF_UNSPECIFIED
4799 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4800 	       /* Do not warn about not inlined recursive calls.  */
4801 	       && !cg_edge->recursive_p ()
4802 	       /* Avoid warnings during early inline pass. */
4803 	       && symtab->global_info_ready)
4804 	{
4805 	  auto_diagnostic_group d;
4806 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4807 		       fn, _(cgraph_inline_failed_string (reason))))
4808 	    {
4809 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4810 		inform (gimple_location (stmt), "called from here");
4811 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4812 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4813                        "called from this function");
4814 	    }
4815 	}
4816       goto egress;
4817     }
4818   id->src_node = cg_edge->callee;
4819 
4820   /* If callee is thunk, all we need is to adjust the THIS pointer
4821      and redirect to function being thunked.  */
4822   if (id->src_node->thunk.thunk_p)
4823     {
4824       cgraph_edge *edge;
4825       tree virtual_offset = NULL;
4826       profile_count count = cg_edge->count;
4827       tree op;
4828       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4829 
4830       cgraph_edge::remove (cg_edge);
4831       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4832 					   gimple_uid (stmt),
4833 					   profile_count::one (),
4834 					   profile_count::one (),
4835 					   true);
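      /* The cloned edge is created with a dummy unit count; the real
	 profile count from the original call edge is restored below.  */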
4836       edge->count = count;
4837       if (id->src_node->thunk.virtual_offset_p)
4838         virtual_offset = size_int (id->src_node->thunk.virtual_value);
4839       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4840 			      NULL);
4841       gsi_insert_before (&iter, gimple_build_assign (op,
4842 						    gimple_call_arg (stmt, 0)),
4843 			 GSI_NEW_STMT);
4844       gcc_assert (id->src_node->thunk.this_adjusting);
4845       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4846 			 virtual_offset, id->src_node->thunk.indirect_offset);
4847 
4848       gimple_call_set_arg (stmt, 0, op);
4849       gimple_call_set_fndecl (stmt, edge->callee->decl);
4850       update_stmt (stmt);
4851       id->src_node->remove ();
4852       expand_call_inline (bb, stmt, id, to_purge);
4853       maybe_remove_unused_call_args (cfun, stmt);
4854       return true;
4855     }
4856   fn = cg_edge->callee->decl;
4857   cg_edge->callee->get_untransformed_body ();
4858 
4859   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4860     cg_edge->callee->verify ();
4861 
4862   /* We will be inlining this callee.  */
4863   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4864 
4865   /* Update the callers EH personality.  */
4866   if (DECL_FUNCTION_PERSONALITY (fn))
4867     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4868       = DECL_FUNCTION_PERSONALITY (fn);
4869 
4870   /* Split the block before the GIMPLE_CALL.  */
4871   stmt_gsi = gsi_for_stmt (stmt);
4872   gsi_prev (&stmt_gsi);
4873   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4874   bb = e->src;
4875   return_block = e->dest;
4876   remove_edge (e);
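  /* The call statement now heads RETURN_BLOCK; the fallthrough edge is
     removed because copy_body will splice the inlined CFG in between
     BB and RETURN_BLOCK.  */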
4877 
4878   /* If the GIMPLE_CALL was the last statement of BB, it may have
4879      been the source of abnormal edges.  In this case, schedule
4880      the removal of dead abnormal edges.  */
4881   gsi = gsi_start_bb (return_block);
4882   gsi_next (&gsi);
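  /* The call was the last statement of BB exactly when it is now the
     only statement in RETURN_BLOCK.  */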
4883   purge_dead_abnormal_edges = gsi_end_p (gsi);
4884 
4885   stmt_gsi = gsi_start_bb (return_block);
4886 
4887   /* Build a block containing code to initialize the arguments, the
4888      actual inline expansion of the body, and a label for the return
4889      statements within the function to jump to.  The type of the
4890      statement expression is the return type of the function call.
4891      ???  If the call does not have an associated block then we will
4892      remap all callee blocks to NULL, effectively dropping most of
4893      its debug information.  This should only happen for calls to
4894      artificial decls inserted by the compiler itself.  We need to
4895      either link the inlined blocks into the caller block tree or
4896      not refer to them in any way to not break GC for locations.  */
4897   if (tree block = gimple_block (stmt))
4898     {
4899       /* We do want to assign a BLOCK_SOURCE_LOCATION other than UNKNOWN_LOCATION
4900          so that inlined_function_outer_scope_p returns true on this BLOCK.  */
4901       location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4902       if (loc == UNKNOWN_LOCATION)
4903 	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4904       if (loc == UNKNOWN_LOCATION)
4905 	loc = BUILTINS_LOCATION;
4906       id->block = make_node (BLOCK);
4907       BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4908       BLOCK_SOURCE_LOCATION (id->block) = loc;
4909       prepend_lexical_block (block, id->block);
4910     }
4911 
4912   /* Local declarations will be replaced by their equivalents in this map.  */
4913   st = id->decl_map;
4914   id->decl_map = new hash_map<tree, tree>;
4915   dst = id->debug_map;
4916   id->debug_map = NULL;
4917   if (flag_stack_reuse != SR_NONE)
4918     id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4919 
4920   /* Record the function we are about to inline.  */
4921   id->src_fn = fn;
4922   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4923   id->reset_location = DECL_IGNORED_P (fn);
4924   id->call_stmt = call_stmt;
4925 
4926   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4927      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4928   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4929   simtvars_save = id->dst_simt_vars;
4930   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4931       && (simduid = bb->loop_father->simduid) != NULL_TREE
4932       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4933       && single_imm_use (simduid, &use, &simtenter_stmt)
4934       && is_gimple_call (simtenter_stmt)
4935       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4936     vec_alloc (id->dst_simt_vars, 0);
4937   else
4938     id->dst_simt_vars = NULL;
4939 
4940   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4941     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4942 
4943   /* If the src function contains an IFN_VA_ARG, then so will the dst
4944      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4945   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4946   src_properties = id->src_cfun->curr_properties & prop_mask;
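  /* Clear in DST_CFUN exactly those properties in PROP_MASK that the
     callee lacks; properties outside the mask are left untouched.  */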
4947   if (src_properties != prop_mask)
4948     dst_cfun->curr_properties &= src_properties | ~prop_mask;
4949   dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4950 
4951   gcc_assert (!id->src_cfun->after_inlining);
4952 
4953   id->entry_bb = bb;
4954   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4955     {
4956       gimple_stmt_iterator si = gsi_last_bb (bb);
4957       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4958       						   NOT_TAKEN),
4959 			GSI_NEW_STMT);
4960     }
4961   initialize_inlined_parameters (id, stmt, fn, bb);
4962   if (debug_nonbind_markers_p && debug_inline_points && id->block
4963       && inlined_function_outer_scope_p (id->block))
4964     {
4965       gimple_stmt_iterator si = gsi_last_bb (bb);
4966       gsi_insert_after (&si, gimple_build_debug_inline_entry
4967 			(id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4968 			GSI_NEW_STMT);
4969     }
4970 
4971   if (DECL_INITIAL (fn))
4972     {
4973       if (gimple_block (stmt))
4974 	{
4975 	  tree *var;
4976 
4977 	  prepend_lexical_block (id->block,
4978 				 remap_blocks (DECL_INITIAL (fn), id));
4979 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4980 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4981 				   == NULL_TREE));
4982 	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4983 	     otherwise for DWARF DW_TAG_formal_parameter will not be children of
4984 	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4985 	     under it.  The parameters can be then evaluated in the debugger,
4986 	     but don't show in backtraces.  */
4987 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4988 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4989 	      {
4990 		tree v = *var;
4991 		*var = TREE_CHAIN (v);
4992 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4993 		BLOCK_VARS (id->block) = v;
4994 	      }
4995 	    else
4996 	      var = &TREE_CHAIN (*var);
4997 	}
4998       else
4999 	remap_blocks_to_null (DECL_INITIAL (fn), id);
5000     }
5001 
5002   /* Return statements in the function body will be replaced by jumps
5003      to the RET_LABEL.  */
5004   gcc_assert (DECL_INITIAL (fn));
5005   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5006 
5007   /* Find the LHS to which the result of this call is assigned.  */
5008   return_slot = NULL;
5009   if (gimple_call_lhs (stmt))
5010     {
5011       modify_dest = gimple_call_lhs (stmt);
5012 
5013       /* The function which we are inlining might not return a value,
5014 	 in which case we should issue a warning that the function
5015 	 does not return a value.  In that case the optimizers will
5016 	 see that the variable to which the value is assigned was not
5017 	 initialized.  We do not want to issue a warning about that
5018 	 uninitialized variable.  */
5019       if (DECL_P (modify_dest))
5020 	TREE_NO_WARNING (modify_dest) = 1;
5021 
5022       if (gimple_call_return_slot_opt_p (call_stmt))
5023 	{
5024 	  return_slot = modify_dest;
5025 	  modify_dest = NULL;
5026 	}
5027     }
5028   else
5029     modify_dest = NULL;
5030 
5031   /* If we are inlining a call to the C++ operator new, we don't want
5032      to use type based alias analysis on the return value.  Otherwise
5033      we may get confused if the compiler sees that the inlined new
5034      function returns a pointer which was just deleted.  See bug
5035      33407.  */
5036   if (DECL_IS_OPERATOR_NEW_P (fn))
5037     {
5038       return_slot = NULL;
5039       modify_dest = NULL;
5040     }
5041 
5042   /* Declare the return variable for the function.  */
5043   use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5044 
5045   /* Add local vars in this inlined callee to caller.  */
5046   add_local_variables (id->src_cfun, cfun, id);
5047 
5048   if (id->src_node->clone.performed_splits)
5049     {
5050       /* Any calls from the inlined function will be turned into calls from the
5051 	 function we inline into.  We must preserve the notes about how
5052 	 parameters were split so that such calls can be redirected/updated.  */
5053       unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
5054       for (unsigned i = 0; i < len; i++)
5055 	{
5056 	  ipa_param_performed_split ps
5057 	    = (*id->src_node->clone.performed_splits)[i];
5058 	  ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5059 	  vec_safe_push (id->dst_node->clone.performed_splits, ps);
5060 	}
5061 
5062       if (flag_checking)
5063 	{
5064 	  len = vec_safe_length (id->dst_node->clone.performed_splits);
5065 	  for (unsigned i = 0; i < len; i++)
5066 	    {
5067 	      ipa_param_performed_split *ps1
5068 		= &(*id->dst_node->clone.performed_splits)[i];
5069 	      for (unsigned j = i + 1; j < len; j++)
5070 		{
5071 		  ipa_param_performed_split *ps2
5072 		    = &(*id->dst_node->clone.performed_splits)[j];
5073 		  gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5074 			      || ps1->unit_offset != ps2->unit_offset);
5075 		}
5076 	    }
5077 	}
5078     }
5079 
5080   if (dump_enabled_p ())
5081     {
5082       char buf[128];
5083       snprintf (buf, sizeof(buf), "%4.2f",
5084 		cg_edge->sreal_frequency ().to_double ());
5085       dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5086 		       call_stmt,
5087 		       "Inlining %C to %C with frequency %s\n",
5088 		       id->src_node, id->dst_node, buf);
5089       if (dump_file && (dump_flags & TDF_DETAILS))
5090 	{
5091 	  id->src_node->dump (dump_file);
5092 	  id->dst_node->dump (dump_file);
5093 	}
5094     }
5095 
5096   /* This is it.  Duplicate the callee body.  Assume callee is
5097      pre-gimplified.  Note that we must not alter the caller
5098      function in any way before this point, as this CALL_EXPR may be
5099      a self-referential call; if we're calling ourselves, we need to
5100      duplicate our body before altering anything.  */
5101   copy_body (id, bb, return_block, NULL);
5102 
5103   reset_debug_bindings (id, stmt_gsi);
5104 
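  /* With stack slot sharing enabled, end the lifetime of the inlined
     parameter copies that ended up in memory by clobbering them right
     after the inlined body.  */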
5105   if (flag_stack_reuse != SR_NONE)
5106     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5107       if (!TREE_THIS_VOLATILE (p))
5108 	{
5109 	  tree *varp = id->decl_map->get (p);
5110 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5111 	    {
5112 	      tree clobber = build_clobber (TREE_TYPE (*varp));
5113 	      gimple *clobber_stmt;
5114 	      clobber_stmt = gimple_build_assign (*varp, clobber);
5115 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
5116 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5117 	    }
5118 	}
5119 
5120   /* Reset the escaped solution.  */
5121   if (cfun->gimple_df)
5122     pt_solution_reset (&cfun->gimple_df->escaped);
5123 
5124   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
5125   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5126     {
5127       size_t nargs = gimple_call_num_args (simtenter_stmt);
5128       vec<tree> *vars = id->dst_simt_vars;
5129       auto_vec<tree> newargs (nargs + vars->length ());
5130       for (size_t i = 0; i < nargs; i++)
5131 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5132       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5133 	{
5134 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5135 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5136 	}
5137       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5138       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5139       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5140       gsi_replace (&gsi, g, false);
5141     }
5142   vec_free (id->dst_simt_vars);
5143   id->dst_simt_vars = simtvars_save;
5144 
5145   /* Clean up.  */
5146   if (id->debug_map)
5147     {
5148       delete id->debug_map;
5149       id->debug_map = dst;
5150     }
5151   delete id->decl_map;
5152   id->decl_map = st;
5153 
5154   /* Unlink the call's virtual operands before replacing it.  */
5155   unlink_stmt_vdef (stmt);
5156   if (gimple_vdef (stmt)
5157       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5158     release_ssa_name (gimple_vdef (stmt));
5159 
5160   /* If the inlined function returns a result that we care about,
5161      substitute the GIMPLE_CALL with an assignment of the return
5162      variable to the LHS of the call.  That is, if STMT was
5163      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
5164   if (use_retvar && gimple_call_lhs (stmt))
5165     {
5166       gimple *old_stmt = stmt;
5167       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5168       gimple_set_location (stmt, gimple_location (old_stmt));
5169       gsi_replace (&stmt_gsi, stmt, false);
5170       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5171       /* Append a clobber for id->retvar if easily possible.  */
5172       if (flag_stack_reuse != SR_NONE
5173 	  && id->retvar
5174 	  && VAR_P (id->retvar)
5175 	  && id->retvar != return_slot
5176 	  && id->retvar != modify_dest
5177 	  && !TREE_THIS_VOLATILE (id->retvar)
5178 	  && !is_gimple_reg (id->retvar)
5179 	  && !stmt_ends_bb_p (stmt))
5180 	{
5181 	  tree clobber = build_clobber (TREE_TYPE (id->retvar));
5182 	  gimple *clobber_stmt;
5183 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
5184 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5185 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5186 	}
5187     }
5188   else
5189     {
5190       /* Handle the case of inlining a function with no return
5191 	 statement, which causes the return value to become undefined.  */
5192       if (gimple_call_lhs (stmt)
5193 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5194 	{
5195 	  tree name = gimple_call_lhs (stmt);
5196 	  tree var = SSA_NAME_VAR (name);
5197 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
5198 
5199 	  if (def)
5200 	    {
5201 	      /* If the variable is used undefined, make this name
5202 		 undefined via a move.  */
5203 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5204 	      gsi_replace (&stmt_gsi, stmt, true);
5205 	    }
5206 	  else
5207 	    {
5208 	      if (!var)
5209 		{
5210 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5211 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5212 		}
5213 	      /* Otherwise make this variable undefined.  */
5214 	      gsi_remove (&stmt_gsi, true);
5215 	      set_ssa_default_def (cfun, var, name);
5216 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5217 	    }
5218 	}
5219       /* Replace with a clobber for id->retvar.  */
5220       else if (flag_stack_reuse != SR_NONE
5221 	       && id->retvar
5222 	       && VAR_P (id->retvar)
5223 	       && id->retvar != return_slot
5224 	       && id->retvar != modify_dest
5225 	       && !TREE_THIS_VOLATILE (id->retvar)
5226 	       && !is_gimple_reg (id->retvar))
5227 	{
5228 	  tree clobber = build_clobber (TREE_TYPE (id->retvar));
5229 	  gimple *clobber_stmt;
5230 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
5231 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
5232 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
5233 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5234 	}
5235       else
5236 	gsi_remove (&stmt_gsi, true);
5237     }
5238 
5239   if (purge_dead_abnormal_edges)
5240     bitmap_set_bit (to_purge, return_block->index);
5241 
5242   /* If the value of the new expression is ignored, that's OK.  We
5243      don't warn about this for CALL_EXPRs, so we shouldn't warn about
5244      the equivalent inlined version either.  */
5245   if (is_gimple_assign (stmt))
5246     {
5247       gcc_assert (gimple_assign_single_p (stmt)
5248 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5249       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5250     }
5251 
5252   id->add_clobbers_to_eh_landing_pads = 0;
5253 
5254   /* Output the inlining info for this abstract function, since it has been
5255      inlined.  If we don't do this now, we can lose the information about the
5256      variables in the function when the blocks get blown away as soon as we
5257      remove the cgraph node.  */
5258   if (gimple_block (stmt))
5259     (*debug_hooks->outlining_inline_function) (fn);
5260 
5261   /* Update callgraph if needed.  */
5262   cg_edge->callee->remove ();
5263 
5264   id->block = NULL_TREE;
5265   id->retvar = NULL_TREE;
5266   successfully_inlined = true;
5267 
5268  egress:
5269   input_location = saved_location;
5270   return successfully_inlined;
5271 }
5272 
5273 /* Expand call statements reachable from STMT_P.
5274    We can only have CALL_EXPRs as the "toplevel" tree code or nested
5275    in a MODIFY_EXPR.  */
5276 
5277 static bool
5278 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5279 			    bitmap to_purge)
5280 {
5281   gimple_stmt_iterator gsi;
5282   bool inlined = false;
5283 
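  /* Walk the statements backwards; the iterator is advanced past a call
     before the call is expanded, so it stays valid when expand_call_inline
     splits BB at that call.  */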
5284   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5285     {
5286       gimple *stmt = gsi_stmt (gsi);
5287       gsi_prev (&gsi);
5288 
5289       if (is_gimple_call (stmt)
5290 	  && !gimple_call_internal_p (stmt))
5291 	inlined |= expand_call_inline (bb, stmt, id, to_purge);
5292     }
5293 
5294   return inlined;
5295 }
5296 
5297 
5298 /* Walk all basic blocks created after FIRST and try to fold every statement
5299    in the STATEMENTS pointer set.  */
5300 
5301 static void
5302 fold_marked_statements (int first, hash_set<gimple *> *statements)
5303 {
5304   auto_bitmap to_purge;
5305 
5306   auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5307   auto_sbitmap visited (last_basic_block_for_fn (cfun));
5308   bitmap_clear (visited);
5309 
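  /* Do a DFS over the blocks reachable from the entry block.  Whenever the
     taken edge out of a block can be determined statically, follow only that
     edge, so blocks made unreachable by parameter propagation are never
     visited or folded.  */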
5310   stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5311   while (!stack.is_empty ())
5312     {
5313       /* Look at the edge on the top of the stack.  */
5314       edge e = stack.pop ();
5315       basic_block dest = e->dest;
5316 
5317       if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5318 	  || bitmap_bit_p (visited, dest->index))
5319 	continue;
5320 
5321       bitmap_set_bit (visited, dest->index);
5322 
5323       if (dest->index >= first)
5324 	for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5325 	     !gsi_end_p (gsi); gsi_next (&gsi))
5326 	  {
5327 	    if (!statements->contains (gsi_stmt (gsi)))
5328 	      continue;
5329 
5330 	    gimple *old_stmt = gsi_stmt (gsi);
5331 	    tree old_decl = (is_gimple_call (old_stmt)
5332 			     ? gimple_call_fndecl (old_stmt) : 0);
5333 	    if (old_decl && fndecl_built_in_p (old_decl))
5334 	      {
5335 		/* Folding builtins can create multiple instructions;
5336 		   we need to look at all of them.  */
5337 		gimple_stmt_iterator i2 = gsi;
5338 		gsi_prev (&i2);
5339 		if (fold_stmt (&gsi))
5340 		  {
5341 		    gimple *new_stmt;
5342 		    /* If a builtin at the end of a bb folded into nothing,
5343 		       the following loop won't work.  */
5344 		    if (gsi_end_p (gsi))
5345 		      {
5346 			cgraph_update_edges_for_call_stmt (old_stmt,
5347 							   old_decl, NULL);
5348 			break;
5349 		      }
5350 		    if (gsi_end_p (i2))
5351 		      i2 = gsi_start_bb (dest);
5352 		    else
5353 		      gsi_next (&i2);
5354 		    while (1)
5355 		      {
5356 			new_stmt = gsi_stmt (i2);
5357 			update_stmt (new_stmt);
5358 			cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5359 							   new_stmt);
5360 
5361 			if (new_stmt == gsi_stmt (gsi))
5362 			  {
5363 			    /* It is okay to check only the very last
5364 			       of these statements.  If it is a throwing
5365 			       statement nothing will change.  If it isn't,
5366 			       this can remove EH edges.  The only way that
5367 			       could be wrong is if some intermediate stmt
5368 			       throws but the last one doesn't; that would
5369 			       mean we'd have to split the block, which we
5370 			       can't do here and we'd lose anyway.  And as
5371 			       builtins probably never throw, this all
5372 			       is moot anyway.  */
5373 			    if (maybe_clean_or_replace_eh_stmt (old_stmt,
5374 								new_stmt))
5375 			      bitmap_set_bit (to_purge, dest->index);
5376 			    break;
5377 			  }
5378 			gsi_next (&i2);
5379 		      }
5380 		  }
5381 	      }
5382 	    else if (fold_stmt (&gsi))
5383 	      {
5384 		/* Re-read the statement from GSI as fold_stmt() may
5385 		   have changed it.  */
5386 		gimple *new_stmt = gsi_stmt (gsi);
5387 		update_stmt (new_stmt);
5388 
5389 		if (is_gimple_call (old_stmt)
5390 		    || is_gimple_call (new_stmt))
5391 		  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5392 						     new_stmt);
5393 
5394 		if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5395 		  bitmap_set_bit (to_purge, dest->index);
5396 	      }
5397 	  }
5398 
5399       if (EDGE_COUNT (dest->succs) > 0)
5400 	{
5401 	  /* Avoid warnings emitted from folding statements that
5402 	     became unreachable because of inlined function parameter
5403 	     propagation.  */
5404 	  e = find_taken_edge (dest, NULL_TREE);
5405 	  if (e)
5406 	    stack.quick_push (e);
5407 	  else
5408 	    {
5409 	      edge_iterator ei;
5410 	      FOR_EACH_EDGE (e, ei, dest->succs)
5411 		stack.safe_push (e);
5412 	    }
5413 	}
5414     }
5415 
5416   gimple_purge_all_dead_eh_edges (to_purge);
5417 }
5418 
5419 /* Expand calls to inline functions in the body of FN.  */
5420 
5421 unsigned int
5422 optimize_inline_calls (tree fn)
5423 {
5424   copy_body_data id;
5425   basic_block bb;
5426   int last = n_basic_blocks_for_fn (cfun);
5427   bool inlined_p = false;
5428 
5429   /* Clear out ID.  */
5430   memset (&id, 0, sizeof (id));
5431 
5432   id.src_node = id.dst_node = cgraph_node::get (fn);
5433   gcc_assert (id.dst_node->definition);
5434   id.dst_fn = fn;
5435   /* Or any functions that aren't finished yet.  */
5436   if (current_function_decl)
5437     id.dst_fn = current_function_decl;
5438 
5439   id.copy_decl = copy_decl_maybe_to_var;
5440   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5441   id.transform_new_cfg = false;
5442   id.transform_return_to_modify = true;
5443   id.transform_parameter = true;
5444   id.transform_lang_insert_block = NULL;
5445   id.statements_to_fold = new hash_set<gimple *>;
5446 
5447   push_gimplify_context ();
5448 
5449   /* We make no attempts to keep dominance info up-to-date.  */
5450   free_dominance_info (CDI_DOMINATORS);
5451   free_dominance_info (CDI_POST_DOMINATORS);
5452 
5453   /* Register specific gimple functions.  */
5454   gimple_register_cfg_hooks ();
5455 
5456   /* Reach the trees by walking over the CFG, and note the
5457      enclosing basic-blocks in the call edges.  */
5458   /* We walk the blocks going forward, because inlined function bodies
5459      will split id->current_basic_block, and the new blocks will
5460      follow it; we'll trudge through them, processing their CALL_EXPRs
5461      along the way.  */
5462   auto_bitmap to_purge;
5463   FOR_EACH_BB_FN (bb, cfun)
5464     inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5465 
5466   pop_gimplify_context (NULL);
5467 
5468   if (flag_checking)
5469     {
5470       struct cgraph_edge *e;
5471 
5472       id.dst_node->verify ();
5473 
5474       /* Double check that we inlined everything we are supposed to inline.  */
5475       for (e = id.dst_node->callees; e; e = e->next_callee)
5476 	gcc_assert (e->inline_failed);
5477     }
5478 
5479   /* If we didn't inline into the function there is nothing to do.  */
5480   if (!inlined_p)
5481     {
5482       delete id.statements_to_fold;
5483       return 0;
5484     }
5485 
5486   /* Fold queued statements.  */
5487   update_max_bb_count ();
5488   fold_marked_statements (last, id.statements_to_fold);
5489   delete id.statements_to_fold;
5490 
5491   /* Finally purge EH and abnormal edges from the call stmts we inlined.
5492      We need to do this after fold_marked_statements since that may walk
5493      the SSA use-def chain.  */
5494   unsigned i;
5495   bitmap_iterator bi;
5496   EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5497     {
5498       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5499       if (bb)
5500 	{
5501 	  gimple_purge_dead_eh_edges (bb);
5502 	  gimple_purge_dead_abnormal_call_edges (bb);
5503 	}
5504     }
5505 
5506   gcc_assert (!id.debug_stmts.exists ());
5507 
5508   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5509   number_blocks (fn);
5510 
5511   delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5512   id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5513 
5514   if (flag_checking)
5515     id.dst_node->verify ();
5516 
5517   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5518      not possible yet - the IPA passes might make various functions not
5519      throw and they don't care to proactively update local EH info.  This is
5520      done later in the fixup_cfg pass, which also executes the verification.  */
5521   return (TODO_update_ssa
5522 	  | TODO_cleanup_cfg
5523 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5524 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5525 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5526 	     ? TODO_rebuild_frequencies : 0));
5527 }
5528 
5529 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5530 
5531 tree
5532 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5533 {
5534   enum tree_code code = TREE_CODE (*tp);
5535   enum tree_code_class cl = TREE_CODE_CLASS (code);
5536 
5537   /* We make copies of most nodes.  */
5538   if (IS_EXPR_CODE_CLASS (cl)
5539       || code == TREE_LIST
5540       || code == TREE_VEC
5541       || code == TYPE_DECL
5542       || code == OMP_CLAUSE)
5543     {
5544       /* Because the chain gets clobbered when we make a copy, we save it
5545 	 here.  */
5546       tree chain = NULL_TREE, new_tree;
5547 
5548       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5549 	chain = TREE_CHAIN (*tp);
5550 
5551       /* Copy the node.  */
5552       new_tree = copy_node (*tp);
5553 
5554       *tp = new_tree;
5555 
5556       /* Now, restore the chain, if appropriate.  That will cause
5557 	 walk_tree to walk into the chain as well.  */
5558       if (code == PARM_DECL
5559 	  || code == TREE_LIST
5560 	  || code == OMP_CLAUSE)
5561 	TREE_CHAIN (*tp) = chain;
5562 
5563       /* For now, we don't update BLOCKs when we make copies.  So, we
5564 	 have to nullify all BIND_EXPRs.  */
5565       if (TREE_CODE (*tp) == BIND_EXPR)
5566 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5567     }
5568   else if (code == CONSTRUCTOR)
5569     {
5570       /* CONSTRUCTOR nodes need special handling because
5571          we need to duplicate the vector of elements.  */
5572       tree new_tree;
5573 
5574       new_tree = copy_node (*tp);
5575       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5576       *tp = new_tree;
5577     }
5578   else if (code == STATEMENT_LIST)
5579     /* We used to just abort on STATEMENT_LIST, but we can run into them
5580        with statement-expressions (c++/40975).  */
5581     copy_statement_list (tp);
5582   else if (TREE_CODE_CLASS (code) == tcc_type)
5583     *walk_subtrees = 0;
5584   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5585     *walk_subtrees = 0;
5586   else if (TREE_CODE_CLASS (code) == tcc_constant)
5587     *walk_subtrees = 0;
5588   return NULL_TREE;
5589 }
5590 
5591 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5592    information indicating to what new SAVE_EXPR this one should be mapped,
5593    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5594    the function into which the copy will be placed.  */
5595 
5596 static void
5597 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5598 {
5599   tree *n;
5600   tree t;
5601 
5602   /* See if we already encountered this SAVE_EXPR.  */
5603   n = st->get (*tp);
5604 
5605   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5606   if (!n)
5607     {
5608       t = copy_node (*tp);
5609 
5610       /* Remember this SAVE_EXPR.  */
5611       st->put (*tp, t);
5612       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5613       st->put (t, t);
5614     }
5615   else
5616     {
5617       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5618       *walk_subtrees = 0;
5619       t = *n;
5620     }
5621 
5622   /* Replace this SAVE_EXPR with the copy.  */
5623   *tp = t;
5624 }
5625 
5626 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5627    label, copies the declaration and enters it in the decl map in DATA (which
5628    is really a 'copy_body_data *').  */
5629 
5630 static tree
5631 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5632 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5633 		        struct walk_stmt_info *wi)
5634 {
5635   copy_body_data *id = (copy_body_data *) wi->info;
5636   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5637 
5638   if (stmt)
5639     {
5640       tree decl = gimple_label_label (stmt);
5641 
5642       /* Copy the decl and remember the copy.  */
5643       insert_decl_map (id, decl, id->copy_decl (decl, id));
5644     }
5645 
5646   return NULL_TREE;
5647 }
5648 
5649 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5650 						  struct walk_stmt_info *wi);
5651 
5652 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5653    Using the hash map pointed to by ST (which is really ID->decl_map),
5654    remaps all local declarations to appropriate replacements in gimple
5655    operands. */
5656 
5657 static tree
5658 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5659 {
5660   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5661   copy_body_data *id = (copy_body_data *) wi->info;
5662   hash_map<tree, tree> *st = id->decl_map;
5663   tree *n;
5664   tree expr = *tp;
5665 
5666   /* For recursive invocations this is no longer the LHS itself.  */
5667   bool is_lhs = wi->is_lhs;
5668   wi->is_lhs = false;
5669 
5670   if (TREE_CODE (expr) == SSA_NAME)
5671     {
5672       *tp = remap_ssa_name (*tp, id);
5673       *walk_subtrees = 0;
5674       if (is_lhs)
5675 	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5676     }
5677   /* Only a local declaration (variable or label).  */
5678   else if ((VAR_P (expr) && !TREE_STATIC (expr))
5679 	   || TREE_CODE (expr) == LABEL_DECL)
5680     {
5681       /* Lookup the declaration.  */
5682       n = st->get (expr);
5683 
5684       /* If it's there, remap it.  */
5685       if (n)
5686 	*tp = *n;
5687       *walk_subtrees = 0;
5688     }
5689   else if (TREE_CODE (expr) == STATEMENT_LIST
5690 	   || TREE_CODE (expr) == BIND_EXPR
5691 	   || TREE_CODE (expr) == SAVE_EXPR)
5692     gcc_unreachable ();
5693   else if (TREE_CODE (expr) == TARGET_EXPR)
5694     {
5695       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5696          It's OK for this to happen if it was part of a subtree that
5697          isn't immediately expanded, such as operand 2 of another
5698          TARGET_EXPR.  */
5699       if (!TREE_OPERAND (expr, 1))
5700 	{
5701 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5702 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5703 	}
5704     }
5705   else if (TREE_CODE (expr) == OMP_CLAUSE)
5706     {
5707       /* Before the omplower pass completes, some OMP clauses can contain
5708 	 sequences that are neither copied by gimple_seq_copy nor walked by
5709 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5710 	 in those situations, we have to copy and process them explicitly.  */
5711 
5712       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5713 	{
5714 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5715 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5716 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5717 	}
5718       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5719 	{
5720 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5721 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5722 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5723 	}
5724       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5725 	{
5726 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5727 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5728 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5729 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5730 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5731 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5732 	}
5733     }
5734 
5735   /* Keep iterating.  */
5736   return NULL_TREE;
5737 }
5738 
5739 
5740 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5741    Using the hash map pointed to by ST (which is really ID->decl_map),
5742    remaps all local declarations to appropriate replacements in gimple
5743    statements. */
5744 
5745 static tree
5746 replace_locals_stmt (gimple_stmt_iterator *gsip,
5747 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5748 		     struct walk_stmt_info *wi)
5749 {
5750   copy_body_data *id = (copy_body_data *) wi->info;
5751   gimple *gs = gsi_stmt (*gsip);
5752 
5753   if (gbind *stmt = dyn_cast <gbind *> (gs))
5754     {
5755       tree block = gimple_bind_block (stmt);
5756 
5757       if (block)
5758 	{
5759 	  remap_block (&block, id);
5760 	  gimple_bind_set_block (stmt, block);
5761 	}
5762 
5763       /* This will remap a lot of the same decls again, but this should be
5764 	 harmless.  */
5765       if (gimple_bind_vars (stmt))
5766 	{
5767 	  tree old_var, decls = gimple_bind_vars (stmt);
5768 
5769 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5770 	    if (!can_be_nonlocal (old_var, id)
5771 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5772 	      remap_decl (old_var, id);
5773 
5774 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5775 	  id->prevent_decl_creation_for_types = true;
5776 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5777 	  id->prevent_decl_creation_for_types = false;
5778 	}
5779     }
5780 
5781   /* Keep iterating.  */
5782   return NULL_TREE;
5783 }
5784 
5785 /* Create a copy of SEQ and remap all decls in it.  */
5786 
5787 static gimple_seq
5788 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5789 {
5790   if (!seq)
5791     return NULL;
5792 
5793   /* If there are any labels in OMP sequences, they can only be referred to
5794      from within the sequence itself, and therefore we can do both steps here.  */
5795   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5796   gimple_seq copy = gimple_seq_copy (seq);
5797   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5798   return copy;
5799 }
5800 
5801 /* Copies everything in SEQ and replaces variables and labels local to
5802    current_function_decl.  */
5803 
5804 gimple_seq
5805 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5806 {
5807   copy_body_data id;
5808   struct walk_stmt_info wi;
5809   gimple_seq copy;
5810 
5811   /* There's nothing to do for NULL_TREE.  */
5812   if (seq == NULL)
5813     return seq;
5814 
5815   /* Set up ID.  */
5816   memset (&id, 0, sizeof (id));
5817   id.src_fn = current_function_decl;
5818   id.dst_fn = current_function_decl;
5819   id.src_cfun = cfun;
5820   id.decl_map = new hash_map<tree, tree>;
5821   id.debug_map = NULL;
5822 
5823   id.copy_decl = copy_decl_no_change;
5824   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5825   id.transform_new_cfg = false;
5826   id.transform_return_to_modify = false;
5827   id.transform_parameter = false;
5828   id.transform_lang_insert_block = NULL;
5829 
5830   /* Walk the tree once to find local labels.  */
5831   memset (&wi, 0, sizeof (wi));
5832   hash_set<tree> visited;
5833   wi.info = &id;
5834   wi.pset = &visited;
5835   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5836 
5837   copy = gimple_seq_copy (seq);
5838 
5839   /* Walk the copy, remapping decls.  */
5840   memset (&wi, 0, sizeof (wi));
5841   wi.info = &id;
5842   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5843 
5844   /* Clean up.  */
5845   delete id.decl_map;
5846   if (id.debug_map)
5847     delete id.debug_map;
5848   if (id.dependence_map)
5849     {
5850       delete id.dependence_map;
5851       id.dependence_map = NULL;
5852     }
5853 
5854   return copy;
5855 }
5856 
5857 
5858 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5859 
5860 static tree
5861 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5862 {
5863   if (*tp == data)
5864     return (tree) data;
5865   else
5866     return NULL;
5867 }
5868 
5869 DEBUG_FUNCTION bool
5870 debug_find_tree (tree top, tree search)
5871 {
5872   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5873 }
5874 
5875 
5876 /* Declare the variables created by the inliner.  Add all the variables in
5877    VARS to BLOCK.  */
5878 
5879 static void
5880 declare_inline_vars (tree block, tree vars)
5881 {
5882   tree t;
5883   for (t = vars; t; t = DECL_CHAIN (t))
5884     {
5885       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5886       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5887       add_local_decl (cfun, t);
5888     }
5889 
5890   if (block)
5891     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5892 }
5893 
5894 /* Finish up the copy COPY of DECL: set up its debug info, abstract
5895    origin, RTL, mode and context so that the copy can be used in
5896    ID->dst_fn.  */
5897 
5898 tree
5899 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5900 {
5901   /* Don't generate debug information for the copy if we wouldn't have
5902      generated it for the copy either.  */
5903   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5904   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5905 
5906   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5907      declaration inspired this copy.  */
5908   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5909 
5910   /* The new variable/label has no RTL, yet.  */
5911   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5912       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5913     SET_DECL_RTL (copy, 0);
5914   /* For vector typed decls make sure to update DECL_MODE according
5915      to the new function context.  */
5916   if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5917     SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5918 
5919   /* These args would always appear unused, if not for this.  */
5920   TREE_USED (copy) = 1;
5921 
5922   /* Set the context for the new declaration.  */
5923   if (!DECL_CONTEXT (decl))
5924     /* Globals stay global.  */
5925     ;
5926   else if (DECL_CONTEXT (decl) != id->src_fn)
5927     /* Things that weren't in the scope of the function we're inlining
5928        from aren't in the scope we're inlining to, either.  */
5929     ;
5930   else if (TREE_STATIC (decl))
5931     /* Function-scoped static variables should stay in the original
5932        function.  */
5933     ;
5934   else
5935     {
5936       /* Ordinary automatic local variables are now in the scope of the
5937 	 new function.  */
5938       DECL_CONTEXT (copy) = id->dst_fn;
5939       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5940 	{
5941 	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5942 	    DECL_ATTRIBUTES (copy)
5943 	      = tree_cons (get_identifier ("omp simt private"), NULL,
5944 			   DECL_ATTRIBUTES (copy));
5945 	  id->dst_simt_vars->safe_push (copy);
5946 	}
5947     }
5948 
5949   return copy;
5950 }
5951 
5952 /* Create a new VAR_DECL that is identical in all respects to DECL except that
5953    DECL can be either a VAR_DECL, a PARM_DECL or RESULT_DECL.  The original
5954    DECL must come from ID->src_fn and the copy will be part of ID->dst_fn.  */
5955 
5956 tree
5957 copy_decl_to_var (tree decl, copy_body_data *id)
5958 {
5959   tree copy, type;
5960 
5961   gcc_assert (TREE_CODE (decl) == PARM_DECL
5962 	      || TREE_CODE (decl) == RESULT_DECL);
5963 
5964   type = TREE_TYPE (decl);
5965 
5966   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5967 		     VAR_DECL, DECL_NAME (decl), type);
5968   if (DECL_PT_UID_SET_P (decl))
5969     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5970   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5971   TREE_READONLY (copy) = TREE_READONLY (decl);
5972   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5973   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5974   DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5975 
5976   return copy_decl_for_dup_finish (id, decl, copy);
5977 }
5978 
5979 /* Like copy_decl_to_var, but create a return slot object instead of a
5980    pointer variable for return by invisible reference.  */
5981 
5982 static tree
5983 copy_result_decl_to_var (tree decl, copy_body_data *id)
5984 {
5985   tree copy, type;
5986 
5987   gcc_assert (TREE_CODE (decl) == PARM_DECL
5988 	      || TREE_CODE (decl) == RESULT_DECL);
5989 
5990   type = TREE_TYPE (decl);
5991   if (DECL_BY_REFERENCE (decl))
5992     type = TREE_TYPE (type);
5993 
5994   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5995 		     VAR_DECL, DECL_NAME (decl), type);
5996   if (DECL_PT_UID_SET_P (decl))
5997     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5998   TREE_READONLY (copy) = TREE_READONLY (decl);
5999   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6000   if (!DECL_BY_REFERENCE (decl))
6001     {
6002       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6003       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
6004     }
6005 
6006   return copy_decl_for_dup_finish (id, decl, copy);
6007 }
6008 
6009 tree
6010 copy_decl_no_change (tree decl, copy_body_data *id)
6011 {
6012   tree copy;
6013 
6014   copy = copy_node (decl);
6015 
6016   /* The COPY is not abstract; it will be generated in DST_FN.  */
6017   DECL_ABSTRACT_P (copy) = false;
6018   lang_hooks.dup_lang_specific_decl (copy);
6019 
6020   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6021      been taken; it's for internal bookkeeping in expand_goto_internal.  */
6022   if (TREE_CODE (copy) == LABEL_DECL)
6023     {
6024       TREE_ADDRESSABLE (copy) = 0;
6025       LABEL_DECL_UID (copy) = -1;
6026     }
6027 
6028   return copy_decl_for_dup_finish (id, decl, copy);
6029 }
6030 
6031 static tree
6032 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6033 {
6034   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6035     return copy_decl_to_var (decl, id);
6036   else
6037     return copy_decl_no_change (decl, id);
6038 }
6039 
6040 /* Return a copy of the function's argument tree without any modifications.  */
6041 
6042 static tree
6043 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6044 {
6045   tree arg, *parg;
6046   tree new_parm = NULL;
6047 
6048   parg = &new_parm;
6049   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6050     {
6051       tree new_tree = remap_decl (arg, id);
6052       if (TREE_CODE (new_tree) != PARM_DECL)
6053 	new_tree = id->copy_decl (arg, id);
6054       lang_hooks.dup_lang_specific_decl (new_tree);
6055       *parg = new_tree;
6056       parg = &DECL_CHAIN (new_tree);
6057     }
6058   return new_parm;
6059 }
6060 
6061 /* Return a copy of the function's static chain.  */
6062 static tree
6063 copy_static_chain (tree static_chain, copy_body_data * id)
6064 {
6065   tree *chain_copy, *pvar;
6066 
6067   chain_copy = &static_chain;
6068   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6069     {
6070       tree new_tree = remap_decl (*pvar, id);
6071       lang_hooks.dup_lang_specific_decl (new_tree);
6072       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6073       *pvar = new_tree;
6074     }
6075   return static_chain;
6076 }
6077 
6078 /* Return true if the function is allowed to be versioned.
6079    This is a guard for the versioning functionality.  */
6080 
6081 bool
6082 tree_versionable_function_p (tree fndecl)
6083 {
6084   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6085 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6086 }
6087 
6088 /* Update clone info after duplication.  */
6089 
6090 static void
6091 update_clone_info (copy_body_data * id)
6092 {
6093   vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6094     = id->dst_node->clone.performed_splits;
6095   if (cur_performed_splits)
6096     {
6097       unsigned len = cur_performed_splits->length ();
6098       for (unsigned i = 0; i < len; i++)
6099 	{
6100 	  ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6101 	  ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6102 	}
6103     }
6104 
6105   struct cgraph_node *node;
6106   if (!id->dst_node->clones)
6107     return;
6108   for (node = id->dst_node->clones; node != id->dst_node;)
6109     {
6110       /* First update replace maps to match the new body.  */
6111       if (node->clone.tree_map)
6112         {
6113 	  unsigned int i;
6114           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
6115 	    {
6116 	      struct ipa_replace_map *replace_info;
6117 	      replace_info = (*node->clone.tree_map)[i];
6118 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6119 	    }
6120 	}
6121       if (node->clone.performed_splits)
6122 	{
6123 	  unsigned len = vec_safe_length (node->clone.performed_splits);
6124 	  for (unsigned i = 0; i < len; i++)
6125 	    {
6126 	      ipa_param_performed_split *ps
6127 		= &(*node->clone.performed_splits)[i];
6128 	      ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6129 	    }
6130 	}
6131       if (unsigned len = vec_safe_length (cur_performed_splits))
6132 	{
6133 	  /* We do not want to add the current performed splits when we are
6134 	     saving a copy of the function body for later use during inlining;
6135 	     that would just duplicate all entries.  So check whether anything
6136 	     referring to the first dummy_decl is already present.  */
6137 	  unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6138 	  ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6139 	  for (unsigned i = 0; i < dst_len; i++)
6140 	    if ((*node->clone.performed_splits)[i].dummy_decl
6141 		== first->dummy_decl)
6142 	      {
6143 		len = 0;
6144 		break;
6145 	      }
6146 
6147 	  for (unsigned i = 0; i < len; i++)
6148 	    vec_safe_push (node->clone.performed_splits,
6149 			   (*cur_performed_splits)[i]);
6150 	  if (flag_checking)
6151 	    {
6152 	      for (unsigned i = 0; i < dst_len; i++)
6153 		{
6154 		  ipa_param_performed_split *ps1
6155 		    = &(*node->clone.performed_splits)[i];
6156 		  for (unsigned j = i + 1; j < dst_len; j++)
6157 		    {
6158 		      ipa_param_performed_split *ps2
6159 			= &(*node->clone.performed_splits)[j];
6160 		      gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6161 				  || ps1->unit_offset != ps2->unit_offset);
6162 		    }
6163 		}
6164 	    }
6165 	}
6166 
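      /* Advance NODE in a preorder walk of the clone tree rooted at
	 ID->dst_node: descend into child clones first, then move to the
	 next sibling clone, backtracking through clone_of when a subtree
	 is exhausted.  */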
6167       if (node->clones)
6168 	node = node->clones;
6169       else if (node->next_sibling_clone)
6170 	node = node->next_sibling_clone;
6171       else
6172 	{
6173 	  while (node != id->dst_node && !node->next_sibling_clone)
6174 	    node = node->clone_of;
6175 	  if (node != id->dst_node)
6176 	    node = node->next_sibling_clone;
6177 	}
6178     }
6179 }
6180 
6181 /* Create a copy of a function's tree.
6182    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6183    of the original function and the new copied function
6184    respectively.  In case we want to replace a DECL
6185    tree with another tree while duplicating the function's
6186    body, TREE_MAP represents the mapping between these
6187    trees. If UPDATE_CLONES is set, the call_stmt fields
6188    of edges of clones of the function will be updated.
6189 
6190    If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6191    the function parameters and return value) should be modified.
6192    If non-NULL, BLOCKS_TO_COPY determines what basic blocks to copy.
6193    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6194 */
6195 void
6196 tree_function_versioning (tree old_decl, tree new_decl,
6197 			  vec<ipa_replace_map *, va_gc> *tree_map,
6198 			  ipa_param_adjustments *param_adjustments,
6199 			  bool update_clones, bitmap blocks_to_copy,
6200 			  basic_block new_entry)
6201 {
6202   struct cgraph_node *old_version_node;
6203   struct cgraph_node *new_version_node;
6204   copy_body_data id;
6205   tree p;
6206   unsigned i;
6207   struct ipa_replace_map *replace_info;
6208   basic_block old_entry_block, bb;
6209   auto_vec<gimple *, 10> init_stmts;
6210   tree vars = NULL_TREE;
6211 
6212   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6213 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
6214   DECL_POSSIBLY_INLINED (old_decl) = 1;
6215 
6216   old_version_node = cgraph_node::get (old_decl);
6217   gcc_checking_assert (old_version_node);
6218   new_version_node = cgraph_node::get (new_decl);
6219   gcc_checking_assert (new_version_node);
6220 
6221   /* Copy over debug args.  */
6222   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6223     {
6224       vec<tree, va_gc> **new_debug_args, **old_debug_args;
6225       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6226       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6227       old_debug_args = decl_debug_args_lookup (old_decl);
6228       if (old_debug_args)
6229 	{
6230 	  new_debug_args = decl_debug_args_insert (new_decl);
6231 	  *new_debug_args = vec_safe_copy (*old_debug_args);
6232 	}
6233     }
6234 
6235   /* Output the inlining info for this abstract function, since it has been
6236      inlined.  If we don't do this now, we can lose the information about the
6237      variables in the function when the blocks get blown away as soon as we
6238      remove the cgraph node.  */
6239   (*debug_hooks->outlining_inline_function) (old_decl);
6240 
6241   DECL_ARTIFICIAL (new_decl) = 1;
6242   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6243   if (DECL_ORIGIN (old_decl) == old_decl)
6244     old_version_node->used_as_abstract_origin = true;
6245   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6246 
6247   /* Prepare the data structures for the tree copy.  */
6248   memset (&id, 0, sizeof (id));
6249 
6250   /* Generate a new name for the new version. */
6251   id.statements_to_fold = new hash_set<gimple *>;
6252 
6253   id.decl_map = new hash_map<tree, tree>;
6254   id.debug_map = NULL;
6255   id.src_fn = old_decl;
6256   id.dst_fn = new_decl;
6257   id.src_node = old_version_node;
6258   id.dst_node = new_version_node;
6259   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6260   id.blocks_to_copy = blocks_to_copy;
6261 
6262   id.copy_decl = copy_decl_no_change;
6263   id.transform_call_graph_edges
6264     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6265   id.transform_new_cfg = true;
6266   id.transform_return_to_modify = false;
6267   id.transform_parameter = false;
6268   id.transform_lang_insert_block = NULL;
6269 
6270   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6271     (DECL_STRUCT_FUNCTION (old_decl));
6272   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6273   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6274   initialize_cfun (new_decl, old_decl,
6275 		   new_entry ? new_entry->count : old_entry_block->count);
6276   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6277     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6278       = id.src_cfun->gimple_df->ipa_pta;
6279 
6280   /* Copy the function's static chain.  */
6281   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6282   if (p)
6283     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6284       = copy_static_chain (p, &id);
6285 
6286   auto_vec<int, 16> new_param_indices;
6287   ipa_param_adjustments *old_param_adjustments
6288     = old_version_node->clone.param_adjustments;
6289   if (old_param_adjustments)
6290     old_param_adjustments->get_updated_indices (&new_param_indices);
6291 
6292   /* If there's a tree_map, prepare for substitution.  */
6293   if (tree_map)
6294     for (i = 0; i < tree_map->length (); i++)
6295       {
6296 	gimple *init;
6297 	replace_info = (*tree_map)[i];
6298 
6299 	int p = replace_info->parm_num;
6300 	if (old_param_adjustments)
6301 	  p = new_param_indices[p];
6302 
6303 	tree parm;
6304 	tree req_type, new_type;
6305 
6306 	for (parm = DECL_ARGUMENTS (old_decl); p;
6307 	     parm = DECL_CHAIN (parm))
6308 	  p--;
6309 	tree old_tree = parm;
6310 	req_type = TREE_TYPE (parm);
6311 	new_type = TREE_TYPE (replace_info->new_tree);
6312 	if (!useless_type_conversion_p (req_type, new_type))
6313 	  {
6314 	    if (fold_convertible_p (req_type, replace_info->new_tree))
6315 	      replace_info->new_tree
6316 		= fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
6317 	    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6318 	      replace_info->new_tree
6319 		= fold_build1 (VIEW_CONVERT_EXPR, req_type,
6320 			       replace_info->new_tree);
6321 	    else
6322 	      {
6323 		if (dump_file)
6324 		  {
6325 		    fprintf (dump_file, "    const ");
6326 		    print_generic_expr (dump_file,
6327 					replace_info->new_tree);
6328 		    fprintf (dump_file,
6329 			     "  can't be converted to param ");
6330 		    print_generic_expr (dump_file, parm);
6331 		    fprintf (dump_file, "\n");
6332 		  }
6333 		old_tree = NULL;
6334 	      }
6335 	  }
6336 
6337 	if (old_tree)
6338 	  {
6339 	    init = setup_one_parameter (&id, old_tree, replace_info->new_tree,
6340 					id.src_fn, NULL, &vars);
6341 	    if (init)
6342 	      init_stmts.safe_push (init);
6343 	  }
6344       }
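  /* As an illustration (the names are made up), an IPA-CP style TREE_MAP
     entry that replaces parameter 1 with the constant 4 leads
     setup_one_parameter to map that PARM_DECL to a fresh local and to
     queue an initializer roughly of the form "parm.1 = 4" on INIT_STMTS;
     those statements are emitted into the block split off the entry edge
     further down.  */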
6345 
6346   ipa_param_body_adjustments *param_body_adjs = NULL;
6347   if (param_adjustments)
6348     {
6349       param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6350 							new_decl, old_decl,
6351 							&id, &vars, tree_map);
6352       id.param_body_adjs = param_body_adjs;
6353       DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6354     }
6355   else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6356     DECL_ARGUMENTS (new_decl)
6357       = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6358 
6359   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6360   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6361 
6362   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6363 
6364   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6365     /* Add local vars.  */
6366     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6367 
6368   if (DECL_RESULT (old_decl) == NULL_TREE)
6369     ;
6370   else if (param_adjustments && param_adjustments->m_skip_return
6371 	   && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6372     {
6373       tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6374 						   &id);
6375       declare_inline_vars (NULL, resdecl_repl);
6376       insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6377 
6378       DECL_RESULT (new_decl)
6379 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6380 		      RESULT_DECL, NULL_TREE, void_type_node);
6381       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6382       DECL_IS_MALLOC (new_decl) = false;
6383       cfun->returns_struct = 0;
6384       cfun->returns_pcc_struct = 0;
6385     }
6386   else
6387     {
6388       tree old_name;
6389       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6390       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6391       if (gimple_in_ssa_p (id.src_cfun)
6392 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6393 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6394 	{
6395 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6396 	  insert_decl_map (&id, old_name, new_name);
6397 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6398 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6399 	}
6400     }
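  /* To summarize the result handling above: when PARAM_ADJUSTMENTS asks
     for the return value to be dropped, the old RESULT_DECL is demoted to
     an ordinary local and the new function gets a void RESULT_DECL;
     otherwise the result is remapped as-is, including its SSA default
     definition when the source function is in SSA form and returns by
     reference.  */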
6401 
6402   /* Set up the destination function's loop tree.  */
6403   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6404     {
6405       cfun->curr_properties &= ~PROP_loops;
6406       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6407       cfun->curr_properties |= PROP_loops;
6408     }
6409 
6410   /* Copy the function's body.  */
6411   copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6412 	     new_entry);
6413 
6414   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6415   number_blocks (new_decl);
6416 
6417   /* We want to create the BB unconditionally, so that the addition of
6418      debug stmts doesn't affect BB count, which may in the end cause
6419      codegen differences.  */
6420   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6421   while (init_stmts.length ())
6422     insert_init_stmt (&id, bb, init_stmts.pop ());
6423   update_clone_info (&id);
6424 
6425   /* Remap the nonlocal_goto_save_area, if any.  */
6426   if (cfun->nonlocal_goto_save_area)
6427     {
6428       struct walk_stmt_info wi;
6429 
6430       memset (&wi, 0, sizeof (wi));
6431       wi.info = &id;
6432       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6433     }
6434 
6435   /* Clean up.  */
6436   delete id.decl_map;
6437   if (id.debug_map)
6438     delete id.debug_map;
6439   free_dominance_info (CDI_DOMINATORS);
6440   free_dominance_info (CDI_POST_DOMINATORS);
6441 
6442   update_max_bb_count ();
6443   fold_marked_statements (0, id.statements_to_fold);
6444   delete id.statements_to_fold;
6445   delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6446   if (id.dst_node->definition)
6447     cgraph_edge::rebuild_references ();
6448   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6449     {
6450       calculate_dominance_info (CDI_DOMINATORS);
6451       fix_loop_structure (NULL);
6452     }
6453   update_ssa (TODO_update_ssa);
6454 
6455   /* After partial cloning we need to rescale frequencies so that they
6456      are within the proper range in the cloned function.  */
6457   if (new_entry)
6458     {
6459       struct cgraph_edge *e;
6460       rebuild_frequencies ();
6461 
6462       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6463       for (e = new_version_node->callees; e; e = e->next_callee)
6464 	{
6465 	  basic_block bb = gimple_bb (e->call_stmt);
6466 	  e->count = bb->count;
6467 	}
6468       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6469 	{
6470 	  basic_block bb = gimple_bb (e->call_stmt);
6471 	  e->count = bb->count;
6472 	}
6473     }
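  /* At this point the clone's profile is internally consistent again:
     its entry count comes from the rebuilt entry block and every direct
     and indirect call edge has been resynchronized with the count of the
     block containing its call statement.  */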
6474 
6475   if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6476     {
6477       vec<tree, va_gc> **debug_args = NULL;
6478       unsigned int len = 0;
6479       unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6480 
6481       for (i = 0; i < reset_len; i++)
6482 	{
6483 	  tree parm = param_body_adjs->m_reset_debug_decls[i];
6484 	  gcc_assert (is_gimple_reg (parm));
6485 	  tree ddecl;
6486 
6487 	  if (debug_args == NULL)
6488 	    {
6489 	      debug_args = decl_debug_args_insert (new_decl);
6490 	      len = vec_safe_length (*debug_args);
6491 	    }
6492 	  ddecl = make_node (DEBUG_EXPR_DECL);
6493 	  DECL_ARTIFICIAL (ddecl) = 1;
6494 	  TREE_TYPE (ddecl) = TREE_TYPE (parm);
6495 	  SET_DECL_MODE (ddecl, DECL_MODE (parm));
6496 	  vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6497 	  vec_safe_push (*debug_args, ddecl);
6498 	}
6499       if (debug_args != NULL)
6500 	{
6501 	  /* On the callee side, add
6502 	     DEBUG D#Y s=> parm
6503 	     DEBUG var => D#Y
6504 	     stmts to the first bb where var is a VAR_DECL created for the
6505 	     optimized away parameter in DECL_INITIAL block.  This hints
6506 	     in the debug info that var (whose DECL_ORIGIN is the parm
6507 	     PARM_DECL) is optimized away, but could be looked up at the
6508 	     call site as value of D#X there.  */
6509 	  tree vexpr;
6510 	  gimple_stmt_iterator cgsi
6511 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6512 	  gimple *def_temp;
6513 	  tree var = vars;
6514 	  i = vec_safe_length (*debug_args);
6515 	  do
6516 	    {
6517 	      i -= 2;
6518 	      while (var != NULL_TREE
6519 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6520 		var = TREE_CHAIN (var);
6521 	      if (var == NULL_TREE)
6522 		break;
6523 	      vexpr = make_node (DEBUG_EXPR_DECL);
6524 	      tree parm = (**debug_args)[i];
6525 	      DECL_ARTIFICIAL (vexpr) = 1;
6526 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6527 	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
6528 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6529 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6530 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6531 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6532 	    }
6533 	  while (i > len);
6534 	}
6535     }
6536   delete param_body_adjs;
6537   free_dominance_info (CDI_DOMINATORS);
6538   free_dominance_info (CDI_POST_DOMINATORS);
6539 
6540   gcc_assert (!id.debug_stmts.exists ());
6541   pop_cfun ();
6542   return;
6543 }
6544 
6545 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to
6546    integrate the callee and return the inlined body on success.  */
6547 
6548 tree
6549 maybe_inline_call_in_expr (tree exp)
6550 {
6551   tree fn = get_callee_fndecl (exp);
6552 
6553   /* We can only try to inline "const" functions.  */
6554   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6555     {
6556       call_expr_arg_iterator iter;
6557       copy_body_data id;
6558       tree param, arg, t;
6559       hash_map<tree, tree> decl_map;
6560 
6561       /* Remap the parameters.  */
6562       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6563 	   param;
6564 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6565 	decl_map.put (param, arg);
6566 
6567       memset (&id, 0, sizeof (id));
6568       id.src_fn = fn;
6569       id.dst_fn = current_function_decl;
6570       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6571       id.decl_map = &decl_map;
6572 
6573       id.copy_decl = copy_decl_no_change;
6574       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6575       id.transform_new_cfg = false;
6576       id.transform_return_to_modify = true;
6577       id.transform_parameter = true;
6578       id.transform_lang_insert_block = NULL;
6579 
6580       /* Make sure not to unshare trees behind the front-end's back
6581 	 since front-end specific mechanisms may rely on sharing.  */
6582       id.regimplify = false;
6583       id.do_not_unshare = true;
6584 
6585       /* We're not inside any EH region.  */
6586       id.eh_lp_nr = 0;
6587 
6588       t = copy_tree_body (&id);
6589 
6590       /* We can only return something suitable for use in a GENERIC
6591 	 expression tree.  */
6592       if (TREE_CODE (t) == MODIFY_EXPR)
6593 	return TREE_OPERAND (t, 1);
6594     }
6595 
6596   return NULL_TREE;
6597 }
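
/* For illustration (not taken from an actual caller): given a function
   declared with the "const" attribute whose GENERIC body was saved, e.g.

     static int sq (int i) __attribute__ ((const));
     static int sq (int i) { return i * i; }

   handing the call tree for "sq (3)" to maybe_inline_call_in_expr copies
   the body with I mapped to 3; because the return was turned into a
   MODIFY_EXPR, its right-hand side, here "3 * 3", is what gets returned.
   Whether this succeeds depends on the saved body reducing to such a
   single assignment; otherwise NULL_TREE is returned.  */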
6598 
6599 /* Duplicate a type, fields and all.  */
6600 
6601 tree
6602 build_duplicate_type (tree type)
6603 {
6604   struct copy_body_data id;
6605 
6606   memset (&id, 0, sizeof (id));
6607   id.src_fn = current_function_decl;
6608   id.dst_fn = current_function_decl;
6609   id.src_cfun = cfun;
6610   id.decl_map = new hash_map<tree, tree>;
6611   id.debug_map = NULL;
6612   id.copy_decl = copy_decl_no_change;
6613 
6614   type = remap_type_1 (type, &id);
6615 
6616   delete id.decl_map;
6617   if (id.debug_map)
6618     delete id.debug_map;
6619 
6620   TYPE_CANONICAL (type) = type;
6621 
6622   return type;
6623 }
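
/* Note that the duplicate is made its own canonical type above
   (TYPE_CANONICAL (type) = type), so type-identity checks that rely on
   canonical types treat it as distinct from the type it was copied
   from.  */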
6624 
6625 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6626    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6627    evaluation.  */
6628 
6629 tree
6630 copy_fn (tree fn, tree& parms, tree& result)
6631 {
6632   copy_body_data id;
6633   tree param;
6634   hash_map<tree, tree> decl_map;
6635 
6636   tree *p = &parms;
6637   *p = NULL_TREE;
6638 
6639   memset (&id, 0, sizeof (id));
6640   id.src_fn = fn;
6641   id.dst_fn = current_function_decl;
6642   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6643   id.decl_map = &decl_map;
6644 
6645   id.copy_decl = copy_decl_no_change;
6646   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6647   id.transform_new_cfg = false;
6648   id.transform_return_to_modify = false;
6649   id.transform_parameter = true;
6650   id.transform_lang_insert_block = NULL;
6651 
6652   /* Make sure not to unshare trees behind the front-end's back
6653      since front-end specific mechanisms may rely on sharing.  */
6654   id.regimplify = false;
6655   id.do_not_unshare = true;
6656   id.do_not_fold = true;
6657 
6658   /* We're not inside any EH region.  */
6659   id.eh_lp_nr = 0;
6660 
6661   /* Remap the parameters and result and return them to the caller.  */
6662   for (param = DECL_ARGUMENTS (fn);
6663        param;
6664        param = DECL_CHAIN (param))
6665     {
6666       *p = remap_decl (param, &id);
6667       p = &DECL_CHAIN (*p);
6668     }
6669 
6670   if (DECL_RESULT (fn))
6671     result = remap_decl (DECL_RESULT (fn), &id);
6672   else
6673     result = NULL_TREE;
6674 
6675   return copy_tree_body (&id);
6676 }
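
/* A minimal sketch of how a caller such as the C++ constexpr evaluator
   might use the above (the variable names are illustrative, not the
   front end's actual code):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);
     // Walk PARMS in parallel with the call's arguments, bind each
     // remapped PARM_DECL to its argument value, evaluate BODY, and
     // read the value bound to RESULT.

   Because do_not_unshare and do_not_fold are set, the caller sees the
   function's trees essentially as the front end built them.  */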
6677