1 /* Tree inlining.
2    Copyright (C) 2001-2017 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "tree-chkp.h"
60 #include "attribs.h"
61 
62 
63 /* I'm not real happy about this, but we need to handle gimple and
64    non-gimple trees.  */
65 
66 /* Inlining, Cloning, Versioning, Parallelization
67 
68    Inlining: a function body is duplicated, but the PARM_DECLs are
69    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
70    MODIFY_EXPRs that store to a dedicated returned-value variable.
71    The duplicated eh_region info of the copy will later be appended
72    to the info for the caller; the eh_region info in copied throwing
73    statements and RESX statements are adjusted accordingly.
74 
75    Cloning: (only in C++) We have one body for a con/de/structor, and
76    multiple function decls, each with a unique parameter list.
77    Duplicate the body, using the given splay tree; some parameters
78    will become constants (like 0 or 1).
79 
80    Versioning: a function body is duplicated and the result is a new
81    function rather than into blocks of an existing function as with
82    inlining.  Some parameters will become constants.
83 
84    Parallelization: a region of a function is duplicated resulting in
85    a new function.  Variables may be replaced with complex expressions
86    to enable shared variable semantics.
87 
88    All of these will simultaneously look up any callgraph edges.  If
89    we're going to inline the duplicated function body, and the given
90    function has some cloned callgraph nodes (one for each place this
91    function will be inlined) those callgraph edges will be duplicated.
92    If we're cloning the body, those callgraph edges will be
93    updated to point into the new body.  (Note that the original
94    callgraph node and edge list will not be altered.)
95 
96    See the CALL_EXPR handling case in copy_tree_body_r ().  */
97 
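/* As a rough illustration of the inlining rewrite described above (the
   names used here are purely illustrative, not what the compiler
   actually emits), a call

       y = f (3);     where     int f (int x) { return x + 1; }

   is replaced by a copy of f's body in which the PARM_DECL x has been
   remapped to a local VAR_DECL initialized from the argument and the
   RETURN_EXPR has become an assignment to a returned-value variable:

       x.copy = 3;
       retval = x.copy + 1;
       y = retval;  */
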
98 /* To Do:
99 
100    o In order to make inlining-on-trees work, we pessimized
101      function-local static constants.  In particular, they are now
102      always output, even when not addressed.  Fix this by treating
103      function-local static constants just like global static
104      constants; the back-end already knows not to output them if they
105      are not needed.
106 
107    o Provide heuristics to clamp inlining of recursive template
108      calls?  */
109 
110 
111 /* Weights that estimate_num_insns uses to estimate the size of the
112    produced code.  */
113 
114 eni_weights eni_size_weights;
115 
116 /* Weights that estimate_num_insns uses to estimate the time necessary
117    to execute the produced code.  */
118 
119 eni_weights eni_time_weights;
120 
121 /* Prototypes.  */
122 
123 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
124 				     basic_block);
125 static void remap_block (tree *, copy_body_data *);
126 static void copy_bind_expr (tree *, int *, copy_body_data *);
127 static void declare_inline_vars (tree, tree);
128 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
129 static void prepend_lexical_block (tree current_block, tree new_block);
130 static tree copy_decl_to_var (tree, copy_body_data *);
131 static tree copy_result_decl_to_var (tree, copy_body_data *);
132 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
133 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
134 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
135 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
136 
137 /* Insert a tree->tree mapping for ID.  Although the name suggests
138    that the trees should be variables, it is used for more than that.  */
139 
140 void
141 insert_decl_map (copy_body_data *id, tree key, tree value)
142 {
143   id->decl_map->put (key, value);
144 
145   /* Always insert an identity map as well.  If we see this same new
146      node again, we won't want to duplicate it a second time.  */
147   if (key != value)
148     id->decl_map->put (value, value);
149 }
150 
151 /* Insert a tree->tree mapping for ID.  This is only used for
152    variables.  */
153 
154 static void
155 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 {
157   if (!gimple_in_ssa_p (id->src_cfun))
158     return;
159 
160   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
161     return;
162 
163   if (!target_for_debug_bind (key))
164     return;
165 
166   gcc_assert (TREE_CODE (key) == PARM_DECL);
167   gcc_assert (VAR_P (value));
168 
169   if (!id->debug_map)
170     id->debug_map = new hash_map<tree, tree>;
171 
172   id->debug_map->put (key, value);
173 }
174 
175 /* If nonzero, we're remapping the contents of inlined debug
176    statements.  If negative, an error has occurred, such as a
177    reference to a variable that isn't available in the inlined
178    context.  */
179 static int processing_debug_stmt = 0;
180 
181 /* Construct new SSA name for old NAME. ID is the inline context.  */
182 
183 static tree
184 remap_ssa_name (tree name, copy_body_data *id)
185 {
186   tree new_tree, var;
187   tree *n;
188 
189   gcc_assert (TREE_CODE (name) == SSA_NAME);
190 
191   n = id->decl_map->get (name);
192   if (n)
193     return unshare_expr (*n);
194 
195   if (processing_debug_stmt)
196     {
197       if (SSA_NAME_IS_DEFAULT_DEF (name)
198 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
199 	  && id->entry_bb == NULL
200 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
201 	{
202 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
203 	  gimple *def_temp;
204 	  gimple_stmt_iterator gsi;
205 	  tree val = SSA_NAME_VAR (name);
206 
207 	  n = id->decl_map->get (val);
208 	  if (n != NULL)
209 	    val = *n;
210 	  if (TREE_CODE (val) != PARM_DECL)
211 	    {
212 	      processing_debug_stmt = -1;
213 	      return name;
214 	    }
215 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
216 	  DECL_ARTIFICIAL (vexpr) = 1;
217 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
218 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
219 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
220 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
221 	  return vexpr;
222 	}
223 
224       processing_debug_stmt = -1;
225       return name;
226     }
227 
228   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
229   var = SSA_NAME_VAR (name);
230   if (!var
231       || (!SSA_NAME_IS_DEFAULT_DEF (name)
232 	  && VAR_P (var)
233 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
234 	  && DECL_ARTIFICIAL (var)
235 	  && DECL_IGNORED_P (var)
236 	  && !DECL_NAME (var)))
237     {
238       struct ptr_info_def *pi;
239       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
240       if (!var && SSA_NAME_IDENTIFIER (name))
241 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
242       insert_decl_map (id, name, new_tree);
243       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
244 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
245       /* At least IPA points-to info can be directly transferred.  */
246       if (id->src_cfun->gimple_df
247 	  && id->src_cfun->gimple_df->ipa_pta
248 	  && POINTER_TYPE_P (TREE_TYPE (name))
249 	  && (pi = SSA_NAME_PTR_INFO (name))
250 	  && !pi->pt.anything)
251 	{
252 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
253 	  new_pi->pt = pi->pt;
254 	}
255       return new_tree;
256     }
257 
258   /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
259      that in copy_bb.  */
260   new_tree = remap_decl (var, id);
261 
262   /* We might've substituted a constant or another SSA_NAME for
263      the variable.
264 
265      Replace the SSA name representing the RESULT_DECL by the variable
266      during inlining: this saves us from the need to introduce a PHI node
267      in case the return value is only partly initialized.  */
268   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
269       && (!SSA_NAME_VAR (name)
270 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
271 	  || !id->transform_return_to_modify))
272     {
273       struct ptr_info_def *pi;
274       new_tree = make_ssa_name (new_tree);
275       insert_decl_map (id, name, new_tree);
276       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
277 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
278       /* At least IPA points-to info can be directly transferred.  */
279       if (id->src_cfun->gimple_df
280 	  && id->src_cfun->gimple_df->ipa_pta
281 	  && POINTER_TYPE_P (TREE_TYPE (name))
282 	  && (pi = SSA_NAME_PTR_INFO (name))
283 	  && !pi->pt.anything)
284 	{
285 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
286 	  new_pi->pt = pi->pt;
287 	}
288       if (SSA_NAME_IS_DEFAULT_DEF (name))
289 	{
290 	  /* By inlining a function that has an uninitialized variable, we might
291 	     extend its lifetime (the variable might get reused).  This causes
292 	     an ICE if we end up extending the lifetime of an SSA name across an
293 	     abnormal edge, and it also increases register pressure.
294 
295 	     We simply initialize all uninitialized vars to 0, except when we
296 	     are inlining into the very first BB.  We can avoid this for all
297 	     BBs that are not inside strongly connected regions of the CFG,
298 	     but this is expensive to test.  */
299 	  if (id->entry_bb
300 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
301 	      && (!SSA_NAME_VAR (name)
302 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
303 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
304 					     0)->dest
305 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
306 	    {
307 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
308 	      gimple *init_stmt;
309 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
310 
311 	      init_stmt = gimple_build_assign (new_tree, zero);
312 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
313 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
314 	    }
315 	  else
316 	    {
317 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
318 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
319 	    }
320 	}
321     }
322   else
323     insert_decl_map (id, name, new_tree);
324   return new_tree;
325 }
326 
327 /* Remap DECL during the copying of the BLOCK tree for the function.  */
328 
329 tree
330 remap_decl (tree decl, copy_body_data *id)
331 {
332   tree *n;
333 
334   /* We only remap local variables in the current function.  */
335 
336   /* See if we have remapped this declaration.  */
337 
338   n = id->decl_map->get (decl);
339 
340   if (!n && processing_debug_stmt)
341     {
342       processing_debug_stmt = -1;
343       return decl;
344     }
345 
346   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
347      necessary DECLs have already been remapped and we do not want to duplicate
348      a decl coming from outside of the sequence we are copying.  */
349   if (!n
350       && id->prevent_decl_creation_for_types
351       && id->remapping_type_depth > 0
352       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
353     return decl;
354 
355   /* If we didn't already have an equivalent for this declaration, create one
356      now.  */
357   if (!n)
358     {
359       /* Make a copy of the variable or label.  */
360       tree t = id->copy_decl (decl, id);
361 
362       /* Remember it, so that if we encounter this local entity again
363 	 we can reuse this copy.  Do this early because remap_type may
364 	 need this decl for TYPE_STUB_DECL.  */
365       insert_decl_map (id, decl, t);
366 
367       if (!DECL_P (t))
368 	return t;
369 
370       /* Remap types, if necessary.  */
371       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
372       if (TREE_CODE (t) == TYPE_DECL)
373 	{
374 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
375 
376 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
377 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
378 	     is not set on the TYPE_DECL, for example in LTO mode.  */
379 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
380 	    {
381 	      tree x = build_variant_type_copy (TREE_TYPE (t));
382 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
383 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
384 	      DECL_ORIGINAL_TYPE (t) = x;
385 	    }
386 	}
387 
388       /* Remap sizes as necessary.  */
389       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
390       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
391 
392       /* If fields, do likewise for offset and qualifier.  */
393       if (TREE_CODE (t) == FIELD_DECL)
394 	{
395 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
396 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
397 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
398 	}
399 
400       return t;
401     }
402 
403   if (id->do_not_unshare)
404     return *n;
405   else
406     return unshare_expr (*n);
407 }
408 
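/* Helper for remap_type.  TYPE is known to require remapping (it is
   variably modified); build a new copy of it, register the mapping in
   ID's decl map, and remap any embedded types, fields and sizes.  */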
409 static tree
410 remap_type_1 (tree type, copy_body_data *id)
411 {
412   tree new_tree, t;
413 
414   /* We do need a copy.  Build and register it now.  If this is a pointer or
415      reference type, remap the designated type and make a new pointer or
416      reference type.  */
417   if (TREE_CODE (type) == POINTER_TYPE)
418     {
419       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
420 					 TYPE_MODE (type),
421 					 TYPE_REF_CAN_ALIAS_ALL (type));
422       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
423 	new_tree = build_type_attribute_qual_variant (new_tree,
424 						      TYPE_ATTRIBUTES (type),
425 						      TYPE_QUALS (type));
426       insert_decl_map (id, type, new_tree);
427       return new_tree;
428     }
429   else if (TREE_CODE (type) == REFERENCE_TYPE)
430     {
431       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
432 					    TYPE_MODE (type),
433 					    TYPE_REF_CAN_ALIAS_ALL (type));
434       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
435 	new_tree = build_type_attribute_qual_variant (new_tree,
436 						      TYPE_ATTRIBUTES (type),
437 						      TYPE_QUALS (type));
438       insert_decl_map (id, type, new_tree);
439       return new_tree;
440     }
441   else
442     new_tree = copy_node (type);
443 
444   insert_decl_map (id, type, new_tree);
445 
446   /* This is a new type, not a copy of an old type.  Need to reassociate
447      variants.  We can handle everything except the main variant lazily.  */
448   t = TYPE_MAIN_VARIANT (type);
449   if (type != t)
450     {
451       t = remap_type (t, id);
452       TYPE_MAIN_VARIANT (new_tree) = t;
453       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
454       TYPE_NEXT_VARIANT (t) = new_tree;
455     }
456   else
457     {
458       TYPE_MAIN_VARIANT (new_tree) = new_tree;
459       TYPE_NEXT_VARIANT (new_tree) = NULL;
460     }
461 
462   if (TYPE_STUB_DECL (type))
463     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
464 
465   /* Lazily create pointer and reference types.  */
466   TYPE_POINTER_TO (new_tree) = NULL;
467   TYPE_REFERENCE_TO (new_tree) = NULL;
468 
469   /* Copy all types that may contain references to local variables; be sure to
470      preserve sharing between the type and its main variant when possible.  */
471   switch (TREE_CODE (new_tree))
472     {
473     case INTEGER_TYPE:
474     case REAL_TYPE:
475     case FIXED_POINT_TYPE:
476     case ENUMERAL_TYPE:
477     case BOOLEAN_TYPE:
478       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
479 	{
480 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
481 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
482 
483 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
484 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
485 	}
486       else
487 	{
488 	  t = TYPE_MIN_VALUE (new_tree);
489 	  if (t && TREE_CODE (t) != INTEGER_CST)
490 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
491 
492 	  t = TYPE_MAX_VALUE (new_tree);
493 	  if (t && TREE_CODE (t) != INTEGER_CST)
494 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
495 	}
496       return new_tree;
497 
498     case FUNCTION_TYPE:
499       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
500 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
501 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
502       else
503         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
504       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
505 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
506 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
507       else
508         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
509       return new_tree;
510 
511     case ARRAY_TYPE:
512       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
513 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
514 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
515       else
516 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
517 
518       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
519 	{
520 	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
521 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
522 	}
523       else
524 	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
525       break;
526 
527     case RECORD_TYPE:
528     case UNION_TYPE:
529     case QUAL_UNION_TYPE:
530       if (TYPE_MAIN_VARIANT (type) != type
531 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
532 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
533       else
534 	{
535 	  tree f, nf = NULL;
536 
537 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
538 	    {
539 	      t = remap_decl (f, id);
540 	      DECL_CONTEXT (t) = new_tree;
541 	      DECL_CHAIN (t) = nf;
542 	      nf = t;
543 	    }
544 	  TYPE_FIELDS (new_tree) = nreverse (nf);
545 	}
546       break;
547 
548     case OFFSET_TYPE:
549     default:
550       /* Shouldn't have been thought variable sized.  */
551       gcc_unreachable ();
552     }
553 
554   /* All variants of the type share the same size, so use the already remapped data.  */
555   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
556     {
557       gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
558       gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
559 
560       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
561       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
562     }
563   else
564     {
565       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
566       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
567     }
568 
569   return new_tree;
570 }
571 
572 /* Helper function for remap_type_2, called through walk_tree.  */
573 
574 static tree
575 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
576 {
577   copy_body_data *id = (copy_body_data *) data;
578 
579   if (TYPE_P (*tp))
580     *walk_subtrees = 0;
581 
582   else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
583     return *tp;
584 
585   return NULL_TREE;
586 }
587 
588 /* Return true if TYPE needs to be remapped because remap_decl on any
589    needed embedded decl returns something other than that decl.  */
590 
591 static bool
592 remap_type_2 (tree type, copy_body_data *id)
593 {
594   tree t;
595 
596 #define RETURN_TRUE_IF_VAR(T) \
597   do								\
598     {								\
599       tree _t = (T);						\
600       if (_t)							\
601 	{							\
602 	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
603 	    return true;					\
604 	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
605 	      && walk_tree (&_t, remap_type_3, id, NULL))	\
606 	    return true;					\
607 	}							\
608     }								\
609   while (0)
610 
611   switch (TREE_CODE (type))
612     {
613     case POINTER_TYPE:
614     case REFERENCE_TYPE:
615     case FUNCTION_TYPE:
616     case METHOD_TYPE:
617       return remap_type_2 (TREE_TYPE (type), id);
618 
619     case INTEGER_TYPE:
620     case REAL_TYPE:
621     case FIXED_POINT_TYPE:
622     case ENUMERAL_TYPE:
623     case BOOLEAN_TYPE:
624       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
625       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
626       return false;
627 
628     case ARRAY_TYPE:
629       if (remap_type_2 (TREE_TYPE (type), id)
630 	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
631 	return true;
632       break;
633 
634     case RECORD_TYPE:
635     case UNION_TYPE:
636     case QUAL_UNION_TYPE:
637       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
638 	if (TREE_CODE (t) == FIELD_DECL)
639 	  {
640 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
641 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
642 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
643 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
644 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
645 	  }
646       break;
647 
648     default:
649       return false;
650     }
651 
652   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
653   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
654   return false;
655 #undef RETURN_TRUE_IF_VAR
656 }
657 
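/* Remap TYPE for the new body described by ID and return the result.
   Only variably modified types actually need a copy (made by
   remap_type_1); other types are mapped to themselves.  */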
658 tree
659 remap_type (tree type, copy_body_data *id)
660 {
661   tree *node;
662   tree tmp;
663 
664   if (type == NULL)
665     return type;
666 
667   /* See if we have remapped this type.  */
668   node = id->decl_map->get (type);
669   if (node)
670     return *node;
671 
672   /* The type only needs remapping if it's variably modified.  */
673   if (! variably_modified_type_p (type, id->src_fn)
674       /* Don't remap if copy_decl method doesn't always return a new
675 	 decl and for all embedded decls returns the passed in decl.  */
676       || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
677     {
678       insert_decl_map (id, type, type);
679       return type;
680     }
681 
682   id->remapping_type_depth++;
683   tmp = remap_type_1 (type, id);
684   id->remapping_type_depth--;
685 
686   return tmp;
687 }
688 
689 /* Decide if DECL can be put into BLOCK_NONLOCALIZED_VARS.  */
690 
691 static bool
692 can_be_nonlocal (tree decl, copy_body_data *id)
693 {
694   /* We cannot duplicate function decls.  */
695   if (TREE_CODE (decl) == FUNCTION_DECL)
696     return true;
697 
698   /* Local static vars must be non-local or we get multiple declaration
699      problems.  */
700   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
701     return true;
702 
703   return false;
704 }
705 
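/* Remap the chain of declarations DECLS for the new body described by ID
   and return the new chain.  Declarations that can remain nonlocal are not
   copied; instead they may be pushed onto *NONLOCALIZED_LIST.  */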
706 static tree
707 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
708 	     copy_body_data *id)
709 {
710   tree old_var;
711   tree new_decls = NULL_TREE;
712 
713   /* Remap its variables.  */
714   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
715     {
716       tree new_var;
717 
718       if (can_be_nonlocal (old_var, id))
719 	{
720 	  /* We need to add this variable to the local decls as otherwise
721 	     nothing else will do so.  */
722 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
723 	    add_local_decl (cfun, old_var);
724 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
725 	      && !DECL_IGNORED_P (old_var)
726 	      && nonlocalized_list)
727 	    vec_safe_push (*nonlocalized_list, old_var);
728 	  continue;
729 	}
730 
731       /* Remap the variable.  */
732       new_var = remap_decl (old_var, id);
733 
734       /* If we didn't remap this variable, we can't mess with its
735 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
736 	 already declared somewhere else, so don't declare it here.  */
737 
738       if (new_var == id->retvar)
739 	;
740       else if (!new_var)
741         {
742 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
743 	      && !DECL_IGNORED_P (old_var)
744 	      && nonlocalized_list)
745 	    vec_safe_push (*nonlocalized_list, old_var);
746 	}
747       else
748 	{
749 	  gcc_assert (DECL_P (new_var));
750 	  DECL_CHAIN (new_var) = new_decls;
751 	  new_decls = new_var;
752 
753 	  /* Also copy value-expressions.  */
754 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
755 	    {
756 	      tree tem = DECL_VALUE_EXPR (new_var);
757 	      bool old_regimplify = id->regimplify;
758 	      id->remapping_type_depth++;
759 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
760 	      id->remapping_type_depth--;
761 	      id->regimplify = old_regimplify;
762 	      SET_DECL_VALUE_EXPR (new_var, tem);
763 	    }
764 	}
765     }
766 
767   return nreverse (new_decls);
768 }
769 
770 /* Copy the BLOCK to contain remapped versions of the variables
771    therein.  And hook the new block into the block-tree.  */
772 
773 static void
774 remap_block (tree *block, copy_body_data *id)
775 {
776   tree old_block;
777   tree new_block;
778 
779   /* Make the new block.  */
780   old_block = *block;
781   new_block = make_node (BLOCK);
782   TREE_USED (new_block) = TREE_USED (old_block);
783   BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
784   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
785   BLOCK_NONLOCALIZED_VARS (new_block)
786     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
787   *block = new_block;
788 
789   /* Remap its variables.  */
790   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
791   					&BLOCK_NONLOCALIZED_VARS (new_block),
792 					id);
793 
794   if (id->transform_lang_insert_block)
795     id->transform_lang_insert_block (new_block);
796 
797   /* Remember the remapped block.  */
798   insert_decl_map (id, old_block, new_block);
799 }
800 
801 /* Copy the whole block tree and root it in id->block.  */
802 static tree
803 remap_blocks (tree block, copy_body_data *id)
804 {
805   tree t;
806   tree new_tree = block;
807 
808   if (!block)
809     return NULL;
810 
811   remap_block (&new_tree, id);
812   gcc_assert (new_tree != block);
813   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
814     prepend_lexical_block (new_tree, remap_blocks (t, id));
815   /* Blocks are in arbitrary order, but make things slightly prettier and do
816      not swap order when producing a copy.  */
817   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
818   return new_tree;
819 }
820 
821 /* Remap the block tree rooted at BLOCK to nothing.  */
822 static void
823 remap_blocks_to_null (tree block, copy_body_data *id)
824 {
825   tree t;
826   insert_decl_map (id, block, NULL_TREE);
827   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
828     remap_blocks_to_null (t, id);
829 }
830 
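/* Replace the STATEMENT_LIST pointed to by *TP with a copy of it, deep
   copying any nested STATEMENT_LISTs as well.  */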
831 static void
832 copy_statement_list (tree *tp)
833 {
834   tree_stmt_iterator oi, ni;
835   tree new_tree;
836 
837   new_tree = alloc_stmt_list ();
838   ni = tsi_start (new_tree);
839   oi = tsi_start (*tp);
840   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
841   *tp = new_tree;
842 
843   for (; !tsi_end_p (oi); tsi_next (&oi))
844     {
845       tree stmt = tsi_stmt (oi);
846       if (TREE_CODE (stmt) == STATEMENT_LIST)
847 	/* This copy is not redundant; tsi_link_after will smash this
848 	   STATEMENT_LIST into the end of the one we're building, and we
849 	   don't want to do that with the original.  */
850 	copy_statement_list (&stmt);
851       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
852     }
853 }
854 
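/* Copy the BIND_EXPR pointed to by *TP, remapping its block and the
   variables it binds using the mapping information in ID.  */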
855 static void
856 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
857 {
858   tree block = BIND_EXPR_BLOCK (*tp);
859   /* Copy (and replace) the statement.  */
860   copy_tree_r (tp, walk_subtrees, NULL);
861   if (block)
862     {
863       remap_block (&block, id);
864       BIND_EXPR_BLOCK (*tp) = block;
865     }
866 
867   if (BIND_EXPR_VARS (*tp))
868     /* This will remap a lot of the same decls again, but this should be
869        harmless.  */
870     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
871 }
872 
873 
874 /* Create a new gimple_seq by remapping all the statements in BODY
875    using the inlining information in ID.  */
876 
877 static gimple_seq
878 remap_gimple_seq (gimple_seq body, copy_body_data *id)
879 {
880   gimple_stmt_iterator si;
881   gimple_seq new_body = NULL;
882 
883   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
884     {
885       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
886       gimple_seq_add_seq (&new_body, new_stmts);
887     }
888 
889   return new_body;
890 }
891 
892 
893 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
894    block using the mapping information in ID.  */
895 
896 static gimple *
897 copy_gimple_bind (gbind *stmt, copy_body_data *id)
898 {
899   gimple *new_bind;
900   tree new_block, new_vars;
901   gimple_seq body, new_body;
902 
903   /* Copy the statement.  Note that we purposely don't use copy_stmt
904      here because we need to remap statements as we copy.  */
905   body = gimple_bind_body (stmt);
906   new_body = remap_gimple_seq (body, id);
907 
908   new_block = gimple_bind_block (stmt);
909   if (new_block)
910     remap_block (&new_block, id);
911 
912   /* This will remap a lot of the same decls again, but this should be
913      harmless.  */
914   new_vars = gimple_bind_vars (stmt);
915   if (new_vars)
916     new_vars = remap_decls (new_vars, NULL, id);
917 
918   new_bind = gimple_build_bind (new_vars, new_body, new_block);
919 
920   return new_bind;
921 }
922 
923 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
924 
925 static bool
926 is_parm (tree decl)
927 {
928   if (TREE_CODE (decl) == SSA_NAME)
929     {
930       decl = SSA_NAME_VAR (decl);
931       if (!decl)
932 	return false;
933     }
934 
935   return (TREE_CODE (decl) == PARM_DECL);
936 }
937 
938 /* Remap the dependence CLIQUE from the source to the destination function
939    as specified in ID.  */
940 
941 static unsigned short
942 remap_dependence_clique (copy_body_data *id, unsigned short clique)
943 {
944   if (clique == 0 || processing_debug_stmt)
945     return 0;
946   if (!id->dependence_map)
947     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
948   bool existed;
949   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
950   if (!existed)
951     {
952       /* Clique 1 is reserved for local ones set by PTA.  */
953       if (cfun->last_clique == 0)
954 	cfun->last_clique = 1;
955       newc = ++cfun->last_clique;
956     }
957   return newc;
958 }
959 
960 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
961    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
962    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
963    recursing into the child nodes of *TP.  */
964 
965 static tree
966 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
967 {
968   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
969   copy_body_data *id = (copy_body_data *) wi_p->info;
970   tree fn = id->src_fn;
971 
972   /* For recursive invocations this is no longer the LHS itself.  */
973   bool is_lhs = wi_p->is_lhs;
974   wi_p->is_lhs = false;
975 
976   if (TREE_CODE (*tp) == SSA_NAME)
977     {
978       *tp = remap_ssa_name (*tp, id);
979       *walk_subtrees = 0;
980       if (is_lhs)
981 	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
982       return NULL;
983     }
984   else if (auto_var_in_fn_p (*tp, fn))
985     {
986       /* Local variables and labels need to be replaced by equivalent
987 	 variables.  We don't want to copy static variables; there's
988 	 only one of those, no matter how many times we inline the
989 	 containing function.  Similarly for globals from an outer
990 	 function.  */
991       tree new_decl;
992 
993       /* Remap the declaration.  */
994       new_decl = remap_decl (*tp, id);
995       gcc_assert (new_decl);
996       /* Replace this variable with the copy.  */
997       STRIP_TYPE_NOPS (new_decl);
998       /* ???  The C++ frontend uses void * pointer zero to initialize
999          any other type.  This confuses the middle-end type verification.
1000 	 As cloned bodies do not go through gimplification again the fixup
1001 	 there doesn't trigger.  */
1002       if (TREE_CODE (new_decl) == INTEGER_CST
1003 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1004 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1005       *tp = new_decl;
1006       *walk_subtrees = 0;
1007     }
1008   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1009     gcc_unreachable ();
1010   else if (TREE_CODE (*tp) == SAVE_EXPR)
1011     gcc_unreachable ();
1012   else if (TREE_CODE (*tp) == LABEL_DECL
1013 	   && (!DECL_CONTEXT (*tp)
1014 	       || decl_function_context (*tp) == id->src_fn))
1015     /* These may need to be remapped for EH handling.  */
1016     *tp = remap_decl (*tp, id);
1017   else if (TREE_CODE (*tp) == FIELD_DECL)
1018     {
1019       /* If the enclosing record type is variably_modified_type_p, the field
1020 	 has already been remapped.  Otherwise, it need not be.  */
1021       tree *n = id->decl_map->get (*tp);
1022       if (n)
1023 	*tp = *n;
1024       *walk_subtrees = 0;
1025     }
1026   else if (TYPE_P (*tp))
1027     /* Types may need remapping as well.  */
1028     *tp = remap_type (*tp, id);
1029   else if (CONSTANT_CLASS_P (*tp))
1030     {
1031       /* If this is a constant, we have to copy the node iff the type
1032 	 will be remapped.  copy_tree_r will not copy a constant.  */
1033       tree new_type = remap_type (TREE_TYPE (*tp), id);
1034 
1035       if (new_type == TREE_TYPE (*tp))
1036 	*walk_subtrees = 0;
1037 
1038       else if (TREE_CODE (*tp) == INTEGER_CST)
1039 	*tp = wide_int_to_tree (new_type, *tp);
1040       else
1041 	{
1042 	  *tp = copy_node (*tp);
1043 	  TREE_TYPE (*tp) = new_type;
1044 	}
1045     }
1046   else
1047     {
1048       /* Otherwise, just copy the node.  Note that copy_tree_r already
1049 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
1050 
1051       if (TREE_CODE (*tp) == MEM_REF)
1052 	{
1053 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1054 	     that can happen when a pointer argument is an ADDR_EXPR.
1055 	     Recurse here manually to allow that.  */
1056 	  tree ptr = TREE_OPERAND (*tp, 0);
1057 	  tree type = remap_type (TREE_TYPE (*tp), id);
1058 	  tree old = *tp;
1059 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1060 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1061 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1062 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1063 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1064 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1065 	    {
1066 	      MR_DEPENDENCE_CLIQUE (*tp)
1067 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1068 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1069 	    }
1070 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1071 	     remapped a parameter as the property might be valid only
1072 	     for the parameter itself.  */
1073 	  if (TREE_THIS_NOTRAP (old)
1074 	      && (!is_parm (TREE_OPERAND (old, 0))
1075 		  || (!id->transform_parameter && is_parm (ptr))))
1076 	    TREE_THIS_NOTRAP (*tp) = 1;
1077 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1078 	  *walk_subtrees = 0;
1079 	  return NULL;
1080 	}
1081 
1082       /* Here is the "usual case".  Copy this tree node, and then
1083 	 tweak some special cases.  */
1084       copy_tree_r (tp, walk_subtrees, NULL);
1085 
1086       if (TREE_CODE (*tp) != OMP_CLAUSE)
1087 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1088 
1089       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1090 	{
1091 	  /* The copied TARGET_EXPR has never been expanded, even if the
1092 	     original node was expanded already.  */
1093 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1094 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1095 	}
1096       else if (TREE_CODE (*tp) == ADDR_EXPR)
1097 	{
1098 	  /* Variable substitution need not be simple.  In particular,
1099 	     the MEM_REF substitution above.  Make sure that
1100 	     TREE_CONSTANT and friends are up-to-date.  */
1101 	  int invariant = is_gimple_min_invariant (*tp);
1102 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1103 	  recompute_tree_invariant_for_addr_expr (*tp);
1104 
1105 	  /* If this used to be invariant, but is not any longer,
1106 	     then regimplification is probably needed.  */
1107 	  if (invariant && !is_gimple_min_invariant (*tp))
1108 	    id->regimplify = true;
1109 
1110 	  *walk_subtrees = 0;
1111 	}
1112     }
1113 
1114   /* Update the TREE_BLOCK for the cloned expr.  */
1115   if (EXPR_P (*tp))
1116     {
1117       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1118       tree old_block = TREE_BLOCK (*tp);
1119       if (old_block)
1120 	{
1121 	  tree *n;
1122 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1123 	  if (n)
1124 	    new_block = *n;
1125 	}
1126       TREE_SET_BLOCK (*tp, new_block);
1127     }
1128 
1129   /* Keep iterating.  */
1130   return NULL_TREE;
1131 }
1132 
1133 
1134 /* Called from copy_body_id via walk_tree.  DATA is really a
1135    `copy_body_data *'.  */
1136 
1137 tree
1138 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1139 {
1140   copy_body_data *id = (copy_body_data *) data;
1141   tree fn = id->src_fn;
1142   tree new_block;
1143 
1144   /* Begin by recognizing trees that we'll completely rewrite for the
1145      inlining context.  Our output for these trees is completely
1146      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1147      into an edge).  Further down, we'll handle trees that get
1148      duplicated and/or tweaked.  */
1149 
1150   /* When requested, RETURN_EXPRs should be transformed to just the
1151      contained MODIFY_EXPR.  The branch semantics of the return will
1152      be handled elsewhere by manipulating the CFG rather than a statement.  */
1153   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1154     {
1155       tree assignment = TREE_OPERAND (*tp, 0);
1156 
1157       /* If we're returning something, just turn that into an
1158 	 assignment into the equivalent of the original RESULT_DECL.
1159 	 If the "assignment" is just the result decl, the result
1160 	 decl has already been set (e.g. a recent "foo (&result_decl,
1161 	 ...)"); just toss the entire RETURN_EXPR.  */
1162       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1163 	{
1164 	  /* Replace the RETURN_EXPR with (a copy of) the
1165 	     MODIFY_EXPR hanging underneath.  */
1166 	  *tp = copy_node (assignment);
1167 	}
1168       else /* Else the RETURN_EXPR returns no value.  */
1169 	{
1170 	  *tp = NULL;
1171 	  return (tree) (void *)1;
1172 	}
1173     }
1174   else if (TREE_CODE (*tp) == SSA_NAME)
1175     {
1176       *tp = remap_ssa_name (*tp, id);
1177       *walk_subtrees = 0;
1178       return NULL;
1179     }
1180 
1181   /* Local variables and labels need to be replaced by equivalent
1182      variables.  We don't want to copy static variables; there's only
1183      one of those, no matter how many times we inline the containing
1184      function.  Similarly for globals from an outer function.  */
1185   else if (auto_var_in_fn_p (*tp, fn))
1186     {
1187       tree new_decl;
1188 
1189       /* Remap the declaration.  */
1190       new_decl = remap_decl (*tp, id);
1191       gcc_assert (new_decl);
1192       /* Replace this variable with the copy.  */
1193       STRIP_TYPE_NOPS (new_decl);
1194       *tp = new_decl;
1195       *walk_subtrees = 0;
1196     }
1197   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1198     copy_statement_list (tp);
1199   else if (TREE_CODE (*tp) == SAVE_EXPR
1200 	   || TREE_CODE (*tp) == TARGET_EXPR)
1201     remap_save_expr (tp, id->decl_map, walk_subtrees);
1202   else if (TREE_CODE (*tp) == LABEL_DECL
1203 	   && (! DECL_CONTEXT (*tp)
1204 	       || decl_function_context (*tp) == id->src_fn))
1205     /* These may need to be remapped for EH handling.  */
1206     *tp = remap_decl (*tp, id);
1207   else if (TREE_CODE (*tp) == BIND_EXPR)
1208     copy_bind_expr (tp, walk_subtrees, id);
1209   /* Types may need remapping as well.  */
1210   else if (TYPE_P (*tp))
1211     *tp = remap_type (*tp, id);
1212 
1213   /* If this is a constant, we have to copy the node iff the type will be
1214      remapped.  copy_tree_r will not copy a constant.  */
1215   else if (CONSTANT_CLASS_P (*tp))
1216     {
1217       tree new_type = remap_type (TREE_TYPE (*tp), id);
1218 
1219       if (new_type == TREE_TYPE (*tp))
1220 	*walk_subtrees = 0;
1221 
1222       else if (TREE_CODE (*tp) == INTEGER_CST)
1223 	*tp = wide_int_to_tree (new_type, *tp);
1224       else
1225 	{
1226 	  *tp = copy_node (*tp);
1227 	  TREE_TYPE (*tp) = new_type;
1228 	}
1229     }
1230 
1231   /* Otherwise, just copy the node.  Note that copy_tree_r already
1232      knows not to copy VAR_DECLs, etc., so this is safe.  */
1233   else
1234     {
1235       /* Here we handle trees that are not completely rewritten.
1236 	 First we detect some inlining-induced bogosities for
1237 	 discarding.  */
1238       if (TREE_CODE (*tp) == MODIFY_EXPR
1239 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1240 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1241 	{
1242 	  /* Some assignments VAR = VAR; don't generate any rtl code
1243 	     and thus don't count as variable modification.  Avoid
1244 	     keeping bogosities like 0 = 0.  */
1245 	  tree decl = TREE_OPERAND (*tp, 0), value;
1246 	  tree *n;
1247 
1248 	  n = id->decl_map->get (decl);
1249 	  if (n)
1250 	    {
1251 	      value = *n;
1252 	      STRIP_TYPE_NOPS (value);
1253 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1254 		{
1255 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1256 		  return copy_tree_body_r (tp, walk_subtrees, data);
1257 		}
1258 	    }
1259 	}
1260       else if (TREE_CODE (*tp) == INDIRECT_REF)
1261 	{
1262 	  /* Get rid of *& from inline substitutions that can happen when a
1263 	     pointer argument is an ADDR_EXPR.  */
1264 	  tree decl = TREE_OPERAND (*tp, 0);
1265 	  tree *n = id->decl_map->get (decl);
1266 	  if (n)
1267 	    {
1268 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1269 	         it manually here as we'll eventually get ADDR_EXPRs
1270 		 which lie about their types pointed to.  In this case
1271 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1272 		 but we absolutely rely on that.  As fold_indirect_ref
1273 	         does other useful transformations, try that first, though.  */
1274 	      tree type = TREE_TYPE (*tp);
1275 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1276 	      tree old = *tp;
1277 	      *tp = gimple_fold_indirect_ref (ptr);
1278 	      if (! *tp)
1279 	        {
1280 		  type = remap_type (type, id);
1281 		  if (TREE_CODE (ptr) == ADDR_EXPR)
1282 		    {
1283 		      *tp
1284 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1285 		      /* ???  We should either assert here or build
1286 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1287 			 incompatible types to our IL.  */
1288 		      if (! *tp)
1289 			*tp = TREE_OPERAND (ptr, 0);
1290 		    }
1291 	          else
1292 		    {
1293 	              *tp = build1 (INDIRECT_REF, type, ptr);
1294 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1295 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1296 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1297 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1298 			 have remapped a parameter as the property might be
1299 			 valid only for the parameter itself.  */
1300 		      if (TREE_THIS_NOTRAP (old)
1301 			  && (!is_parm (TREE_OPERAND (old, 0))
1302 			      || (!id->transform_parameter && is_parm (ptr))))
1303 		        TREE_THIS_NOTRAP (*tp) = 1;
1304 		    }
1305 		}
1306 	      *walk_subtrees = 0;
1307 	      return NULL;
1308 	    }
1309 	}
1310       else if (TREE_CODE (*tp) == MEM_REF)
1311 	{
1312 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1313 	     that can happen when a pointer argument is an ADDR_EXPR.
1314 	     Recurse here manually to allow that.  */
1315 	  tree ptr = TREE_OPERAND (*tp, 0);
1316 	  tree type = remap_type (TREE_TYPE (*tp), id);
1317 	  tree old = *tp;
1318 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1319 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1320 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1321 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1322 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1323 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1324 	    {
1325 	      MR_DEPENDENCE_CLIQUE (*tp)
1326 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1327 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1328 	    }
1329 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1330 	     remapped a parameter as the property might be valid only
1331 	     for the parameter itself.  */
1332 	  if (TREE_THIS_NOTRAP (old)
1333 	      && (!is_parm (TREE_OPERAND (old, 0))
1334 		  || (!id->transform_parameter && is_parm (ptr))))
1335 	    TREE_THIS_NOTRAP (*tp) = 1;
1336 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1337 	  *walk_subtrees = 0;
1338 	  return NULL;
1339 	}
1340 
1341       /* Here is the "usual case".  Copy this tree node, and then
1342 	 tweak some special cases.  */
1343       copy_tree_r (tp, walk_subtrees, NULL);
1344 
1345       /* If EXPR has a block defined, map it to the newly constructed block.
1346          When inlining, we want EXPRs without a block to appear in the
1347 	 block of the function call if we are not remapping a type.  */
1348       if (EXPR_P (*tp))
1349 	{
1350 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1351 	  if (TREE_BLOCK (*tp))
1352 	    {
1353 	      tree *n;
1354 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1355 	      if (n)
1356 		new_block = *n;
1357 	    }
1358 	  TREE_SET_BLOCK (*tp, new_block);
1359 	}
1360 
1361       if (TREE_CODE (*tp) != OMP_CLAUSE)
1362 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1363 
1364       /* The copied TARGET_EXPR has never been expanded, even if the
1365 	 original node was expanded already.  */
1366       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1367 	{
1368 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1369 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1370 	}
1371 
1372       /* Variable substitution need not be simple.  In particular, the
1373 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1374 	 and friends are up-to-date.  */
1375       else if (TREE_CODE (*tp) == ADDR_EXPR)
1376 	{
1377 	  int invariant = is_gimple_min_invariant (*tp);
1378 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1379 
1380 	  /* Handle the case where we substituted an INDIRECT_REF
1381 	     into the operand of the ADDR_EXPR.  */
1382 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1383 	    {
1384 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1385 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1386 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1387 	      *tp = t;
1388 	    }
1389 	  else
1390 	    recompute_tree_invariant_for_addr_expr (*tp);
1391 
1392 	  /* If this used to be invariant, but is not any longer,
1393 	     then regimplification is probably needed.  */
1394 	  if (invariant && !is_gimple_min_invariant (*tp))
1395 	    id->regimplify = true;
1396 
1397 	  *walk_subtrees = 0;
1398 	}
1399     }
1400 
1401   /* Keep iterating.  */
1402   return NULL_TREE;
1403 }
1404 
1405 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1406    source function, map that to the duplicate EH region number in
1407    the destination function.  */
1408 
1409 static int
1410 remap_eh_region_nr (int old_nr, copy_body_data *id)
1411 {
1412   eh_region old_r, new_r;
1413 
1414   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1415   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1416 
1417   return new_r->index;
1418 }
1419 
1420 /* Similar, but operate on INTEGER_CSTs.  */
1421 
1422 static tree
1423 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1424 {
1425   int old_nr, new_nr;
1426 
1427   old_nr = tree_to_shwi (old_t_nr);
1428   new_nr = remap_eh_region_nr (old_nr, id);
1429 
1430   return build_int_cst (integer_type_node, new_nr);
1431 }
1432 
1433 /* Helper for copy_bb.  Remap statement STMT using the inlining
1434    information in ID.  Return the new statement copy.  */
1435 
1436 static gimple_seq
1437 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1438 {
1439   gimple *copy = NULL;
1440   struct walk_stmt_info wi;
1441   bool skip_first = false;
1442   gimple_seq stmts = NULL;
1443 
1444   if (is_gimple_debug (stmt)
1445       && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1446     return stmts;
1447 
1448   /* Begin by recognizing trees that we'll completely rewrite for the
1449      inlining context.  Our output for these trees is completely
1450      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1451      into an edge).  Further down, we'll handle trees that get
1452      duplicated and/or tweaked.  */
1453 
1454   /* When requested, GIMPLE_RETURNs should be transformed to just the
1455      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1456      be handled elsewhere by manipulating the CFG rather than the
1457      statement.  */
1458   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1459     {
1460       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1461       tree retbnd = gimple_return_retbnd (stmt);
1462       tree bndslot = id->retbnd;
1463 
1464       if (retbnd && bndslot)
1465 	{
1466 	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
1467 	  memset (&wi, 0, sizeof (wi));
1468 	  wi.info = id;
1469 	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1470 	  gimple_seq_add_stmt (&stmts, bndcopy);
1471 	}
1472 
1473       /* If we're returning something, just turn that into an
1474 	 assignment into the equivalent of the original RESULT_DECL.
1475 	 If RETVAL is just the result decl, the result decl has
1476 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1477 	 just toss the entire GIMPLE_RETURN.  */
1478       if (retval
1479 	  && (TREE_CODE (retval) != RESULT_DECL
1480 	      && (TREE_CODE (retval) != SSA_NAME
1481 		  || ! SSA_NAME_VAR (retval)
1482 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1483         {
1484 	  copy = gimple_build_assign (id->do_not_unshare
1485 				      ? id->retvar : unshare_expr (id->retvar),
1486 				      retval);
1487 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1488 	  skip_first = true;
1489 
1490 	  /* We need to copy bounds if we return a structure with pointers
1491 	     into an instrumented function.  */
1492 	  if (chkp_function_instrumented_p (id->dst_fn)
1493 	      && !bndslot
1494 	      && !BOUNDED_P (id->retvar)
1495 	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1496 	    id->assign_stmts.safe_push (copy);
1497 
1498 	}
1499       else
1500 	return stmts;
1501     }
1502   else if (gimple_has_substatements (stmt))
1503     {
1504       gimple_seq s1, s2;
1505 
1506       /* When cloning bodies from the C++ front end, we will be handed bodies
1507 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1508 	 have embedded statements.  */
1509       switch (gimple_code (stmt))
1510 	{
1511 	case GIMPLE_BIND:
1512 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1513 	  break;
1514 
1515 	case GIMPLE_CATCH:
1516 	  {
1517 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1518 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1519 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1520 	  }
1521 	  break;
1522 
1523 	case GIMPLE_EH_FILTER:
1524 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1525 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1526 	  break;
1527 
1528 	case GIMPLE_TRY:
1529 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1530 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1531 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1532 	  break;
1533 
1534 	case GIMPLE_WITH_CLEANUP_EXPR:
1535 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1536 	  copy = gimple_build_wce (s1);
1537 	  break;
1538 
1539 	case GIMPLE_OMP_PARALLEL:
1540 	  {
1541 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1542 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1543 	    copy = gimple_build_omp_parallel
1544 	             (s1,
1545 		      gimple_omp_parallel_clauses (omp_par_stmt),
1546 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1547 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1548 	  }
1549 	  break;
1550 
1551 	case GIMPLE_OMP_TASK:
1552 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1553 	  copy = gimple_build_omp_task
1554 	           (s1,
1555 		    gimple_omp_task_clauses (stmt),
1556 		    gimple_omp_task_child_fn (stmt),
1557 		    gimple_omp_task_data_arg (stmt),
1558 		    gimple_omp_task_copy_fn (stmt),
1559 		    gimple_omp_task_arg_size (stmt),
1560 		    gimple_omp_task_arg_align (stmt));
1561 	  break;
1562 
1563 	case GIMPLE_OMP_FOR:
1564 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1565 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1566 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1567 				       gimple_omp_for_clauses (stmt),
1568 				       gimple_omp_for_collapse (stmt), s2);
1569 	  {
1570 	    size_t i;
1571 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1572 	      {
1573 		gimple_omp_for_set_index (copy, i,
1574 					  gimple_omp_for_index (stmt, i));
1575 		gimple_omp_for_set_initial (copy, i,
1576 					    gimple_omp_for_initial (stmt, i));
1577 		gimple_omp_for_set_final (copy, i,
1578 					  gimple_omp_for_final (stmt, i));
1579 		gimple_omp_for_set_incr (copy, i,
1580 					 gimple_omp_for_incr (stmt, i));
1581 		gimple_omp_for_set_cond (copy, i,
1582 					 gimple_omp_for_cond (stmt, i));
1583 	      }
1584 	  }
1585 	  break;
1586 
1587 	case GIMPLE_OMP_MASTER:
1588 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1589 	  copy = gimple_build_omp_master (s1);
1590 	  break;
1591 
1592 	case GIMPLE_OMP_TASKGROUP:
1593 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1594 	  copy = gimple_build_omp_taskgroup (s1);
1595 	  break;
1596 
1597 	case GIMPLE_OMP_ORDERED:
1598 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1599 	  copy = gimple_build_omp_ordered
1600 		   (s1,
1601 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1602 	  break;
1603 
1604 	case GIMPLE_OMP_SECTION:
1605 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1606 	  copy = gimple_build_omp_section (s1);
1607 	  break;
1608 
1609 	case GIMPLE_OMP_SECTIONS:
1610 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1611 	  copy = gimple_build_omp_sections
1612 	           (s1, gimple_omp_sections_clauses (stmt));
1613 	  break;
1614 
1615 	case GIMPLE_OMP_SINGLE:
1616 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1617 	  copy = gimple_build_omp_single
1618 	           (s1, gimple_omp_single_clauses (stmt));
1619 	  break;
1620 
1621 	case GIMPLE_OMP_TARGET:
1622 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1623 	  copy = gimple_build_omp_target
1624 		   (s1, gimple_omp_target_kind (stmt),
1625 		    gimple_omp_target_clauses (stmt));
1626 	  break;
1627 
1628 	case GIMPLE_OMP_TEAMS:
1629 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1630 	  copy = gimple_build_omp_teams
1631 		   (s1, gimple_omp_teams_clauses (stmt));
1632 	  break;
1633 
1634 	case GIMPLE_OMP_CRITICAL:
1635 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1636 	  copy = gimple_build_omp_critical (s1,
1637 					    gimple_omp_critical_name
1638 					      (as_a <gomp_critical *> (stmt)),
1639 					    gimple_omp_critical_clauses
1640 					      (as_a <gomp_critical *> (stmt)));
1641 	  break;
1642 
1643 	case GIMPLE_TRANSACTION:
1644 	  {
1645 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1646 	    gtransaction *new_trans_stmt;
1647 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1648 				   id);
1649 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1650 	    gimple_transaction_set_subcode (new_trans_stmt,
1651 	      gimple_transaction_subcode (old_trans_stmt));
1652 	    gimple_transaction_set_label_norm (new_trans_stmt,
1653 	      gimple_transaction_label_norm (old_trans_stmt));
1654 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1655 	      gimple_transaction_label_uninst (old_trans_stmt));
1656 	    gimple_transaction_set_label_over (new_trans_stmt,
1657 	      gimple_transaction_label_over (old_trans_stmt));
1658 	  }
1659 	  break;
1660 
1661 	default:
1662 	  gcc_unreachable ();
1663 	}
1664     }
1665   else
1666     {
1667       if (gimple_assign_copy_p (stmt)
1668 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1669 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1670 	{
1671 	  /* Here we handle statements that are not completely rewritten.
1672 	     First we detect some inlining-induced bogosities for
1673 	     discarding.  */
1674 
1675 	  /* Some assignments VAR = VAR; don't generate any rtl code
1676 	     and thus don't count as variable modification.  Avoid
1677 	     keeping bogosities like 0 = 0.  */
1678 	  tree decl = gimple_assign_lhs (stmt), value;
1679 	  tree *n;
1680 
1681 	  n = id->decl_map->get (decl);
1682 	  if (n)
1683 	    {
1684 	      value = *n;
1685 	      STRIP_TYPE_NOPS (value);
1686 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1687 		return NULL;
1688 	    }
1689 	}
1690 
1691       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1692 	 in a block that we aren't copying during tree_function_versioning,
1693 	 just drop the clobber stmt.  */
1694       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1695 	{
1696 	  tree lhs = gimple_assign_lhs (stmt);
1697 	  if (TREE_CODE (lhs) == MEM_REF
1698 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1699 	    {
1700 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1701 	      if (gimple_bb (def_stmt)
1702 		  && !bitmap_bit_p (id->blocks_to_copy,
1703 				    gimple_bb (def_stmt)->index))
1704 		return NULL;
1705 	    }
1706 	}
1707 
1708       if (gimple_debug_bind_p (stmt))
1709 	{
1710 	  gdebug *copy
1711 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1712 				       gimple_debug_bind_get_value (stmt),
1713 				       stmt);
1714 	  id->debug_stmts.safe_push (copy);
1715 	  gimple_seq_add_stmt (&stmts, copy);
1716 	  return stmts;
1717 	}
1718       if (gimple_debug_source_bind_p (stmt))
1719 	{
1720 	  gdebug *copy = gimple_build_debug_source_bind
1721 	                   (gimple_debug_source_bind_get_var (stmt),
1722 			    gimple_debug_source_bind_get_value (stmt),
1723 			    stmt);
1724 	  id->debug_stmts.safe_push (copy);
1725 	  gimple_seq_add_stmt (&stmts, copy);
1726 	  return stmts;
1727 	}
1728 
1729       /* Create a new deep copy of the statement.  */
1730       copy = gimple_copy (stmt);
1731 
1732       /* Clear flags that need revisiting.  */
1733       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1734         {
1735 	  if (gimple_call_tail_p (call_stmt))
1736 	    gimple_call_set_tail (call_stmt, false);
1737 	  if (gimple_call_from_thunk_p (call_stmt))
1738 	    gimple_call_set_from_thunk (call_stmt, false);
1739 	  if (gimple_call_internal_p (call_stmt))
1740 	    switch (gimple_call_internal_fn (call_stmt))
1741 	      {
1742 	      case IFN_GOMP_SIMD_LANE:
1743 	      case IFN_GOMP_SIMD_VF:
1744 	      case IFN_GOMP_SIMD_LAST_LANE:
1745 	      case IFN_GOMP_SIMD_ORDERED_START:
1746 	      case IFN_GOMP_SIMD_ORDERED_END:
1747 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1748 	        break;
1749 	      default:
1750 		break;
1751 	      }
1752 	}
1753 
1754       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1755 	 RESX and EH_DISPATCH.  */
1756       if (id->eh_map)
1757 	switch (gimple_code (copy))
1758 	  {
1759 	  case GIMPLE_CALL:
1760 	    {
1761 	      tree r, fndecl = gimple_call_fndecl (copy);
1762 	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1763 		switch (DECL_FUNCTION_CODE (fndecl))
1764 		  {
1765 		  case BUILT_IN_EH_COPY_VALUES:
1766 		    r = gimple_call_arg (copy, 1);
1767 		    r = remap_eh_region_tree_nr (r, id);
1768 		    gimple_call_set_arg (copy, 1, r);
1769 		    /* FALLTHRU */
1770 
1771 		  case BUILT_IN_EH_POINTER:
1772 		  case BUILT_IN_EH_FILTER:
1773 		    r = gimple_call_arg (copy, 0);
1774 		    r = remap_eh_region_tree_nr (r, id);
1775 		    gimple_call_set_arg (copy, 0, r);
1776 		    break;
1777 
1778 		  default:
1779 		    break;
1780 		  }
1781 
1782 	      /* Reset alias info if we didn't apply measures to
1783 		 keep it valid over inlining by setting DECL_PT_UID.  */
1784 	      if (!id->src_cfun->gimple_df
1785 		  || !id->src_cfun->gimple_df->ipa_pta)
1786 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1787 	    }
1788 	    break;
1789 
1790 	  case GIMPLE_RESX:
1791 	    {
1792 	      gresx *resx_stmt = as_a <gresx *> (copy);
1793 	      int r = gimple_resx_region (resx_stmt);
1794 	      r = remap_eh_region_nr (r, id);
1795 	      gimple_resx_set_region (resx_stmt, r);
1796 	    }
1797 	    break;
1798 
1799 	  case GIMPLE_EH_DISPATCH:
1800 	    {
1801 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1802 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1803 	      r = remap_eh_region_nr (r, id);
1804 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1805 	    }
1806 	    break;
1807 
1808 	  default:
1809 	    break;
1810 	  }
1811     }
1812 
1813   /* If STMT has a block defined, map it to the newly constructed
1814      block.  */
1815   if (gimple_block (copy))
1816     {
1817       tree *n;
1818       n = id->decl_map->get (gimple_block (copy));
1819       gcc_assert (n);
1820       gimple_set_block (copy, *n);
1821     }
1822 
1823   if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1824     {
1825       gimple_seq_add_stmt (&stmts, copy);
1826       return stmts;
1827     }
1828 
1829   /* Remap all the operands in COPY.  */
1830   memset (&wi, 0, sizeof (wi));
1831   wi.info = id;
1832   if (skip_first)
1833     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1834   else
1835     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1836 
1837   /* Clear the copied virtual operands.  We are not remapping them here
1838      but are going to recreate them from scratch.  */
1839   if (gimple_has_mem_ops (copy))
1840     {
1841       gimple_set_vdef (copy, NULL_TREE);
1842       gimple_set_vuse (copy, NULL_TREE);
1843     }
1844 
1845   gimple_seq_add_stmt (&stmts, copy);
1846   return stmts;
1847 }
1848 
1849 
1850 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
1851    later.  */
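/* Illustrative sketch (added for exposition; the numbers are hypothetical):
   the copy's profile is derived as
     copy->count     = bb->count * count_scale / REG_BR_PROB_BASE
     copy->frequency = bb->frequency * frequency_scale / REG_BR_PROB_BASE
   so with count_scale == REG_BR_PROB_BASE / 2 a block with count 1000 and
   frequency 400 is copied with count 500 and frequency 200; the frequency
   is additionally capped at BB_FREQ_MAX below.  */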
1852 
1853 static basic_block
1854 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1855          gcov_type count_scale)
1856 {
1857   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1858   basic_block copy_basic_block;
1859   tree decl;
1860   gcov_type freq;
1861   basic_block prev;
1862 
1863   /* Search for previous copied basic block.  */
1864   prev = bb->prev_bb;
1865   while (!prev->aux)
1866     prev = prev->prev_bb;
1867 
1868   /* create_basic_block() will append every new block to
1869      basic_block_info automatically.  */
1870   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1871   copy_basic_block->count = apply_scale (bb->count, count_scale);
1872 
1873   /* We are going to rebuild frequencies from scratch.  These values
1874      are of only minor importance for driving canonicalize_loop_headers.  */
1875   freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1876 
1877   /* We recompute frequencies after inlining, so this is quite safe.  */
1878   if (freq > BB_FREQ_MAX)
1879     freq = BB_FREQ_MAX;
1880   copy_basic_block->frequency = freq;
1881 
1882   copy_gsi = gsi_start_bb (copy_basic_block);
1883 
1884   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1885     {
1886       gimple_seq stmts;
1887       gimple *stmt = gsi_stmt (gsi);
1888       gimple *orig_stmt = stmt;
1889       gimple_stmt_iterator stmts_gsi;
1890       bool stmt_added = false;
1891 
1892       id->regimplify = false;
1893       stmts = remap_gimple_stmt (stmt, id);
1894 
1895       if (gimple_seq_empty_p (stmts))
1896 	continue;
1897 
1898       seq_gsi = copy_gsi;
1899 
1900       for (stmts_gsi = gsi_start (stmts);
1901 	   !gsi_end_p (stmts_gsi); )
1902 	{
1903 	  stmt = gsi_stmt (stmts_gsi);
1904 
1905 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1906 	  gsi_next (&stmts_gsi);
1907 
1908 	  if (gimple_nop_p (stmt))
1909 	      continue;
1910 
1911 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1912 					    orig_stmt);
1913 
1914 	  /* With return slot optimization we can end up with
1915 	     non-gimple (foo *)&this->m, fix that here.  */
1916 	  if (is_gimple_assign (stmt)
1917 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1918 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1919 	    {
1920 	      tree new_rhs;
1921 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1922 						  gimple_assign_rhs1 (stmt),
1923 						  true, NULL, false,
1924 						  GSI_CONTINUE_LINKING);
1925 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1926 	      id->regimplify = false;
1927 	    }
1928 
1929 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1930 
1931 	  if (id->regimplify)
1932 	    gimple_regimplify_operands (stmt, &seq_gsi);
1933 
1934 	  stmt_added = true;
1935 	}
1936 
1937       if (!stmt_added)
1938 	continue;
1939 
1940       /* If copy_basic_block has been empty at the start of this iteration,
1941 	 call gsi_start_bb again to get at the newly added statements.  */
1942       if (gsi_end_p (copy_gsi))
1943 	copy_gsi = gsi_start_bb (copy_basic_block);
1944       else
1945 	gsi_next (&copy_gsi);
1946 
1947       /* Process the new statement.  The call to gimple_regimplify_operands
1948 	 possibly turned the statement into multiple statements; we
1949 	 need to process all of them.  */
1950       do
1951 	{
1952 	  tree fn;
1953 	  gcall *call_stmt;
1954 
1955 	  stmt = gsi_stmt (copy_gsi);
1956 	  call_stmt = dyn_cast <gcall *> (stmt);
1957 	  if (call_stmt
1958 	      && gimple_call_va_arg_pack_p (call_stmt)
1959 	      && id->call_stmt
1960 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
1961 	    {
1962 	      /* __builtin_va_arg_pack () should be replaced by
1963 		 all arguments corresponding to ... in the caller.  */
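	      /* Illustrative example (hypothetical, added for exposition):
		 if the function being inlined forwards its variadic
		 arguments with
		   foo (a, __builtin_va_arg_pack ());
		 and the call being inlined is  bar (1, 2.5, "x")  with BAR
		 declared as  int bar (int a, ...) , the statement above is
		 rewritten into
		   foo (a, 2.5, "x");
		 i.e. the pack expands to the caller's anonymous
		 arguments.  */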
1964 	      tree p;
1965 	      gcall *new_call;
1966 	      vec<tree> argarray;
1967 	      size_t nargs = gimple_call_num_args (id->call_stmt);
1968 	      size_t n, i, nargs_to_copy;
1969 	      bool remove_bounds = false;
1970 
1971 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1972 		nargs--;
1973 
1974 	      /* Bounds should be removed from the argument pack when we
1975 		 handle a non-instrumented call in an instrumented
1976 		 function.  */
1977 	      nargs_to_copy = nargs;
1978 	      if (gimple_call_with_bounds_p (id->call_stmt)
1979 		  && !gimple_call_with_bounds_p (stmt))
1980 		{
1981 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1982 		       i < gimple_call_num_args (id->call_stmt);
1983 		       i++)
1984 		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1985 		      nargs_to_copy--;
1986 		  remove_bounds = true;
1987 		}
1988 
1989 	      /* Create the new array of arguments.  */
1990 	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
1991 	      argarray.create (n);
1992 	      argarray.safe_grow_cleared (n);
1993 
1994 	      /* Copy all the arguments before '...'  */
1995 	      memcpy (argarray.address (),
1996 		      gimple_call_arg_ptr (call_stmt, 0),
1997 		      gimple_call_num_args (call_stmt) * sizeof (tree));
1998 
1999 	      if (remove_bounds)
2000 		{
2001 		  /* Append the rest of arguments removing bounds.  */
2002 		  /* Append the rest of the arguments, removing bounds.  */
2003 		  i = gimple_call_num_args (id->call_stmt) - nargs;
2005 		       i < gimple_call_num_args (id->call_stmt);
2006 		       i++)
2007 		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
2008 		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
2009 		  gcc_assert (cur == n);
2010 		}
2011 	      else
2012 		{
2013 		  /* Append the arguments passed in '...'  */
2014 		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2015 			  gimple_call_arg_ptr (id->call_stmt, 0)
2016 			  + (gimple_call_num_args (id->call_stmt) - nargs),
2017 			  nargs * sizeof (tree));
2018 		}
2019 
2020 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2021 						argarray);
2022 
2023 	      argarray.release ();
2024 
2025 	      /* Copy all GIMPLE_CALL flags, location and block, except
2026 		 GF_CALL_VA_ARG_PACK.  */
2027 	      gimple_call_copy_flags (new_call, call_stmt);
2028 	      gimple_call_set_va_arg_pack (new_call, false);
2029 	      gimple_set_location (new_call, gimple_location (stmt));
2030 	      gimple_set_block (new_call, gimple_block (stmt));
2031 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2032 
2033 	      gsi_replace (&copy_gsi, new_call, false);
2034 	      stmt = new_call;
2035 	    }
2036 	  else if (call_stmt
2037 		   && id->call_stmt
2038 		   && (decl = gimple_call_fndecl (stmt))
2039 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2040 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
2041 	    {
2042 	      /* __builtin_va_arg_pack_len () should be replaced by
2043 		 the number of anonymous arguments.  */
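	      /* Illustrative example (hypothetical, added for exposition):
		 if the call being inlined is  bar (1, 2.5, "x")  with BAR
		 declared as  int bar (int a, ...) , then
		 __builtin_va_arg_pack_len ()  folds to 2, the number of
		 anonymous arguments.  */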
2044 	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
2045 	      tree count, p;
2046 	      gimple *new_stmt;
2047 
2048 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2049 		nargs--;
2050 
2051 	      /* For instrumented calls we should ignore bounds.  */
2052 	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
2053 		   i < gimple_call_num_args (id->call_stmt);
2054 		   i++)
2055 		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
2056 		  nargs--;
2057 
2058 	      if (!gimple_call_lhs (stmt))
2059 		{
2060 		  /* Drop unused calls.  */
2061 		  gsi_remove (&copy_gsi, false);
2062 		  continue;
2063 		}
2064 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2065 		{
2066 		  count = build_int_cst (integer_type_node, nargs);
2067 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2068 		  gsi_replace (&copy_gsi, new_stmt, false);
2069 		  stmt = new_stmt;
2070 		}
2071 	      else if (nargs != 0)
2072 		{
2073 		  tree newlhs;
2074 		  if (gimple_in_ssa_p (cfun))
2075 		    newlhs = make_ssa_name (integer_type_node, NULL);
2076 		  else
2077 		    newlhs = create_tmp_reg (integer_type_node);
2078 		  count = build_int_cst (integer_type_node, nargs);
2079 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2080 						  PLUS_EXPR, newlhs, count);
2081 		  gimple_call_set_lhs (stmt, newlhs);
2082 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2083 		}
2084 	    }
2085 	  else if (call_stmt
2086 		   && id->call_stmt
2087 		   && gimple_call_internal_p (stmt)
2088 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2089 	    {
2090 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
2091 	      gsi_remove (&copy_gsi, false);
2092 	      continue;
2093 	    }
2094 
2095 	  /* Statements produced by inlining can be unfolded, especially
2096 	     when we constant propagated some operands.  We can't fold
2097 	     them right now for two reasons:
2098 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
2099 	     2) we can't change function calls to builtins.
2100 	     So we just mark the statement for later folding.  We mark
2101 	     all new statements, instead of just the statements that changed
2102 	     by some nontrivial substitution, so even statements made
2103 	     foldable indirectly are updated.  If this turns out to be
2104 	     expensive, copy_body can be told to watch for nontrivial
2105 	     changes.  */
2106 	  if (id->statements_to_fold)
2107 	    id->statements_to_fold->add (stmt);
2108 
2109 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2110 	     callgraph edges and update or duplicate them.  */
2111 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2112 	    {
2113 	      struct cgraph_edge *edge;
2114 
2115 	      switch (id->transform_call_graph_edges)
2116 		{
2117 		case CB_CGE_DUPLICATE:
2118 		  edge = id->src_node->get_edge (orig_stmt);
2119 		  if (edge)
2120 		    {
2121 		      int edge_freq = edge->frequency;
2122 		      int new_freq;
2123 		      struct cgraph_edge *old_edge = edge;
2124 		      edge = edge->clone (id->dst_node, call_stmt,
2125 					  gimple_uid (stmt),
2126 					  REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2127 					  true);
2128 		      /* We could also just rescale the frequency, but
2129 		         doing so would introduce roundoff errors and make
2130 			 the verifier unhappy.  */
2131 		      new_freq  = compute_call_stmt_bb_frequency (id->dst_node->decl,
2132 								  copy_basic_block);
2133 
2134 		      /* Speculative calls consist of two edges - direct and indirect.
2135 			 Duplicate the whole thing and distribute frequencies accordingly.  */
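		      /* Sketch with hypothetical numbers (added for
			 exposition): if the original direct edge had
			 frequency 300 and the indirect one 100, and the
			 copied call block yields new_freq == 600, then the
			 clones receive 600*300/400 == 450 and
			 600*100/400 == 150 respectively, both capped at
			 CGRAPH_FREQ_MAX.  */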
2136 		      if (edge->speculative)
2137 			{
2138 			  struct cgraph_edge *direct, *indirect;
2139 			  struct ipa_ref *ref;
2140 
2141 			  gcc_assert (!edge->indirect_unknown_callee);
2142 			  old_edge->speculative_call_info (direct, indirect, ref);
2143 			  indirect = indirect->clone (id->dst_node, call_stmt,
2144 						      gimple_uid (stmt),
2145 						      REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2146 						      true);
2147 			  if (old_edge->frequency + indirect->frequency)
2148 			    {
2149 			      edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2150 						           (old_edge->frequency + indirect->frequency)),
2151 						     CGRAPH_FREQ_MAX);
2152 			      indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2153 							       (old_edge->frequency + indirect->frequency)),
2154 							 CGRAPH_FREQ_MAX);
2155 			    }
2156 			  id->dst_node->clone_reference (ref, stmt);
2157 			}
2158 		      else
2159 			{
2160 			  edge->frequency = new_freq;
2161 			  if (dump_file
2162 			      && profile_status_for_fn (cfun) != PROFILE_ABSENT
2163 			      && (edge_freq > edge->frequency + 10
2164 				  || edge_freq < edge->frequency - 10))
2165 			    {
2166 			      fprintf (dump_file, "Edge frequency estimated by "
2167 				       "cgraph %i diverge from inliner's estimate %i\n",
2168 				       edge_freq,
2169 				       edge->frequency);
2170 			      fprintf (dump_file,
2171 				       "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2172 				       bb->index,
2173 				       bb->frequency,
2174 				       copy_basic_block->frequency);
2175 			    }
2176 			}
2177 		    }
2178 		  break;
2179 
2180 		case CB_CGE_MOVE_CLONES:
2181 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2182 								call_stmt);
2183 		  edge = id->dst_node->get_edge (stmt);
2184 		  break;
2185 
2186 		case CB_CGE_MOVE:
2187 		  edge = id->dst_node->get_edge (orig_stmt);
2188 		  if (edge)
2189 		    edge->set_call_stmt (call_stmt);
2190 		  break;
2191 
2192 		default:
2193 		  gcc_unreachable ();
2194 		}
2195 
2196 	      /* Constant propagation on arguments done during inlining
2197 		 may create a new direct call.  Produce an edge for it.  */
2198 	      if ((!edge
2199 		   || (edge->indirect_inlining_edge
2200 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2201 		  && id->dst_node->definition
2202 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2203 		{
2204 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2205 
2206 		  /* We have a missing edge in the callgraph.  This can happen
2207 		     when previous inlining turned an indirect call into a
2208 		     direct call by constant propagating arguments or we are
2209 		     producing a dead clone (for further cloning).  In all
2210 		     other cases we hit a bug (incorrect node sharing is the
2211 		     most common reason for missing edges).  */
2212 		  gcc_assert (!dest->definition
2213 			      || dest->address_taken
2214 		  	      || !id->src_node->definition
2215 			      || !id->dst_node->definition);
2216 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2217 		    id->dst_node->create_edge_including_clones
2218 		      (dest, orig_stmt, call_stmt, bb->count,
2219 		       compute_call_stmt_bb_frequency (id->dst_node->decl,
2220 		       				       copy_basic_block),
2221 		       CIF_ORIGINALLY_INDIRECT_CALL);
2222 		  else
2223 		    id->dst_node->create_edge (dest, call_stmt,
2224 					bb->count,
2225 					compute_call_stmt_bb_frequency
2226 					  (id->dst_node->decl,
2227 					   copy_basic_block))->inline_failed
2228 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2229 		  if (dump_file)
2230 		    {
2231 		      fprintf (dump_file, "Created new direct edge to %s\n",
2232 			       dest->name ());
2233 		    }
2234 		}
2235 
2236 	      notice_special_calls (as_a <gcall *> (stmt));
2237 	    }
2238 
2239 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2240 				      id->eh_map, id->eh_lp_nr);
2241 
2242 	  gsi_next (&copy_gsi);
2243 	}
2244       while (!gsi_end_p (copy_gsi));
2245 
2246       copy_gsi = gsi_last_bb (copy_basic_block);
2247     }
2248 
2249   return copy_basic_block;
2250 }
2251 
2252 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2253    SSA form is quite easy, since the dominator relationship for the old
2254    basic blocks does not change.
2255 
2256    There is however an exception where inlining might change the dominator
2257    relation across EH edges from basic blocks within inlined functions to
2258    landing pads in the function we inline into.
2259 
2260    The function fills in PHI_RESULTs of such PHI nodes if they refer
2261    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2262    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2263    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2264    set, and this means that there will be no overlapping live ranges
2265    for the underlying symbol.
2266 
2267    This might change in the future if we allow redirecting of EH edges and
2268    we might then want to change the way we build the CFG pre-inlining to
2269    include all the possible edges.  */
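/* A hypothetical sketch (added for exposition): suppose a copied block has an
   EH edge E to a landing pad L of the function we inline into, and L is
   already reached by an EH edge RE from RET_BB.  For every PHI node in L, the
   argument on the new edge E is taken from the argument already present on
   RE, which is what the SET_USE loop below implements.  */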
2270 static void
2271 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2272 				  bool can_throw, bool nonlocal_goto)
2273 {
2274   edge e;
2275   edge_iterator ei;
2276 
2277   FOR_EACH_EDGE (e, ei, bb->succs)
2278     if (!e->dest->aux
2279 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2280       {
2281 	gphi *phi;
2282 	gphi_iterator si;
2283 
2284 	if (!nonlocal_goto)
2285 	  gcc_assert (e->flags & EDGE_EH);
2286 
2287 	if (!can_throw)
2288 	  gcc_assert (!(e->flags & EDGE_EH));
2289 
2290 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2291 	  {
2292 	    edge re;
2293 
2294 	    phi = si.phi ();
2295 
2296 	    /* For abnormal goto/call edges the receiver can be the
2297 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2298 
2299 	    gcc_assert ((e->flags & EDGE_EH)
2300 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2301 
2302 	    re = find_edge (ret_bb, e->dest);
2303 	    gcc_checking_assert (re);
2304 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2305 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2306 
2307 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2308 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2309 	  }
2310       }
2311 }
2312 
2313 
2314 /* Copy edges from BB into its copy constructed earlier, scale profile
2315    accordingly.  Edges will be taken care of later.  Assume the aux
2316    pointers point to the copies of each BB.  Return true if any
2317    debug stmts are left after a statement that must end the basic block.  */
2318 
2319 static bool
2320 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2321 		   basic_block abnormal_goto_dest)
2322 {
2323   basic_block new_bb = (basic_block) bb->aux;
2324   edge_iterator ei;
2325   edge old_edge;
2326   gimple_stmt_iterator si;
2327   int flags;
2328   bool need_debug_cleanup = false;
2329 
2330   /* Use the indices from the original blocks to create edges for the
2331      new ones.  */
2332   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2333     if (!(old_edge->flags & EDGE_EH))
2334       {
2335 	edge new_edge;
2336 
2337 	flags = old_edge->flags;
2338 
2339 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2340 	if (old_edge->dest->index == EXIT_BLOCK
2341 	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2342 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2343 	  flags |= EDGE_FALLTHRU;
2344 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2345 	new_edge->count = apply_scale (old_edge->count, count_scale);
2346 	new_edge->probability = old_edge->probability;
2347       }
2348 
2349   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2350     return false;
2351 
2352   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2353     {
2354       gimple *copy_stmt;
2355       bool can_throw, nonlocal_goto;
2356 
2357       copy_stmt = gsi_stmt (si);
2358       if (!is_gimple_debug (copy_stmt))
2359 	update_stmt (copy_stmt);
2360 
2361       /* Do this before the possible split_block.  */
2362       gsi_next (&si);
2363 
2364       /* If this tree could throw an exception, there are two
2365          cases where we need to add abnormal edge(s): the
2366          tree wasn't in a region and there is a "current
2367          region" in the caller; or the original tree had
2368          EH edges.  In both cases split the block after the tree,
2369          and add abnormal edge(s) as needed; we need both
2370          those from the callee and the caller.
2371          We check whether the copy can throw, because the const
2372          propagation can change an INDIRECT_REF which throws
2373          into a COMPONENT_REF which doesn't.  If the copy
2374          can throw, the original could also throw.  */
2375       can_throw = stmt_can_throw_internal (copy_stmt);
2376       nonlocal_goto
2377 	= (stmt_can_make_abnormal_goto (copy_stmt)
2378 	   && !computed_goto_p (copy_stmt));
2379 
2380       if (can_throw || nonlocal_goto)
2381 	{
2382 	  if (!gsi_end_p (si))
2383 	    {
2384 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2385 		gsi_next (&si);
2386 	      if (gsi_end_p (si))
2387 		need_debug_cleanup = true;
2388 	    }
2389 	  if (!gsi_end_p (si))
2390 	    /* Note that bb's predecessor edges aren't necessarily
2391 	       right at this point; split_block doesn't care.  */
2392 	    {
2393 	      edge e = split_block (new_bb, copy_stmt);
2394 
2395 	      new_bb = e->dest;
2396 	      new_bb->aux = e->src->aux;
2397 	      si = gsi_start_bb (new_bb);
2398 	    }
2399 	}
2400 
2401       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2402 	make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2403       else if (can_throw)
2404 	make_eh_edges (copy_stmt);
2405 
2406       /* If the call we inline cannot make an abnormal goto, do not add
2407          additional abnormal edges but only retain those already present
2408 	 in the original function body.  */
2409       if (abnormal_goto_dest == NULL)
2410 	nonlocal_goto = false;
2411       if (nonlocal_goto)
2412 	{
2413 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2414 
2415 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2416 	    nonlocal_goto = false;
2417 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2418 	     in OpenMP regions which aren't allowed to be left abnormally.
2419 	     So, no need to add abnormal edge in that case.  */
2420 	  else if (is_gimple_call (copy_stmt)
2421 		   && gimple_call_internal_p (copy_stmt)
2422 		   && (gimple_call_internal_fn (copy_stmt)
2423 		       == IFN_ABNORMAL_DISPATCHER)
2424 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2425 	    nonlocal_goto = false;
2426 	  else
2427 	    make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2428 	}
2429 
2430       if ((can_throw || nonlocal_goto)
2431 	  && gimple_in_ssa_p (cfun))
2432 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2433 					  can_throw, nonlocal_goto);
2434     }
2435   return need_debug_cleanup;
2436 }
2437 
2438 /* Copy the PHIs.  All blocks and edges have been copied, some blocks
2439    were possibly split and new outgoing EH edges inserted.
2440    BB points to the block of the original function and AUX pointers link
2441    the original and newly copied blocks.  */
2442 
2443 static void
2444 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2445 {
2446   basic_block const new_bb = (basic_block) bb->aux;
2447   edge_iterator ei;
2448   gphi *phi;
2449   gphi_iterator si;
2450   edge new_edge;
2451   bool inserted = false;
2452 
2453   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2454     {
2455       tree res, new_res;
2456       gphi *new_phi;
2457 
2458       phi = si.phi ();
2459       res = PHI_RESULT (phi);
2460       new_res = res;
2461       if (!virtual_operand_p (res))
2462 	{
2463 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2464 	  if (EDGE_COUNT (new_bb->preds) == 0)
2465 	    {
2466 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2467 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2468 	    }
2469 	  else
2470 	    {
2471 	      new_phi = create_phi_node (new_res, new_bb);
2472 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2473 		{
2474 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2475 					     bb);
2476 		  tree arg;
2477 		  tree new_arg;
2478 		  edge_iterator ei2;
2479 		  location_t locus;
2480 
2481 		  /* When doing partial cloning, we allow PHIs on the entry
2482 		     block as long as all the arguments are the same.
2483 		     Find any incoming edge to get the argument to copy.  */
2484 		  if (!old_edge)
2485 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2486 		      if (!old_edge->src->aux)
2487 			break;
2488 
2489 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2490 		  new_arg = arg;
2491 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2492 		  gcc_assert (new_arg);
2493 		  /* With return slot optimization we can end up with
2494 		     non-gimple (foo *)&this->m, fix that here.  */
2495 		  if (TREE_CODE (new_arg) != SSA_NAME
2496 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2497 		      && !is_gimple_val (new_arg))
2498 		    {
2499 		      gimple_seq stmts = NULL;
2500 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2501 						      NULL);
2502 		      gsi_insert_seq_on_edge (new_edge, stmts);
2503 		      inserted = true;
2504 		    }
2505 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2506 		  if (LOCATION_BLOCK (locus))
2507 		    {
2508 		      tree *n;
2509 		      n = id->decl_map->get (LOCATION_BLOCK (locus));
2510 		      gcc_assert (n);
2511 		      locus = set_block (locus, *n);
2512 		    }
2513 		  else
2514 		    locus = LOCATION_LOCUS (locus);
2515 
2516 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2517 		}
2518 	    }
2519 	}
2520     }
2521 
2522   /* Commit the delayed edge insertions.  */
2523   if (inserted)
2524     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2525       gsi_commit_one_edge_insert (new_edge, NULL);
2526 }
2527 
2528 
2529 /* Wrapper for remap_decl so it can be used as a callback.  */
2530 
2531 static tree
2532 remap_decl_1 (tree decl, void *data)
2533 {
2534   return remap_decl (decl, (copy_body_data *) data);
2535 }
2536 
2537 /* Build struct function and associated data structures for the new clone
2538    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2539    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2540 
2541 static void
2542 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2543 {
2544   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2545   gcov_type count_scale;
2546 
2547   if (!DECL_ARGUMENTS (new_fndecl))
2548     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2549   if (!DECL_RESULT (new_fndecl))
2550     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2551 
2552   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2553     count_scale
2554         = GCOV_COMPUTE_SCALE (count,
2555                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2556   else
2557     count_scale = REG_BR_PROB_BASE;
2558 
2559   /* Register specific tree functions.  */
2560   gimple_register_cfg_hooks ();
2561 
2562   /* Get clean struct function.  */
2563   push_struct_function (new_fndecl);
2564 
2565   /* We will rebuild these, so just sanity check that they are empty.  */
2566   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2567   gcc_assert (cfun->local_decls == NULL);
2568   gcc_assert (cfun->cfg == NULL);
2569   gcc_assert (cfun->decl == new_fndecl);
2570 
2571   /* Copy items we preserve during cloning.  */
2572   cfun->static_chain_decl = src_cfun->static_chain_decl;
2573   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2574   cfun->function_end_locus = src_cfun->function_end_locus;
2575   cfun->curr_properties = src_cfun->curr_properties;
2576   cfun->last_verified = src_cfun->last_verified;
2577   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2578   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2579   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2580   cfun->stdarg = src_cfun->stdarg;
2581   cfun->after_inlining = src_cfun->after_inlining;
2582   cfun->can_throw_non_call_exceptions
2583     = src_cfun->can_throw_non_call_exceptions;
2584   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2585   cfun->returns_struct = src_cfun->returns_struct;
2586   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2587 
2588   init_empty_tree_cfg ();
2589 
2590   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2591   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2592     (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2593      REG_BR_PROB_BASE);
2594   ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2595     = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2596   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2597     (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2598      REG_BR_PROB_BASE);
2599   EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2600     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2601   if (src_cfun->eh)
2602     init_eh_for_function ();
2603 
2604   if (src_cfun->gimple_df)
2605     {
2606       init_tree_ssa (cfun);
2607       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2608       if (cfun->gimple_df->in_ssa_p)
2609 	init_ssa_operands (cfun);
2610     }
2611 }
2612 
2613 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2614    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2615    successor has multiple predecessors, reset the debug stmts' values;
2616    otherwise keep them.  */
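/* Illustrative example (hypothetical, added for exposition): if NEW_BB ends
   with a throwing call followed by a debug bind such as
     # DEBUG x => x_3
   the bind is moved (for the last edge) or copied (for the other edges) to
   the start of each successor; when a successor has several predecessors the
   bound value is reset to
     # DEBUG x => NULL
   so that no possibly wrong value is claimed on that path.  */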
2617 
2618 static void
2619 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2620 {
2621   edge e;
2622   edge_iterator ei;
2623   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2624 
2625   if (gsi_end_p (si)
2626       || gsi_one_before_end_p (si)
2627       || !(stmt_can_throw_internal (gsi_stmt (si))
2628 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2629     return;
2630 
2631   FOR_EACH_EDGE (e, ei, new_bb->succs)
2632     {
2633       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2634       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2635       while (is_gimple_debug (gsi_stmt (ssi)))
2636 	{
2637 	  gimple *stmt = gsi_stmt (ssi);
2638 	  gdebug *new_stmt;
2639 	  tree var;
2640 	  tree value;
2641 
2642 	  /* For the last edge move the debug stmts instead of copying
2643 	     them.  */
2644 	  if (ei_one_before_end_p (ei))
2645 	    {
2646 	      si = ssi;
2647 	      gsi_prev (&ssi);
2648 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2649 		gimple_debug_bind_reset_value (stmt);
2650 	      gsi_remove (&si, false);
2651 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2652 	      continue;
2653 	    }
2654 
2655 	  if (gimple_debug_bind_p (stmt))
2656 	    {
2657 	      var = gimple_debug_bind_get_var (stmt);
2658 	      if (single_pred_p (e->dest))
2659 		{
2660 		  value = gimple_debug_bind_get_value (stmt);
2661 		  value = unshare_expr (value);
2662 		}
2663 	      else
2664 		value = NULL_TREE;
2665 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2666 	    }
2667 	  else if (gimple_debug_source_bind_p (stmt))
2668 	    {
2669 	      var = gimple_debug_source_bind_get_var (stmt);
2670 	      value = gimple_debug_source_bind_get_value (stmt);
2671 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2672 	    }
2673 	  else
2674 	    gcc_unreachable ();
2675 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2676 	  id->debug_stmts.safe_push (new_stmt);
2677 	  gsi_prev (&ssi);
2678 	}
2679     }
2680 }
2681 
2682 /* Make a copy of the sub-loops of SRC_PARENT and place them
2683    as sub-loops of DEST_PARENT.  */
2684 
2685 static void
2686 copy_loops (copy_body_data *id,
2687 	    struct loop *dest_parent, struct loop *src_parent)
2688 {
2689   struct loop *src_loop = src_parent->inner;
2690   while (src_loop)
2691     {
2692       if (!id->blocks_to_copy
2693 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2694 	{
2695 	  struct loop *dest_loop = alloc_loop ();
2696 
2697 	  /* Assign the new loop its header and latch and associate
2698 	     those with the new loop.  */
2699 	  dest_loop->header = (basic_block)src_loop->header->aux;
2700 	  dest_loop->header->loop_father = dest_loop;
2701 	  if (src_loop->latch != NULL)
2702 	    {
2703 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2704 	      dest_loop->latch->loop_father = dest_loop;
2705 	    }
2706 
2707 	  /* Copy loop meta-data.  */
2708 	  copy_loop_info (src_loop, dest_loop);
2709 
2710 	  /* Finally place it into the loop array and the loop tree.  */
2711 	  place_new_loop (cfun, dest_loop);
2712 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2713 
2714 	  dest_loop->safelen = src_loop->safelen;
2715 	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
2716 	  if (src_loop->force_vectorize)
2717 	    {
2718 	      dest_loop->force_vectorize = true;
2719 	      cfun->has_force_vectorize_loops = true;
2720 	    }
2721 	  if (src_loop->simduid)
2722 	    {
2723 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2724 	      cfun->has_simduid_loops = true;
2725 	    }
2726 	  if (id->src_cfun->last_clique != 0)
2727 	    dest_loop->owned_clique
2728 	      = remap_dependence_clique (id,
2729 					 src_loop->owned_clique
2730 					 ? src_loop->owned_clique : 1);
2731 	  /* Recurse.  */
2732 	  copy_loops (id, dest_loop, src_loop);
2733 	}
2734       src_loop = src_loop->next;
2735     }
2736 }
2737 
2738 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB.  */
2739 
2740 void
2741 redirect_all_calls (copy_body_data * id, basic_block bb)
2742 {
2743   gimple_stmt_iterator si;
2744   gimple *last = last_stmt (bb);
2745   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2746     {
2747       gimple *stmt = gsi_stmt (si);
2748       if (is_gimple_call (stmt))
2749 	{
2750 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2751 	  if (edge)
2752 	    {
2753 	      edge->redirect_call_stmt_to_callee ();
2754 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2755 		gimple_purge_dead_eh_edges (bb);
2756 	    }
2757 	}
2758     }
2759 }
2760 
2761 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2762    with each bb's frequency. Used when NODE has a 0-weight entry
2763    but we are about to inline it into a non-zero count call bb.
2764    See the comments for handle_missing_profiles() in predict.c for
2765    when this can happen for COMDATs.  */
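/* Illustrative example (hypothetical, added for exposition): each block gets
     bb->count = COUNT * bb->frequency / BB_FREQ_MAX   (roughly),
   so with COUNT == 1000 a block whose frequency is BB_FREQ_MAX / 4 receives a
   count of 250, and each outgoing edge then gets
   e->src->count * e->probability / REG_BR_PROB_BASE.  */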
2766 
2767 void
2768 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2769 {
2770   basic_block bb;
2771   edge_iterator ei;
2772   edge e;
2773   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2774 
2775   FOR_ALL_BB_FN(bb, fn)
2776     {
2777       bb->count = apply_scale (count,
2778                                GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2779       FOR_EACH_EDGE (e, ei, bb->succs)
2780         e->count = apply_probability (e->src->count, e->probability);
2781     }
2782 }
2783 
2784 /* Make a copy of the body of FN so that it can be inserted inline in
2785    another function.  Walks FN via CFG, returns new fndecl.  */
2786 
2787 static tree
2788 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2789 	       basic_block entry_block_map, basic_block exit_block_map,
2790 	       basic_block new_entry)
2791 {
2792   tree callee_fndecl = id->src_fn;
2793   /* Original cfun for the callee, doesn't change.  */
2794   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2795   struct function *cfun_to_copy;
2796   basic_block bb;
2797   tree new_fndecl = NULL;
2798   bool need_debug_cleanup = false;
2799   gcov_type count_scale;
2800   int last;
2801   int incoming_frequency = 0;
2802   gcov_type incoming_count = 0;
2803 
2804   /* This can happen for COMDAT routines that end up with 0 counts
2805      despite being called (see the comments for handle_missing_profiles()
2806      in predict.c as to why). Apply counts to the blocks in the callee
2807      before inlining, using the guessed edge frequencies, so that we don't
2808      end up with a 0-count inline body which can confuse downstream
2809      optimizations such as function splitting.  */
2810   if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2811     {
2812       /* Apply the larger of the call bb count and the total incoming
2813          call edge count to the callee.  */
2814       gcov_type in_count = 0;
2815       struct cgraph_edge *in_edge;
2816       for (in_edge = id->src_node->callers; in_edge;
2817            in_edge = in_edge->next_caller)
2818         in_count += in_edge->count;
2819       freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2820     }
2821 
2822   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2823     count_scale
2824         = GCOV_COMPUTE_SCALE (count,
2825                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2826   else
2827     count_scale = REG_BR_PROB_BASE;
2828 
2829   /* Register specific tree functions.  */
2830   gimple_register_cfg_hooks ();
2831 
2832   /* If we are inlining just a region of the function, make sure to connect
2833      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2834      be part of a loop, we must compute the frequency and probability of
2835      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2836      probabilities of edges incoming from the nonduplicated region.  */
2837   if (new_entry)
2838     {
2839       edge e;
2840       edge_iterator ei;
2841 
2842       FOR_EACH_EDGE (e, ei, new_entry->preds)
2843 	if (!e->src->aux)
2844 	  {
2845 	    incoming_frequency += EDGE_FREQUENCY (e);
2846 	    incoming_count += e->count;
2847 	  }
2848       incoming_count = apply_scale (incoming_count, count_scale);
2849       incoming_frequency
2850 	= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2851       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2852       ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2853     }
2854 
2855   /* Must have a CFG here at this point.  */
2856   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2857 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2858 
2859   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2860 
2861   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2862   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2863   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2864   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2865 
2866   /* Duplicate any exception-handling regions.  */
2867   if (cfun->eh)
2868     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2869 				       remap_decl_1, id);
2870 
2871   /* Use the aux pointers to map the original blocks to their copies.  */
2872   FOR_EACH_BB_FN (bb, cfun_to_copy)
2873     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2874       {
2875 	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2876 	bb->aux = new_bb;
2877 	new_bb->aux = bb;
2878 	new_bb->loop_father = entry_block_map->loop_father;
2879       }
2880 
2881   last = last_basic_block_for_fn (cfun);
2882 
2883   /* Now that we've duplicated the blocks, duplicate their edges.  */
2884   basic_block abnormal_goto_dest = NULL;
2885   if (id->call_stmt
2886       && stmt_can_make_abnormal_goto (id->call_stmt))
2887     {
2888       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2889 
2890       bb = gimple_bb (id->call_stmt);
2891       gsi_next (&gsi);
2892       if (gsi_end_p (gsi))
2893 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2894     }
2895   FOR_ALL_BB_FN (bb, cfun_to_copy)
2896     if (!id->blocks_to_copy
2897 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2898       need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2899 					       abnormal_goto_dest);
2900 
2901   if (new_entry)
2902     {
2903       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2904       e->probability = REG_BR_PROB_BASE;
2905       e->count = incoming_count;
2906     }
2907 
2908   /* Duplicate the loop tree, if available and wanted.  */
2909   if (loops_for_fn (src_cfun) != NULL
2910       && current_loops != NULL)
2911     {
2912       copy_loops (id, entry_block_map->loop_father,
2913 		  get_loop (src_cfun, 0));
2914       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2915       loops_state_set (LOOPS_NEED_FIXUP);
2916     }
2917 
2918   /* If the loop tree in the source function needed fixup, mark the
2919      destination loop tree for fixup, too.  */
2920   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2921     loops_state_set (LOOPS_NEED_FIXUP);
2922 
2923   if (gimple_in_ssa_p (cfun))
2924     FOR_ALL_BB_FN (bb, cfun_to_copy)
2925       if (!id->blocks_to_copy
2926 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2927 	copy_phis_for_bb (bb, id);
2928 
2929   FOR_ALL_BB_FN (bb, cfun_to_copy)
2930     if (bb->aux)
2931       {
2932 	if (need_debug_cleanup
2933 	    && bb->index != ENTRY_BLOCK
2934 	    && bb->index != EXIT_BLOCK)
2935 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2936 	/* Update call edge destinations.  This can not be done before loop
2937 	   info is updated, because we may split basic blocks.  */
2938 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2939 	    && bb->index != ENTRY_BLOCK
2940 	    && bb->index != EXIT_BLOCK)
2941 	  redirect_all_calls (id, (basic_block)bb->aux);
2942 	((basic_block)bb->aux)->aux = NULL;
2943 	bb->aux = NULL;
2944       }
2945 
2946   /* Zero out AUX fields of blocks newly created during EH edge
2947      insertion.  */
2948   for (; last < last_basic_block_for_fn (cfun); last++)
2949     {
2950       if (need_debug_cleanup)
2951 	maybe_move_debug_stmts_to_successors (id,
2952 					      BASIC_BLOCK_FOR_FN (cfun, last));
2953       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2954       /* Update call edge destinations.  This can not be done before loop
2955 	 info is updated, because we may split basic blocks.  */
2956       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2957 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2958     }
2959   entry_block_map->aux = NULL;
2960   exit_block_map->aux = NULL;
2961 
2962   if (id->eh_map)
2963     {
2964       delete id->eh_map;
2965       id->eh_map = NULL;
2966     }
2967   if (id->dependence_map)
2968     {
2969       delete id->dependence_map;
2970       id->dependence_map = NULL;
2971     }
2972 
2973   return new_fndecl;
2974 }
2975 
2976 /* Copy the debug STMT using ID.  We deal with these statements in a
2977    special way: if any variable in their VALUE expression wasn't
2978    remapped yet, we won't remap it, because that would get decl uids
2979    out of sync, causing codegen differences between -g and -g0.  If
2980    this arises, we drop the VALUE expression altogether.  */
2981 
2982 static void
2983 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2984 {
2985   tree t, *n;
2986   struct walk_stmt_info wi;
2987 
2988   if (gimple_block (stmt))
2989     {
2990       n = id->decl_map->get (gimple_block (stmt));
2991       gimple_set_block (stmt, n ? *n : id->block);
2992     }
2993 
2994   /* Remap all the operands in COPY.  */
2995   memset (&wi, 0, sizeof (wi));
2996   wi.info = id;
2997 
2998   processing_debug_stmt = 1;
2999 
3000   if (gimple_debug_source_bind_p (stmt))
3001     t = gimple_debug_source_bind_get_var (stmt);
3002   else
3003     t = gimple_debug_bind_get_var (stmt);
3004 
3005   if (TREE_CODE (t) == PARM_DECL && id->debug_map
3006       && (n = id->debug_map->get (t)))
3007     {
3008       gcc_assert (VAR_P (*n));
3009       t = *n;
3010     }
3011   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3012     /* T is a non-localized variable.  */;
3013   else
3014     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3015 
3016   if (gimple_debug_bind_p (stmt))
3017     {
3018       gimple_debug_bind_set_var (stmt, t);
3019 
3020       if (gimple_debug_bind_has_value_p (stmt))
3021 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3022 		   remap_gimple_op_r, &wi, NULL);
3023 
3024       /* Punt if any decl couldn't be remapped.  */
3025       if (processing_debug_stmt < 0)
3026 	gimple_debug_bind_reset_value (stmt);
3027     }
3028   else if (gimple_debug_source_bind_p (stmt))
3029     {
3030       gimple_debug_source_bind_set_var (stmt, t);
3031       /* When inlining and the source bind refers to one of the optimized
3032 	 away parameters, change the source bind into a normal debug bind
3033 	 referring to the corresponding DEBUG_EXPR_DECL that should have
3034 	 been bound before the call stmt.  */
3035       t = gimple_debug_source_bind_get_value (stmt);
3036       if (t != NULL_TREE
3037 	  && TREE_CODE (t) == PARM_DECL
3038 	  && id->call_stmt)
3039 	{
3040 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3041 	  unsigned int i;
3042 	  if (debug_args != NULL)
3043 	    {
3044 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3045 		if ((**debug_args)[i] == DECL_ORIGIN (t)
3046 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3047 		  {
3048 		    t = (**debug_args)[i + 1];
3049 		    stmt->subcode = GIMPLE_DEBUG_BIND;
3050 		    gimple_debug_bind_set_value (stmt, t);
3051 		    break;
3052 		  }
3053 	    }
3054 	}
3055       if (gimple_debug_source_bind_p (stmt))
3056 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3057 		   remap_gimple_op_r, &wi, NULL);
3058     }
3059 
3060   processing_debug_stmt = 0;
3061 
3062   update_stmt (stmt);
3063 }
3064 
3065 /* Process deferred debug stmts.  In order to give values better odds
3066    of being successfully remapped, we delay the processing of debug
3067    stmts until all other stmts that might require remapping are
3068    processed.  */
3069 
3070 static void
3071 copy_debug_stmts (copy_body_data *id)
3072 {
3073   size_t i;
3074   gdebug *stmt;
3075 
3076   if (!id->debug_stmts.exists ())
3077     return;
3078 
3079   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3080     copy_debug_stmt (stmt, id);
3081 
3082   id->debug_stmts.release ();
3083 }
3084 
3085 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3086    another function.  */
3087 
3088 static tree
3089 copy_tree_body (copy_body_data *id)
3090 {
3091   tree fndecl = id->src_fn;
3092   tree body = DECL_SAVED_TREE (fndecl);
3093 
3094   walk_tree (&body, copy_tree_body_r, id, NULL);
3095 
3096   return body;
3097 }
3098 
3099 /* Make a copy of the body of FN so that it can be inserted inline in
3100    another function.  */
3101 
3102 static tree
3103 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
3104 	   basic_block entry_block_map, basic_block exit_block_map,
3105 	   basic_block new_entry)
3106 {
3107   tree fndecl = id->src_fn;
3108   tree body;
3109 
3110   /* If this body has a CFG, walk CFG and copy.  */
3111   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3112   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
3113 			new_entry);
3114   copy_debug_stmts (id);
3115 
3116   return body;
3117 }
3118 
3119 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3120    defined in function FN, or of a data member thereof.  */
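/* Illustrative example (hypothetical, added for exposition): for a local
     int buf[4];
   declared in FN, both  &buf  and  &buf[2]  satisfy this predicate, while the
   address of a global or of a local of some other function does not.  */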
3121 
3122 static bool
3123 self_inlining_addr_expr (tree value, tree fn)
3124 {
3125   tree var;
3126 
3127   if (TREE_CODE (value) != ADDR_EXPR)
3128     return false;
3129 
3130   var = get_base_address (TREE_OPERAND (value, 0));
3131 
3132   return var && auto_var_in_fn_p (var, fn);
3133 }
3134 
3135 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3136    lexical block and line number information from BASE_STMT, if given,
3137    or from the last stmt of the block otherwise.  */
3138 
3139 static gimple *
3140 insert_init_debug_bind (copy_body_data *id,
3141 			basic_block bb, tree var, tree value,
3142 			gimple *base_stmt)
3143 {
3144   gimple *note;
3145   gimple_stmt_iterator gsi;
3146   tree tracked_var;
3147 
3148   if (!gimple_in_ssa_p (id->src_cfun))
3149     return NULL;
3150 
3151   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3152     return NULL;
3153 
3154   tracked_var = target_for_debug_bind (var);
3155   if (!tracked_var)
3156     return NULL;
3157 
3158   if (bb)
3159     {
3160       gsi = gsi_last_bb (bb);
3161       if (!base_stmt && !gsi_end_p (gsi))
3162 	base_stmt = gsi_stmt (gsi);
3163     }
3164 
3165   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3166 
3167   if (bb)
3168     {
3169       if (!gsi_end_p (gsi))
3170 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3171       else
3172 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3173     }
3174 
3175   return note;
3176 }
3177 
3178 static void
3179 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3180 {
3181   /* If VAR represents a zero-sized variable, it's possible that the
3182      assignment statement may result in no gimple statements.  */
3183   if (init_stmt)
3184     {
3185       gimple_stmt_iterator si = gsi_last_bb (bb);
3186 
3187       /* We can end up with init statements that store to a non-register
3188          from a rhs with a conversion.  Handle that here by forcing the
3189 	 rhs into a temporary.  gimple_regimplify_operands is not
3190 	 prepared to do this for us.  */
3191       if (!is_gimple_debug (init_stmt)
3192 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3193 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3194 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3195 	{
3196 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3197 			     gimple_expr_type (init_stmt),
3198 			     gimple_assign_rhs1 (init_stmt));
3199 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3200 					  GSI_NEW_STMT);
3201 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3202 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3203 	}
3204       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3205       gimple_regimplify_operands (init_stmt, &si);
3206 
3207       if (!is_gimple_debug (init_stmt))
3208 	{
3209 	  tree def = gimple_assign_lhs (init_stmt);
3210 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3211 	}
3212     }
3213 }
3214 
3215 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3216    at the end of BB.  When BB is NULL, we return the init statement to be
3217    output later.  */
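/* Schematically, when a call such as

     y = sq (b);	int sq (int a) { return a * a; }

   is inlined, the PARM_DECL A is replaced by a new local variable, and
   unless the argument can be propagated directly, an init statement
   roughly of the form a = b is produced for BB.  */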
3218 static gimple *
3219 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3220 		     basic_block bb, tree *vars)
3221 {
3222   gimple *init_stmt = NULL;
3223   tree var;
3224   tree rhs = value;
3225   tree def = (gimple_in_ssa_p (cfun)
3226 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3227 
3228   if (value
3229       && value != error_mark_node
3230       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3231     {
3232       /* If we can match up types by promotion/demotion do so.  */
3233       if (fold_convertible_p (TREE_TYPE (p), value))
3234 	rhs = fold_convert (TREE_TYPE (p), value);
3235       else
3236 	{
3237 	  /* ???  For valid programs we should not end up here.
3238 	     Still if we end up with truly mismatched types here, fall back
3239 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3240 	     GIMPLE to the following passes.  */
3241 	  if (!is_gimple_reg_type (TREE_TYPE (value))
3242 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3243 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3244 	  else
3245 	    rhs = build_zero_cst (TREE_TYPE (p));
3246 	}
3247     }
3248 
3249   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3250      here since the type of this decl must be visible to the calling
3251      function.  */
3252   var = copy_decl_to_var (p, id);
3253 
3254   /* Declare this new variable.  */
3255   DECL_CHAIN (var) = *vars;
3256   *vars = var;
3257 
3258   /* Make gimplifier happy about this variable.  */
3259   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3260 
3261   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3262      we would not need to create a new variable here at all, if it
3263      weren't for debug info.  Still, we can just use the argument
3264      value.  */
3265   if (TREE_READONLY (p)
3266       && !TREE_ADDRESSABLE (p)
3267       && value && !TREE_SIDE_EFFECTS (value)
3268       && !def)
3269     {
3270       /* We may produce non-gimple trees by adding NOPs or introduce
3271 	 invalid sharing when the operand is not really constant.
3272 	 It is not a big deal to prohibit constant propagation here as
3273 	 we will constant propagate in the DOM1 pass anyway.  */
3274       if (is_gimple_min_invariant (value)
3275 	  && useless_type_conversion_p (TREE_TYPE (p),
3276 						 TREE_TYPE (value))
3277 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3278 	     the base variable isn't a local variable of the inlined
3279 	     function, e.g., when doing recursive inlining, direct or
3280 	     mutually-recursive or whatever, which is why we don't
3281 	     just test whether fn == current_function_decl.  */
3282 	  && ! self_inlining_addr_expr (value, fn))
3283 	{
3284 	  insert_decl_map (id, p, value);
3285 	  insert_debug_decl_map (id, p, var);
3286 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3287 	}
3288     }
3289 
3290   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3291      that way, when the PARM_DECL is encountered, it will be
3292      automatically replaced by the VAR_DECL.  */
3293   insert_decl_map (id, p, var);
3294 
3295   /* Even if P was TREE_READONLY, the new VAR should not be.
3296      In the original code, we would have constructed a
3297      temporary, and then the function body would have never
3298      changed the value of P.  However, now, we will be
3299      constructing VAR directly.  The constructor body may
3300      change its value multiple times as it is being
3301      constructed.  Therefore, it must not be TREE_READONLY;
3302      the back-end assumes that a TREE_READONLY variable is
3303      assigned to only once.  */
3304   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3305     TREE_READONLY (var) = 0;
3306 
3307   /* If there is no setup required and we are in SSA, take the easy route
3308      replacing all SSA names representing the function parameter by the
3309      SSA name passed to function.
3310 
3311      We need to construct a map for the variable anyway, as it might be
3312      used in different SSA names when the parameter is set in the function.
3313 
3314      Do the replacement at -O0 for const arguments replaced by a constant.
3315      This is important for builtin_constant_p and other constructs requiring
3316      a constant argument to be visible in the inlined function body.  */
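  /* For instance, an always_inline wrapper that applies
     __builtin_constant_p to a const-qualified parameter relies on the
     constant actual argument remaining visible in the inlined body even
     at -O0.  */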
3317   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3318       && (optimize
3319           || (TREE_READONLY (p)
3320 	      && is_gimple_min_invariant (rhs)))
3321       && (TREE_CODE (rhs) == SSA_NAME
3322 	  || is_gimple_min_invariant (rhs))
3323       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3324     {
3325       insert_decl_map (id, def, rhs);
3326       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3327     }
3328 
3329   /* If the value of the argument is never used, don't bother initializing
3330      it.  */
3331   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3332     {
3333       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3334       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3335     }
3336 
3337   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3338      the argument to the proper type in case it was promoted.  */
3339   if (value)
3340     {
3341       if (rhs == error_mark_node)
3342 	{
3343 	  insert_decl_map (id, p, var);
3344 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3345 	}
3346 
3347       STRIP_USELESS_TYPE_CONVERSION (rhs);
3348 
3349       /* If we are in SSA form, properly remap the default definition
3350          or assign to a dummy SSA name if the parameter is unused and
3351 	 we are not optimizing.  */
3352       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3353 	{
3354 	  if (def)
3355 	    {
3356 	      def = remap_ssa_name (def, id);
3357 	      init_stmt = gimple_build_assign (def, rhs);
3358 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3359 	      set_ssa_default_def (cfun, var, NULL);
3360 	    }
3361 	  else if (!optimize)
3362 	    {
3363 	      def = make_ssa_name (var);
3364 	      init_stmt = gimple_build_assign (def, rhs);
3365 	    }
3366 	}
3367       else
3368         init_stmt = gimple_build_assign (var, rhs);
3369 
3370       if (bb && init_stmt)
3371         insert_init_stmt (id, bb, init_stmt);
3372     }
3373   return init_stmt;
3374 }
3375 
3376 /* Generate code to initialize the parameters of the function at the
3377    top of the stack in ID from the GIMPLE_CALL STMT.  */
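/* For example, if the callee has a parameter of variably modified type
   whose size refers to a later parameter, the first loop below creates
   the replacement variables and the second loop re-remaps their types so
   that the remapped array type picks up the remapped size variable.  */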
3378 
3379 static void
3380 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3381 			       tree fn, basic_block bb)
3382 {
3383   tree parms;
3384   size_t i;
3385   tree p;
3386   tree vars = NULL_TREE;
3387   tree static_chain = gimple_call_chain (stmt);
3388 
3389   /* Figure out what the parameters are.  */
3390   parms = DECL_ARGUMENTS (fn);
3391 
3392   /* Loop through the parameter declarations, replacing each with an
3393      equivalent VAR_DECL, appropriately initialized.  */
3394   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3395     {
3396       tree val;
3397       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3398       setup_one_parameter (id, p, val, fn, bb, &vars);
3399     }
3400   /* After remapping parameters remap their types.  This has to be done
3401      in a second loop over all parameters to appropriately remap
3402      variable sized arrays when the size is specified in a
3403      parameter following the array.  */
3404   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3405     {
3406       tree *varp = id->decl_map->get (p);
3407       if (varp && VAR_P (*varp))
3408 	{
3409 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3410 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3411 	  tree var = *varp;
3412 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3413 	  /* Also remap the default definition if it was remapped
3414 	     to the default definition of the parameter replacement
3415 	     by the parameter setup.  */
3416 	  if (def)
3417 	    {
3418 	      tree *defp = id->decl_map->get (def);
3419 	      if (defp
3420 		  && TREE_CODE (*defp) == SSA_NAME
3421 		  && SSA_NAME_VAR (*defp) == var)
3422 		TREE_TYPE (*defp) = TREE_TYPE (var);
3423 	    }
3424 	}
3425     }
3426 
3427   /* Initialize the static chain.  */
3428   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3429   gcc_assert (fn != current_function_decl);
3430   if (p)
3431     {
3432       /* No static chain?  Seems like a bug in tree-nested.c.  */
3433       gcc_assert (static_chain);
3434 
3435       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3436     }
3437 
3438   declare_inline_vars (id->block, vars);
3439 }
3440 
3441 
3442 /* Declare a return variable to replace the RESULT_DECL for the
3443    function we are calling.  The new variable (or the caller's own
3444    destination, when it can be reused) is registered as the
3445    replacement for the RESULT_DECL in the inlined body.
3446 
3447    RETURN_SLOT, if non-null, is the place where the result is to be
3448    stored.  It is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST,
3449    if non-null, was the LHS of the MODIFY_EXPR to which this call is the RHS.
3450 
3451    RETURN_BOUNDS holds a destination for returned bounds.
3452 
3453    The return value is a (possibly null) value that holds the result
3454    as seen by the caller.  */
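/* For example, when inlining s = make_s (), MODIFY_DEST is S; if S can
   safely stand in for the callee's RESULT_DECL it is reused directly,
   otherwise a fresh temporary is declared and the returned use
   expression refers to that temporary.  */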
3455 
3456 static tree
3457 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3458 			 tree return_bounds, basic_block entry_bb)
3459 {
3460   tree callee = id->src_fn;
3461   tree result = DECL_RESULT (callee);
3462   tree callee_type = TREE_TYPE (result);
3463   tree caller_type;
3464   tree var, use;
3465 
3466   /* Handle type-mismatches in the function declaration return type
3467      vs. the call expression.  */
3468   if (modify_dest)
3469     caller_type = TREE_TYPE (modify_dest);
3470   else
3471     caller_type = TREE_TYPE (TREE_TYPE (callee));
3472 
3473   /* We don't need to do anything for functions that don't return anything.  */
3474   if (VOID_TYPE_P (callee_type))
3475     return NULL_TREE;
3476 
3477   /* If there was a return slot, then the return value is the
3478      dereferenced address of that object.  */
3479   if (return_slot)
3480     {
3481       /* The front end shouldn't have used both return_slot and
3482 	 a modify expression.  */
3483       gcc_assert (!modify_dest);
3484       if (DECL_BY_REFERENCE (result))
3485 	{
3486 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3487 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3488 
3489 	  /* We are going to construct *&return_slot and we can't do that
3490 	     for variables that are not believed to be addressable.
3491 
3492 	     FIXME: This check can possibly trigger, because values returned
3493 	     via the return slot optimization are not believed to have their
3494 	     address taken by alias analysis.  */
3495 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3496 	  var = return_slot_addr;
3497 	}
3498       else
3499 	{
3500 	  var = return_slot;
3501 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3502 	  if (TREE_ADDRESSABLE (result))
3503 	    mark_addressable (var);
3504 	}
3505       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3506            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3507 	  && !DECL_GIMPLE_REG_P (result)
3508 	  && DECL_P (var))
3509 	DECL_GIMPLE_REG_P (var) = 0;
3510       use = NULL;
3511       goto done;
3512     }
3513 
3514   /* All types requiring non-trivial constructors should have been handled.  */
3515   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3516 
3517   /* Attempt to avoid creating a new temporary variable.  */
3518   if (modify_dest
3519       && TREE_CODE (modify_dest) != SSA_NAME)
3520     {
3521       bool use_it = false;
3522 
3523       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3524       if (!useless_type_conversion_p (callee_type, caller_type))
3525 	use_it = false;
3526 
3527       /* ??? If we're assigning to a variable sized type, then we must
3528 	 reuse the destination variable, because we've no good way to
3529 	 create variable sized temporaries at this point.  */
3530       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3531 	use_it = true;
3532 
3533       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3534 	 reuse it as the result of the call directly.  Don't do this if
3535 	 it would promote MODIFY_DEST to addressable.  */
3536       else if (TREE_ADDRESSABLE (result))
3537 	use_it = false;
3538       else
3539 	{
3540 	  tree base_m = get_base_address (modify_dest);
3541 
3542 	  /* If the base isn't a decl, then it's a pointer, and we don't
3543 	     know where that's going to go.  */
3544 	  if (!DECL_P (base_m))
3545 	    use_it = false;
3546 	  else if (is_global_var (base_m))
3547 	    use_it = false;
3548 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3549 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3550 		   && !DECL_GIMPLE_REG_P (result)
3551 		   && DECL_GIMPLE_REG_P (base_m))
3552 	    use_it = false;
3553 	  else if (!TREE_ADDRESSABLE (base_m))
3554 	    use_it = true;
3555 	}
3556 
3557       if (use_it)
3558 	{
3559 	  var = modify_dest;
3560 	  use = NULL;
3561 	  goto done;
3562 	}
3563     }
3564 
3565   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3566 
3567   var = copy_result_decl_to_var (result, id);
3568   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3569 
3570   /* Do not have the rest of GCC warn about this variable as it should
3571      not be visible to the user.  */
3572   TREE_NO_WARNING (var) = 1;
3573 
3574   declare_inline_vars (id->block, var);
3575 
3576   /* Build the use expr.  If the return type of the function was
3577      promoted, convert it back to the expected type.  */
3578   use = var;
3579   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3580     {
3581       /* If we can match up types by promotion/demotion do so.  */
3582       if (fold_convertible_p (caller_type, var))
3583 	use = fold_convert (caller_type, var);
3584       else
3585 	{
3586 	  /* ???  For valid programs we should not end up here.
3587 	     Still if we end up with truly mismatched types here, fall back
3588 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3589 	     passes.  */
3590 	  /* Prevent var from being written into SSA form.  */
3591 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3592 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3593 	    DECL_GIMPLE_REG_P (var) = false;
3594 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3595 	    TREE_ADDRESSABLE (var) = true;
3596 	  use = fold_build2 (MEM_REF, caller_type,
3597 			     build_fold_addr_expr (var),
3598 			     build_int_cst (ptr_type_node, 0));
3599 	}
3600     }
3601 
3602   STRIP_USELESS_TYPE_CONVERSION (use);
3603 
3604   if (DECL_BY_REFERENCE (result))
3605     {
3606       TREE_ADDRESSABLE (var) = 1;
3607       var = build_fold_addr_expr (var);
3608     }
3609 
3610  done:
3611   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3612      way, when the RESULT_DECL is encountered, it will be
3613      automatically replaced by the VAR_DECL.
3614 
3615      When returning by reference, ensure that RESULT_DECL remaps to
3616      gimple_val.  */
3617   if (DECL_BY_REFERENCE (result)
3618       && !is_gimple_val (var))
3619     {
3620       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3621       insert_decl_map (id, result, temp);
3622       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3623 	 its default_def SSA_NAME.  */
3624       if (gimple_in_ssa_p (id->src_cfun)
3625 	  && is_gimple_reg (result))
3626 	{
3627 	  temp = make_ssa_name (temp);
3628 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3629 	}
3630       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3631     }
3632   else
3633     insert_decl_map (id, result, var);
3634 
3635   /* Remember this so we can ignore it in remap_decls.  */
3636   id->retvar = var;
3637 
3638   /* If returned bounds are used, then make var for them.  */
3639   if (return_bounds)
3640   {
3641     tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3642     DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3643     TREE_NO_WARNING (bndtemp) = 1;
3644     declare_inline_vars (id->block, bndtemp);
3645 
3646     id->retbnd = bndtemp;
3647     insert_init_stmt (id, entry_bb,
3648 		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3649   }
3650 
3651   return use;
3652 }
3653 
3654 /* Determine if the function can be copied.  If so, return NULL.  If
3655    not, return a string describing the reason for failure.  */
3656 
3657 const char *
3658 copy_forbidden (struct function *fun)
3659 {
3660   const char *reason = fun->cannot_be_copied_reason;
3661 
3662   /* Only examine the function once.  */
3663   if (fun->cannot_be_copied_set)
3664     return reason;
3665 
3666   /* We cannot copy a function that receives a non-local goto
3667      because we cannot remap the destination label used in the
3668      function that is performing the non-local goto.  */
3669   /* ??? Actually, this should be possible, if we work at it.
3670      No doubt there's just a handful of places that simply
3671      assume it doesn't happen and don't substitute properly.  */
3672   if (fun->has_nonlocal_label)
3673     {
3674       reason = G_("function %q+F can never be copied "
3675 		  "because it receives a non-local goto");
3676       goto fail;
3677     }
3678 
3679   if (fun->has_forced_label_in_static)
3680     {
3681       reason = G_("function %q+F can never be copied because it saves "
3682 		  "address of local label in a static variable");
3683       goto fail;
3684     }
3685 
3686  fail:
3687   fun->cannot_be_copied_reason = reason;
3688   fun->cannot_be_copied_set = true;
3689   return reason;
3690 }
3691 
3692 
3693 static const char *inline_forbidden_reason;
3694 
3695 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3696    iff a function cannot be inlined.  Also sets the reason why.  */
3697 
3698 static tree
3699 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3700 			 struct walk_stmt_info *wip)
3701 {
3702   tree fn = (tree) wip->info;
3703   tree t;
3704   gimple *stmt = gsi_stmt (*gsi);
3705 
3706   switch (gimple_code (stmt))
3707     {
3708     case GIMPLE_CALL:
3709       /* Refuse to inline an alloca call unless the user explicitly forced
3710 	 it, since this may change the program's memory overhead drastically
3711 	 when the function using alloca is called in a loop.  In the GCC
3712 	 present in SPEC2000, inlining into schedule_block caused it to
3713 	 require 2GB of RAM instead of 256MB.  Don't do so for alloca calls
3714 	 emitted for VLA objects, as those can't cause unbounded growth
3715 	 (they're always wrapped inside stack_save/stack_restore regions).  */
3716       if (gimple_maybe_alloca_call_p (stmt)
3717 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3718 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3719 	{
3720 	  inline_forbidden_reason
3721 	    = G_("function %q+F can never be inlined because it uses "
3722 		 "alloca (override using the always_inline attribute)");
3723 	  *handled_ops_p = true;
3724 	  return fn;
3725 	}
3726 
3727       t = gimple_call_fndecl (stmt);
3728       if (t == NULL_TREE)
3729 	break;
3730 
3731       /* We cannot inline functions that call setjmp.  */
3732       if (setjmp_call_p (t))
3733 	{
3734 	  inline_forbidden_reason
3735 	    = G_("function %q+F can never be inlined because it uses setjmp");
3736 	  *handled_ops_p = true;
3737 	  return t;
3738 	}
3739 
3740       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3741 	switch (DECL_FUNCTION_CODE (t))
3742 	  {
3743 	    /* We cannot inline functions that take a variable number of
3744 	       arguments.  */
3745 	  case BUILT_IN_VA_START:
3746 	  case BUILT_IN_NEXT_ARG:
3747 	  case BUILT_IN_VA_END:
3748 	    inline_forbidden_reason
3749 	      = G_("function %q+F can never be inlined because it "
3750 		   "uses variable argument lists");
3751 	    *handled_ops_p = true;
3752 	    return t;
3753 
3754 	  case BUILT_IN_LONGJMP:
3755 	    /* We can't inline functions that call __builtin_longjmp at
3756 	       all.  The non-local goto machinery really requires the
3757 	       destination be in a different function.  If we allow the
3758 	       function calling __builtin_longjmp to be inlined into the
3759 	       function calling __builtin_setjmp, Things will Go Awry.  */
3760 	    inline_forbidden_reason
3761 	      = G_("function %q+F can never be inlined because "
3762 		   "it uses setjmp-longjmp exception handling");
3763 	    *handled_ops_p = true;
3764 	    return t;
3765 
3766 	  case BUILT_IN_NONLOCAL_GOTO:
3767 	    /* Similarly.  */
3768 	    inline_forbidden_reason
3769 	      = G_("function %q+F can never be inlined because "
3770 		   "it uses non-local goto");
3771 	    *handled_ops_p = true;
3772 	    return t;
3773 
3774 	  case BUILT_IN_RETURN:
3775 	  case BUILT_IN_APPLY_ARGS:
3776 	    /* If a __builtin_apply_args caller were inlined,
3777 	       it would be saving the arguments of the function it has
3778 	       been inlined into.  Similarly, __builtin_return would
3779 	       return from the function it has been inlined into.  */
3780 	    inline_forbidden_reason
3781 	      = G_("function %q+F can never be inlined because "
3782 		   "it uses __builtin_return or __builtin_apply_args");
3783 	    *handled_ops_p = true;
3784 	    return t;
3785 
3786 	  default:
3787 	    break;
3788 	  }
3789       break;
3790 
3791     case GIMPLE_GOTO:
3792       t = gimple_goto_dest (stmt);
3793 
3794       /* We will not inline a function which uses computed goto.  The
3795 	 addresses of its local labels, which may be tucked into
3796 	 global storage, are of course not constant across
3797 	 instantiations, which causes unexpected behavior.  */
3798       if (TREE_CODE (t) != LABEL_DECL)
3799 	{
3800 	  inline_forbidden_reason
3801 	    = G_("function %q+F can never be inlined "
3802 		 "because it contains a computed goto");
3803 	  *handled_ops_p = true;
3804 	  return t;
3805 	}
3806       break;
3807 
3808     default:
3809       break;
3810     }
3811 
3812   *handled_ops_p = false;
3813   return NULL_TREE;
3814 }
3815 
3816 /* Return true if FNDECL is a function that cannot be inlined into
3817    another one.  */
3818 
3819 static bool
3820 inline_forbidden_p (tree fndecl)
3821 {
3822   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3823   struct walk_stmt_info wi;
3824   basic_block bb;
3825   bool forbidden_p = false;
3826 
3827   /* First check for shared reasons not to copy the code.  */
3828   inline_forbidden_reason = copy_forbidden (fun);
3829   if (inline_forbidden_reason != NULL)
3830     return true;
3831 
3832   /* Next, walk the statements of the function looking for
3833      constructs we can't handle, or that are non-optimal for inlining.  */
3834   hash_set<tree> visited_nodes;
3835   memset (&wi, 0, sizeof (wi));
3836   wi.info = (void *) fndecl;
3837   wi.pset = &visited_nodes;
3838 
3839   FOR_EACH_BB_FN (bb, fun)
3840     {
3841       gimple *ret;
3842       gimple_seq seq = bb_seq (bb);
3843       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3844       forbidden_p = (ret != NULL);
3845       if (forbidden_p)
3846 	break;
3847     }
3848 
3849   return forbidden_p;
3850 }
3851 
3852 /* Return false if the function FNDECL cannot be inlined on account of its
3853    attributes, true otherwise.  */
3854 static bool
3855 function_attribute_inlinable_p (const_tree fndecl)
3856 {
3857   if (targetm.attribute_table)
3858     {
3859       const_tree a;
3860 
3861       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3862 	{
3863 	  const_tree name = TREE_PURPOSE (a);
3864 	  int i;
3865 
3866 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3867 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3868 	      return targetm.function_attribute_inlinable_p (fndecl);
3869 	}
3870     }
3871 
3872   return true;
3873 }
3874 
3875 /* Returns nonzero if FN is a function that does not have any
3876    fundamental inline blocking properties.  */
3877 
3878 bool
3879 tree_inlinable_function_p (tree fn)
3880 {
3881   bool inlinable = true;
3882   bool do_warning;
3883   tree always_inline;
3884 
3885   /* If we've already decided this function shouldn't be inlined,
3886      there's no need to check again.  */
3887   if (DECL_UNINLINABLE (fn))
3888     return false;
3889 
3890   /* We only warn for functions declared `inline' by the user.  */
3891   do_warning = (warn_inline
3892 		&& DECL_DECLARED_INLINE_P (fn)
3893 		&& !DECL_NO_INLINE_WARNING_P (fn)
3894 		&& !DECL_IN_SYSTEM_HEADER (fn));
3895 
3896   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3897 
3898   if (flag_no_inline
3899       && always_inline == NULL)
3900     {
3901       if (do_warning)
3902         warning (OPT_Winline, "function %q+F can never be inlined because it "
3903                  "is suppressed using -fno-inline", fn);
3904       inlinable = false;
3905     }
3906 
3907   else if (!function_attribute_inlinable_p (fn))
3908     {
3909       if (do_warning)
3910         warning (OPT_Winline, "function %q+F can never be inlined because it "
3911                  "uses attributes conflicting with inlining", fn);
3912       inlinable = false;
3913     }
3914 
3915   else if (inline_forbidden_p (fn))
3916     {
3917       /* See if we should warn about uninlinable functions.  Previously,
3918 	 some of these warnings would be issued while trying to expand
3919 	 the function inline, but that would cause multiple warnings
3920 	 about functions that would for example call alloca.  But since
3921 	 this a property of the function, just one warning is enough.
3922 	 As a bonus we can now give more details about the reason why a
3923 	 function is not inlinable.  */
3924       if (always_inline)
3925 	error (inline_forbidden_reason, fn);
3926       else if (do_warning)
3927 	warning (OPT_Winline, inline_forbidden_reason, fn);
3928 
3929       inlinable = false;
3930     }
3931 
3932   /* Squirrel away the result so that we don't have to check again.  */
3933   DECL_UNINLINABLE (fn) = !inlinable;
3934 
3935   return inlinable;
3936 }
3937 
3938 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3939    word size, take a possible memcpy call into account, and return the
3940    cost based on whether we are optimizing for size or speed, per SPEED_P.  */
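/* For instance, assuming MOVE_MAX_PIECES is 8 and the move ratio is large
   enough that no memcpy call is emitted, copying a 24-byte structure is
   estimated at (24 + 8 - 1) / 8 = 3 units, whereas a copy large enough to
   become a memcpy call is estimated at 4.  */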
3941 
3942 int
3943 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3944 {
3945   HOST_WIDE_INT size;
3946 
3947   gcc_assert (!VOID_TYPE_P (type));
3948 
3949   if (TREE_CODE (type) == VECTOR_TYPE)
3950     {
3951       machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3952       machine_mode simd
3953 	= targetm.vectorize.preferred_simd_mode (inner);
3954       int simd_mode_size = GET_MODE_SIZE (simd);
3955       return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3956 	      / simd_mode_size);
3957     }
3958 
3959   size = int_size_in_bytes (type);
3960 
3961   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3962     /* Cost of a memcpy call, 3 arguments and the call.  */
3963     return 4;
3964   else
3965     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3966 }
3967 
3968 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3969 
3970 static int
3971 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3972 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3973 {
3974   switch (code)
3975     {
3976     /* These are "free" conversions, or their presumed cost
3977        is folded into other operations.  */
3978     case RANGE_EXPR:
3979     CASE_CONVERT:
3980     case COMPLEX_EXPR:
3981     case PAREN_EXPR:
3982     case VIEW_CONVERT_EXPR:
3983       return 0;
3984 
3985     /* Assign cost of 1 to usual operations.
3986        ??? We may consider mapping RTL costs to this.  */
3987     case COND_EXPR:
3988     case VEC_COND_EXPR:
3989     case VEC_PERM_EXPR:
3990 
3991     case PLUS_EXPR:
3992     case POINTER_PLUS_EXPR:
3993     case MINUS_EXPR:
3994     case MULT_EXPR:
3995     case MULT_HIGHPART_EXPR:
3996     case FMA_EXPR:
3997 
3998     case ADDR_SPACE_CONVERT_EXPR:
3999     case FIXED_CONVERT_EXPR:
4000     case FIX_TRUNC_EXPR:
4001 
4002     case NEGATE_EXPR:
4003     case FLOAT_EXPR:
4004     case MIN_EXPR:
4005     case MAX_EXPR:
4006     case ABS_EXPR:
4007 
4008     case LSHIFT_EXPR:
4009     case RSHIFT_EXPR:
4010     case LROTATE_EXPR:
4011     case RROTATE_EXPR:
4012 
4013     case BIT_IOR_EXPR:
4014     case BIT_XOR_EXPR:
4015     case BIT_AND_EXPR:
4016     case BIT_NOT_EXPR:
4017 
4018     case TRUTH_ANDIF_EXPR:
4019     case TRUTH_ORIF_EXPR:
4020     case TRUTH_AND_EXPR:
4021     case TRUTH_OR_EXPR:
4022     case TRUTH_XOR_EXPR:
4023     case TRUTH_NOT_EXPR:
4024 
4025     case LT_EXPR:
4026     case LE_EXPR:
4027     case GT_EXPR:
4028     case GE_EXPR:
4029     case EQ_EXPR:
4030     case NE_EXPR:
4031     case ORDERED_EXPR:
4032     case UNORDERED_EXPR:
4033 
4034     case UNLT_EXPR:
4035     case UNLE_EXPR:
4036     case UNGT_EXPR:
4037     case UNGE_EXPR:
4038     case UNEQ_EXPR:
4039     case LTGT_EXPR:
4040 
4041     case CONJ_EXPR:
4042 
4043     case PREDECREMENT_EXPR:
4044     case PREINCREMENT_EXPR:
4045     case POSTDECREMENT_EXPR:
4046     case POSTINCREMENT_EXPR:
4047 
4048     case REALIGN_LOAD_EXPR:
4049 
4050     case REDUC_MAX_EXPR:
4051     case REDUC_MIN_EXPR:
4052     case REDUC_PLUS_EXPR:
4053     case WIDEN_SUM_EXPR:
4054     case WIDEN_MULT_EXPR:
4055     case DOT_PROD_EXPR:
4056     case SAD_EXPR:
4057     case WIDEN_MULT_PLUS_EXPR:
4058     case WIDEN_MULT_MINUS_EXPR:
4059     case WIDEN_LSHIFT_EXPR:
4060 
4061     case VEC_WIDEN_MULT_HI_EXPR:
4062     case VEC_WIDEN_MULT_LO_EXPR:
4063     case VEC_WIDEN_MULT_EVEN_EXPR:
4064     case VEC_WIDEN_MULT_ODD_EXPR:
4065     case VEC_UNPACK_HI_EXPR:
4066     case VEC_UNPACK_LO_EXPR:
4067     case VEC_UNPACK_FLOAT_HI_EXPR:
4068     case VEC_UNPACK_FLOAT_LO_EXPR:
4069     case VEC_PACK_TRUNC_EXPR:
4070     case VEC_PACK_SAT_EXPR:
4071     case VEC_PACK_FIX_TRUNC_EXPR:
4072     case VEC_WIDEN_LSHIFT_HI_EXPR:
4073     case VEC_WIDEN_LSHIFT_LO_EXPR:
4074 
4075       return 1;
4076 
4077     /* A few special cases of expensive operations.  This is useful
4078        to avoid inlining functions that have too many of these.  */
4079     case TRUNC_DIV_EXPR:
4080     case CEIL_DIV_EXPR:
4081     case FLOOR_DIV_EXPR:
4082     case ROUND_DIV_EXPR:
4083     case EXACT_DIV_EXPR:
4084     case TRUNC_MOD_EXPR:
4085     case CEIL_MOD_EXPR:
4086     case FLOOR_MOD_EXPR:
4087     case ROUND_MOD_EXPR:
4088     case RDIV_EXPR:
4089       if (TREE_CODE (op2) != INTEGER_CST)
4090         return weights->div_mod_cost;
4091       return 1;
4092 
4093     /* Bit-field insertion needs several shift and mask operations.  */
4094     case BIT_INSERT_EXPR:
4095       return 3;
4096 
4097     default:
4098       /* We expect a copy assignment with no operator.  */
4099       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4100       return 0;
4101     }
4102 }
4103 
4104 
4105 /* Estimate number of instructions that will be created by expanding
4106    the statements in the statement sequence STMTS.
4107    WEIGHTS contains weights attributed to various constructs.  */
4108 
4109 int
4110 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4111 {
4112   int cost;
4113   gimple_stmt_iterator gsi;
4114 
4115   cost = 0;
4116   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4117     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4118 
4119   return cost;
4120 }
4121 
4122 
4123 /* Estimate number of instructions that will be created by expanding STMT.
4124    WEIGHTS contains weights attributed to various constructs.  */
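/* For example, a register assignment such as a_1 = b_2 + 1 costs 1, a
   division by a non-constant divisor costs WEIGHTS->div_mod_cost, and a
   store to memory additionally pays the estimate_move_cost of the stored
   type.  */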
4125 
4126 int
4127 estimate_num_insns (gimple *stmt, eni_weights *weights)
4128 {
4129   unsigned cost, i;
4130   enum gimple_code code = gimple_code (stmt);
4131   tree lhs;
4132   tree rhs;
4133 
4134   switch (code)
4135     {
4136     case GIMPLE_ASSIGN:
4137       /* Try to estimate the cost of assignments.  We have two cases to
4138 	 deal with:
4139 	 1) Simple assignments to registers;
4140 	 2) Stores to things that must live in memory.  This includes
4141 	    "normal" stores to scalars, but also assignments of large
4142 	    structures, or constructors of big arrays;
4143 
4144 	 Let us look at these two cases, assuming we have "a = b + C":
4145 	 <GIMPLE_ASSIGN <var_decl "a">
4146 	        <plus_expr <var_decl "b"> <constant C>>
4147 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4148 	 any target, because "a" usually ends up in a real register.  Hence
4149 	 the only cost of this expression comes from the PLUS_EXPR, and we
4150 	 can ignore the GIMPLE_ASSIGN.
4151 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4152 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4153 	 of moving something into "a", which we compute using the function
4154 	 estimate_move_cost.  */
4155       if (gimple_clobber_p (stmt))
4156 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4157 
4158       lhs = gimple_assign_lhs (stmt);
4159       rhs = gimple_assign_rhs1 (stmt);
4160 
4161       cost = 0;
4162 
4163       /* Account for the cost of moving to / from memory.  */
4164       if (gimple_store_p (stmt))
4165 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4166       if (gimple_assign_load_p (stmt))
4167 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4168 
4169       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4170       				      gimple_assign_rhs1 (stmt),
4171 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4172 				      == GIMPLE_BINARY_RHS
4173 				      ? gimple_assign_rhs2 (stmt) : NULL);
4174       break;
4175 
4176     case GIMPLE_COND:
4177       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4178       				         gimple_op (stmt, 0),
4179 				         gimple_op (stmt, 1));
4180       break;
4181 
4182     case GIMPLE_SWITCH:
4183       {
4184 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4185 	/* Take into account cost of the switch + guess 2 conditional jumps for
4186 	   each case label.
4187 
4188 	   TODO: once the switch expansion logic is sufficiently separated, we can
4189 	   do a better job of estimating the cost of the switch.  */
4190 	if (weights->time_based)
4191 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4192 	else
4193 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4194       }
4195       break;
4196 
4197     case GIMPLE_CALL:
4198       {
4199 	tree decl;
4200 
4201 	if (gimple_call_internal_p (stmt))
4202 	  return 0;
4203 	else if ((decl = gimple_call_fndecl (stmt))
4204 		 && DECL_BUILT_IN (decl))
4205 	  {
4206 	    /* Do not special-case builtins where we see the body.
4207 	       This just confuses the inliner.  */
4208 	    struct cgraph_node *node;
4209 	    if (!(node = cgraph_node::get (decl))
4210 		|| node->definition)
4211 	      ;
4212 	    /* For builtins that are likely expanded to nothing or
4213 	       inlined, do not account for operand costs.  */
4214 	    else if (is_simple_builtin (decl))
4215 	      return 0;
4216 	    else if (is_inexpensive_builtin (decl))
4217 	      return weights->target_builtin_call_cost;
4218 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4219 	      {
4220 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4221 		   specialize the cheap expansion we do here.
4222 		   ???  This asks for a more general solution.  */
4223 		switch (DECL_FUNCTION_CODE (decl))
4224 		  {
4225 		    case BUILT_IN_POW:
4226 		    case BUILT_IN_POWF:
4227 		    case BUILT_IN_POWL:
4228 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4229 			  && (real_equal
4230 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4231 			       &dconst2)))
4232 			return estimate_operator_cost
4233 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4234 			     gimple_call_arg (stmt, 0));
4235 		      break;
4236 
4237 		    default:
4238 		      break;
4239 		  }
4240 	      }
4241 	  }
4242 
4243 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4244 	if (gimple_call_lhs (stmt))
4245 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4246 				      weights->time_based);
4247 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4248 	  {
4249 	    tree arg = gimple_call_arg (stmt, i);
4250 	    cost += estimate_move_cost (TREE_TYPE (arg),
4251 					weights->time_based);
4252 	  }
4253 	break;
4254       }
4255 
4256     case GIMPLE_RETURN:
4257       return weights->return_cost;
4258 
4259     case GIMPLE_GOTO:
4260     case GIMPLE_LABEL:
4261     case GIMPLE_NOP:
4262     case GIMPLE_PHI:
4263     case GIMPLE_PREDICT:
4264     case GIMPLE_DEBUG:
4265       return 0;
4266 
4267     case GIMPLE_ASM:
4268       {
4269 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4270 	/* 1000 means infinity. This avoids overflows later
4271 	   with very long asm statements.  */
4272 	if (count > 1000)
4273 	  count = 1000;
4274 	/* If this asm is asm inline, count anything as minimum size.  */
4275 	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4276 	  count = MIN (1, count);
4277 	return count;
4278       }
4279 
4280     case GIMPLE_RESX:
4281       /* This is either going to be an external function call with one
4282 	 argument, or two register copy statements plus a goto.  */
4283       return 2;
4284 
4285     case GIMPLE_EH_DISPATCH:
4286       /* ??? This is going to turn into a switch statement.  Ideally
4287 	 we'd have a look at the eh region and estimate the number of
4288 	 edges involved.  */
4289       return 10;
4290 
4291     case GIMPLE_BIND:
4292       return estimate_num_insns_seq (
4293 	       gimple_bind_body (as_a <gbind *> (stmt)),
4294 	       weights);
4295 
4296     case GIMPLE_EH_FILTER:
4297       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4298 
4299     case GIMPLE_CATCH:
4300       return estimate_num_insns_seq (gimple_catch_handler (
4301 				       as_a <gcatch *> (stmt)),
4302 				     weights);
4303 
4304     case GIMPLE_TRY:
4305       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4306               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4307 
4308     /* OMP directives are generally very expensive.  */
4309 
4310     case GIMPLE_OMP_RETURN:
4311     case GIMPLE_OMP_SECTIONS_SWITCH:
4312     case GIMPLE_OMP_ATOMIC_STORE:
4313     case GIMPLE_OMP_CONTINUE:
4314       /* ...except these, which are cheap.  */
4315       return 0;
4316 
4317     case GIMPLE_OMP_ATOMIC_LOAD:
4318       return weights->omp_cost;
4319 
4320     case GIMPLE_OMP_FOR:
4321       return (weights->omp_cost
4322               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4323               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4324 
4325     case GIMPLE_OMP_PARALLEL:
4326     case GIMPLE_OMP_TASK:
4327     case GIMPLE_OMP_CRITICAL:
4328     case GIMPLE_OMP_MASTER:
4329     case GIMPLE_OMP_TASKGROUP:
4330     case GIMPLE_OMP_ORDERED:
4331     case GIMPLE_OMP_SECTION:
4332     case GIMPLE_OMP_SECTIONS:
4333     case GIMPLE_OMP_SINGLE:
4334     case GIMPLE_OMP_TARGET:
4335     case GIMPLE_OMP_TEAMS:
4336       return (weights->omp_cost
4337               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4338 
4339     case GIMPLE_TRANSACTION:
4340       return (weights->tm_cost
4341 	      + estimate_num_insns_seq (gimple_transaction_body (
4342 					  as_a <gtransaction *> (stmt)),
4343 					weights));
4344 
4345     default:
4346       gcc_unreachable ();
4347     }
4348 
4349   return cost;
4350 }
4351 
4352 /* Estimate number of instructions that will be created by expanding
4353    function FNDECL.  WEIGHTS contains weights attributed to various
4354    constructs.  */
4355 
4356 int
4357 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4358 {
4359   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4360   gimple_stmt_iterator bsi;
4361   basic_block bb;
4362   int n = 0;
4363 
4364   gcc_assert (my_function && my_function->cfg);
4365   FOR_EACH_BB_FN (bb, my_function)
4366     {
4367       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4368 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4369     }
4370 
4371   return n;
4372 }
4373 
4374 
4375 /* Initializes weights used by estimate_num_insns.  */
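/* With the defaults below, for instance, a direct call contributes 1 unit
   to the size estimate but 10 units to the time estimate, and OMP
   directives are treated as expensive (40) under both metrics.  */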
4376 
4377 void
4378 init_inline_once (void)
4379 {
4380   eni_size_weights.call_cost = 1;
4381   eni_size_weights.indirect_call_cost = 3;
4382   eni_size_weights.target_builtin_call_cost = 1;
4383   eni_size_weights.div_mod_cost = 1;
4384   eni_size_weights.omp_cost = 40;
4385   eni_size_weights.tm_cost = 10;
4386   eni_size_weights.time_based = false;
4387   eni_size_weights.return_cost = 1;
4388 
4389   /* Estimating time for call is difficult, since we have no idea what the
4390      called function does.  In the current uses of eni_time_weights,
4391      underestimating the cost does less harm than overestimating it, so
4392      we choose a rather small value here.  */
4393   eni_time_weights.call_cost = 10;
4394   eni_time_weights.indirect_call_cost = 15;
4395   eni_time_weights.target_builtin_call_cost = 1;
4396   eni_time_weights.div_mod_cost = 10;
4397   eni_time_weights.omp_cost = 40;
4398   eni_time_weights.tm_cost = 40;
4399   eni_time_weights.time_based = true;
4400   eni_time_weights.return_cost = 2;
4401 }
4402 
4403 
4404 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4405 
4406 static void
4407 prepend_lexical_block (tree current_block, tree new_block)
4408 {
4409   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4410   BLOCK_SUBBLOCKS (current_block) = new_block;
4411   BLOCK_SUPERCONTEXT (new_block) = current_block;
4412 }
4413 
4414 /* Add local variables from CALLEE to CALLER.  */
4415 
4416 static inline void
4417 add_local_variables (struct function *callee, struct function *caller,
4418 		     copy_body_data *id)
4419 {
4420   tree var;
4421   unsigned ix;
4422 
4423   FOR_EACH_LOCAL_DECL (callee, ix, var)
4424     if (!can_be_nonlocal (var, id))
4425       {
4426         tree new_var = remap_decl (var, id);
4427 
4428         /* Remap debug-expressions.  */
4429 	if (VAR_P (new_var)
4430 	    && DECL_HAS_DEBUG_EXPR_P (var)
4431 	    && new_var != var)
4432 	  {
4433 	    tree tem = DECL_DEBUG_EXPR (var);
4434 	    bool old_regimplify = id->regimplify;
4435 	    id->remapping_type_depth++;
4436 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4437 	    id->remapping_type_depth--;
4438 	    id->regimplify = old_regimplify;
4439 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4440 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4441 	  }
4442 	add_local_decl (caller, new_var);
4443       }
4444 }
4445 
4446 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4447    have brought in or introduced any debug stmts for SRCVAR.  */
4448 
4449 static inline void
4450 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4451 {
4452   tree *remappedvarp = id->decl_map->get (srcvar);
4453 
4454   if (!remappedvarp)
4455     return;
4456 
4457   if (!VAR_P (*remappedvarp))
4458     return;
4459 
4460   if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4461     return;
4462 
4463   tree tvar = target_for_debug_bind (*remappedvarp);
4464   if (!tvar)
4465     return;
4466 
4467   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4468 					  id->call_stmt);
4469   gimple_seq_add_stmt (bindings, stmt);
4470 }
4471 
4472 /* For each inlined variable for which we may have debug bind stmts,
4473    add before GSI a final debug stmt resetting it, marking the end of
4474    its life, so that var-tracking knows it doesn't have to compute
4475    further locations for it.  */
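/* For example, for an inlined local variable TEM this emits, at the
   return point, a final note of the form # DEBUG tem => NULL so that
   stale locations for TEM are not extended past the inlined body.  */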
4476 
4477 static inline void
4478 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4479 {
4480   tree var;
4481   unsigned ix;
4482   gimple_seq bindings = NULL;
4483 
4484   if (!gimple_in_ssa_p (id->src_cfun))
4485     return;
4486 
4487   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4488     return;
4489 
4490   for (var = DECL_ARGUMENTS (id->src_fn);
4491        var; var = DECL_CHAIN (var))
4492     reset_debug_binding (id, var, &bindings);
4493 
4494   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4495     reset_debug_binding (id, var, &bindings);
4496 
4497   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4498 }
4499 
4500 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
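/* Roughly: for a statement such as x = foo (a), the containing block is
   split after the call, FOO's parameters are initialized from the actual
   arguments, a return variable is declared, a copy of FOO's body is
   spliced in between the two halves, and the call is finally replaced by
   an assignment from the variable holding FOO's return value.  */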
4501 
4502 static bool
4503 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4504 {
4505   tree use_retvar;
4506   tree fn;
4507   hash_map<tree, tree> *dst;
4508   hash_map<tree, tree> *st = NULL;
4509   tree return_slot;
4510   tree modify_dest;
4511   tree return_bounds = NULL;
4512   struct cgraph_edge *cg_edge;
4513   cgraph_inline_failed_t reason;
4514   basic_block return_block;
4515   edge e;
4516   gimple_stmt_iterator gsi, stmt_gsi;
4517   bool successfully_inlined = false;
4518   bool purge_dead_abnormal_edges;
4519   gcall *call_stmt;
4520   unsigned int i;
4521   unsigned int prop_mask, src_properties;
4522   struct function *dst_cfun;
4523   tree simduid;
4524   use_operand_p use;
4525   gimple *simtenter_stmt = NULL;
4526   vec<tree> *simtvars_save;
4527 
4528   /* The gimplifier uses input_location in too many places, such as
4529      internal_get_tmp_var ().  */
4530   location_t saved_location = input_location;
4531   input_location = gimple_location (stmt);
4532 
4533   /* From here on, we're only interested in CALL_EXPRs.  */
4534   call_stmt = dyn_cast <gcall *> (stmt);
4535   if (!call_stmt)
4536     goto egress;
4537 
4538   cg_edge = id->dst_node->get_edge (stmt);
4539   gcc_checking_assert (cg_edge);
4540   /* First, see if we can figure out what function is being called.
4541      If we cannot, then there is no hope of inlining the function.  */
4542   if (cg_edge->indirect_unknown_callee)
4543     goto egress;
4544   fn = cg_edge->callee->decl;
4545   gcc_checking_assert (fn);
4546 
4547   /* If FN is a declaration of a function in a nested scope that was
4548      globally declared inline, we don't set its DECL_INITIAL.
4549      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4550      C++ front-end uses it for cdtors to refer to their internal
4551      declarations, that are not real functions.  Fortunately those
4552      don't have trees to be saved, so we can tell by checking their
4553      gimple_body.  */
4554   if (!DECL_INITIAL (fn)
4555       && DECL_ABSTRACT_ORIGIN (fn)
4556       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4557     fn = DECL_ABSTRACT_ORIGIN (fn);
4558 
4559   /* Don't try to inline functions that are not well-suited to inlining.  */
4560   if (cg_edge->inline_failed)
4561     {
4562       reason = cg_edge->inline_failed;
4563       /* If this call was originally indirect, we do not want to emit any
4564 	 inlining related warnings or sorry messages because there are no
4565 	 guarantees regarding those.  */
4566       if (cg_edge->indirect_inlining_edge)
4567 	goto egress;
4568 
4569       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4570           /* For extern inline functions that get redefined we have always
4571 	     silently ignored the always_inline flag.  Better behavior would
4572 	     be to be able to keep both bodies and use extern inline body
4573 	     for inlining, but we can't do that because frontends overwrite
4574 	     the body.  */
4575 	  && !cg_edge->callee->local.redefined_extern_inline
4576 	  /* During early inline pass, report only when optimization is
4577 	     not turned on.  */
4578 	  && (symtab->global_info_ready
4579 	      || !optimize
4580 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4581 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4582 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4583 	{
4584 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4585 		 cgraph_inline_failed_string (reason));
4586 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4587 	    inform (gimple_location (stmt), "called from here");
4588 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4589 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4590                    "called from this function");
4591 	}
4592       else if (warn_inline
4593 	       && DECL_DECLARED_INLINE_P (fn)
4594 	       && !DECL_NO_INLINE_WARNING_P (fn)
4595 	       && !DECL_IN_SYSTEM_HEADER (fn)
4596 	       && reason != CIF_UNSPECIFIED
4597 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4598 	       /* Do not warn about not inlined recursive calls.  */
4599 	       && !cg_edge->recursive_p ()
4600 	       /* Avoid warnings during early inline pass. */
4601 	       && symtab->global_info_ready)
4602 	{
4603 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4604 		       fn, _(cgraph_inline_failed_string (reason))))
4605 	    {
4606 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4607 		inform (gimple_location (stmt), "called from here");
4608 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4609 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4610                        "called from this function");
4611 	    }
4612 	}
4613       goto egress;
4614     }
4615   id->src_node = cg_edge->callee;
4616 
4617   /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4618      and redirect to the function being thunked.  */
4619   if (id->src_node->thunk.thunk_p)
4620     {
4621       cgraph_edge *edge;
4622       tree virtual_offset = NULL;
4623       int freq = cg_edge->frequency;
4624       gcov_type count = cg_edge->count;
4625       tree op;
4626       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4627 
4628       cg_edge->remove ();
4629       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4630 		   		           gimple_uid (stmt),
4631 				   	   REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
4632 				           true);
4633       edge->frequency = freq;
4634       edge->count = count;
4635       if (id->src_node->thunk.virtual_offset_p)
4636         virtual_offset = size_int (id->src_node->thunk.virtual_value);
4637       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4638 			      NULL);
4639       gsi_insert_before (&iter, gimple_build_assign (op,
4640 						    gimple_call_arg (stmt, 0)),
4641 			 GSI_NEW_STMT);
4642       gcc_assert (id->src_node->thunk.this_adjusting);
4643       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4644 			 virtual_offset);
4645 
4646       gimple_call_set_arg (stmt, 0, op);
4647       gimple_call_set_fndecl (stmt, edge->callee->decl);
4648       update_stmt (stmt);
4649       id->src_node->remove ();
4650       expand_call_inline (bb, stmt, id);
4651       maybe_remove_unused_call_args (cfun, stmt);
4652       return true;
4653     }
4654   fn = cg_edge->callee->decl;
4655   cg_edge->callee->get_untransformed_body ();
4656 
4657   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4658     cg_edge->callee->verify ();
4659 
4660   /* We will be inlining this callee.  */
4661   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4662   id->assign_stmts.create (0);
4663 
4664   /* Update the callers EH personality.  */
4665   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4666     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4667       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4668 
4669   /* Split the block holding the GIMPLE_CALL.  */
4670   e = split_block (bb, stmt);
4671   bb = e->src;
4672   return_block = e->dest;
4673   remove_edge (e);
4674 
4675   /* split_block splits after the statement; work around this by
4676      moving the call into the second block manually.  Not pretty,
4677      but seems easier than doing the CFG manipulation by hand
4678      when the GIMPLE_CALL is in the last statement of BB.  */
4679   stmt_gsi = gsi_last_bb (bb);
4680   gsi_remove (&stmt_gsi, false);
4681 
4682   /* If the GIMPLE_CALL was in the last statement of BB, it may have
4683      been the source of abnormal edges.  In this case, schedule
4684      the removal of dead abnormal edges.  */
4685   gsi = gsi_start_bb (return_block);
4686   if (gsi_end_p (gsi))
4687     {
4688       gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4689       purge_dead_abnormal_edges = true;
4690     }
4691   else
4692     {
4693       gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4694       purge_dead_abnormal_edges = false;
4695     }
4696 
4697   stmt_gsi = gsi_start_bb (return_block);
4698 
4699   /* Build a block containing code to initialize the arguments, the
4700      actual inline expansion of the body, and a label for the return
4701      statements within the function to jump to.  The type of the
4702      statement expression is the return type of the function call.
4703      ???  If the call does not have an associated block then we will
4704      remap all callee blocks to NULL, effectively dropping most of
4705      its debug information.  This should only happen for calls to
4706      artificial decls inserted by the compiler itself.  We need to
4707      either link the inlined blocks into the caller block tree or
4708      not refer to them in any way to not break GC for locations.  */
4709   if (gimple_block (stmt))
4710     {
4711       id->block = make_node (BLOCK);
4712       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4713       BLOCK_SOURCE_LOCATION (id->block)
4714 	= LOCATION_LOCUS (gimple_location (stmt));
4715       prepend_lexical_block (gimple_block (stmt), id->block);
4716     }
4717 
4718   /* Local declarations will be replaced by their equivalents in this
4719      map.  */
4720   st = id->decl_map;
4721   id->decl_map = new hash_map<tree, tree>;
4722   dst = id->debug_map;
4723   id->debug_map = NULL;
4724 
4725   /* Record the function we are about to inline.  */
4726   id->src_fn = fn;
4727   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4728   id->call_stmt = call_stmt;
4729 
4730   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4731      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4732   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4733   simtvars_save = id->dst_simt_vars;
4734   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4735       && (simduid = bb->loop_father->simduid) != NULL_TREE
4736       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4737       && single_imm_use (simduid, &use, &simtenter_stmt)
4738       && is_gimple_call (simtenter_stmt)
4739       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4740     vec_alloc (id->dst_simt_vars, 0);
4741   else
4742     id->dst_simt_vars = NULL;
4743 
4744   /* If the src function contains an IFN_VA_ARG, then so will the dst
4745      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4746   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4747   src_properties = id->src_cfun->curr_properties & prop_mask;
4748   if (src_properties != prop_mask)
4749     dst_cfun->curr_properties &= src_properties | ~prop_mask;
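  /* Note: the bitwise trick above clears in DST_CFUN exactly those bits of
     PROP_MASK that the callee lacks, e.g. PROP_gimple_lva when the callee
     still contains an IFN_VA_ARG, so the relevant lowering pass will run
     again on the combined body; bits outside PROP_MASK are left untouched.  */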
4750 
4751   gcc_assert (!id->src_cfun->after_inlining);
4752 
4753   id->entry_bb = bb;
4754   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4755     {
4756       gimple_stmt_iterator si = gsi_last_bb (bb);
4757       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4758       						   NOT_TAKEN),
4759 			GSI_NEW_STMT);
4760     }
4761   initialize_inlined_parameters (id, stmt, fn, bb);
4762 
4763   if (DECL_INITIAL (fn))
4764     {
4765       if (gimple_block (stmt))
4766 	{
4767 	  tree *var;
4768 
4769 	  prepend_lexical_block (id->block,
4770 				 remap_blocks (DECL_INITIAL (fn), id));
4771 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4772 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4773 				   == NULL_TREE));
4774 	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4775 	     otherwise, in DWARF, the DW_TAG_formal_parameters will not be
4776 	     children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4777 	     under it.  The parameters can then be evaluated in the debugger,
4778 	     but don't show up in backtraces.  */
4779 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4780 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4781 	      {
4782 		tree v = *var;
4783 		*var = TREE_CHAIN (v);
4784 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4785 		BLOCK_VARS (id->block) = v;
4786 	      }
4787 	    else
4788 	      var = &TREE_CHAIN (*var);
4789 	}
4790       else
4791 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4792     }
4793 
4794   /* Return statements in the function body will be replaced by jumps
4795      to the RET_LABEL.  */
4796   gcc_assert (DECL_INITIAL (fn));
4797   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4798 
4799   /* Find the LHS to which the result of this call is assigned.  */
4800   return_slot = NULL;
4801   if (gimple_call_lhs (stmt))
4802     {
4803       modify_dest = gimple_call_lhs (stmt);
4804 
4805       /* Remember where to copy returned bounds.  */
4806       if (gimple_call_with_bounds_p (stmt)
4807 	  && TREE_CODE (modify_dest) == SSA_NAME)
4808 	{
4809 	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4810 	  if (retbnd)
4811 	    {
4812 	      return_bounds = gimple_call_lhs (retbnd);
4813 	      /* If returned bounds are not used then just
4814 		 remove unused call.  */
4815 	      if (!return_bounds)
4816 		{
4817 		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4818 		  gsi_remove (&iter, true);
4819 		}
4820 	    }
4821 	}
4822 
4823       /* The function which we are inlining might not return a value,
4824 	 in which case we should issue a warning that the function
4825 	 does not return a value.  In that case the optimizers will
4826 	 see that the variable to which the value is assigned was not
4827 	 initialized.  We do not want to issue a warning about that
4828 	 uninitialized variable.  */
4829       if (DECL_P (modify_dest))
4830 	TREE_NO_WARNING (modify_dest) = 1;
4831 
4832       if (gimple_call_return_slot_opt_p (call_stmt))
4833 	{
4834 	  return_slot = modify_dest;
4835 	  modify_dest = NULL;
4836 	}
4837     }
4838   else
4839     modify_dest = NULL;
4840 
4841   /* If we are inlining a call to the C++ operator new, we don't want
4842      to use type based alias analysis on the return value.  Otherwise
4843      we may get confused if the compiler sees that the inlined new
4844      function returns a pointer which was just deleted.  See bug
4845      33407.  */
4846   if (DECL_IS_OPERATOR_NEW (fn))
4847     {
4848       return_slot = NULL;
4849       modify_dest = NULL;
4850     }
4851 
4852   /* Declare the return variable for the function.  */
4853   use_retvar = declare_return_variable (id, return_slot, modify_dest,
4854 					return_bounds, bb);
4855 
4856   /* Add local vars in this inlined callee to caller.  */
4857   add_local_variables (id->src_cfun, cfun, id);
4858 
4859   if (dump_file && (dump_flags & TDF_DETAILS))
4860     {
4861       fprintf (dump_file, "Inlining ");
4862       print_generic_expr (dump_file, id->src_fn, 0);
4863       fprintf (dump_file, " to ");
4864       print_generic_expr (dump_file, id->dst_fn, 0);
4865       fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4866     }
4867 
4868   /* This is it.  Duplicate the callee body.  Assume callee is
4869      pre-gimplified.  Note that we must not alter the caller
4870      function in any way before this point, as this CALL_EXPR may be
4871      a self-referential call; if we're calling ourselves, we need to
4872      duplicate our body before altering anything.  */
4873   copy_body (id, cg_edge->callee->count,
4874   	     GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4875 	     bb, return_block, NULL);
4876 
4877   reset_debug_bindings (id, stmt_gsi);
4878 
4879   if (flag_stack_reuse != SR_NONE
4880       && (flag_sanitize & SANITIZE_KERNEL_ADDRESS) != 0)
4881     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4882       if (!TREE_THIS_VOLATILE (p))
4883 	{
4884 	  tree *varp = id->decl_map->get (p);
4885 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4886 	    {
4887 	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4888 	      gimple *clobber_stmt;
4889 	      TREE_THIS_VOLATILE (clobber) = 1;
4890 	      clobber_stmt = gimple_build_assign (*varp, clobber);
4891 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
4892 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4893 	    }
4894 	}
4895 
4896   /* Reset the escaped solution.  */
4897   if (cfun->gimple_df)
4898     pt_solution_reset (&cfun->gimple_df->escaped);
4899 
4900   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
4901   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4902     {
4903       size_t nargs = gimple_call_num_args (simtenter_stmt);
4904       vec<tree> *vars = id->dst_simt_vars;
4905       auto_vec<tree> newargs (nargs + vars->length ());
4906       for (size_t i = 0; i < nargs; i++)
4907 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4908       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4909 	{
4910 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4911 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4912 	}
4913       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4914       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4915       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4916       gsi_replace (&gsi, g, false);
4917     }
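  /* The IFN_GOMP_SIMT_ENTER call is rebuilt from scratch rather than edited
     in place because a gimple call's argument count is fixed when the
     statement is created; appending the addresses of the new SIMT-private
     variables therefore requires building a replacement call.  */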
4918   vec_free (id->dst_simt_vars);
4919   id->dst_simt_vars = simtvars_save;
4920 
4921   /* Clean up.  */
4922   if (id->debug_map)
4923     {
4924       delete id->debug_map;
4925       id->debug_map = dst;
4926     }
4927   delete id->decl_map;
4928   id->decl_map = st;
4929 
4930   /* Unlink the call's virtual operands before replacing it.  */
4931   unlink_stmt_vdef (stmt);
4932   if (gimple_vdef (stmt)
4933       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4934     release_ssa_name (gimple_vdef (stmt));
4935 
4936   /* If the inlined function returns a result that we care about,
4937      substitute the GIMPLE_CALL with an assignment of the return
4938      variable to the LHS of the call.  That is, if STMT was
4939      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4940   if (use_retvar && gimple_call_lhs (stmt))
4941     {
4942       gimple *old_stmt = stmt;
4943       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4944       gsi_replace (&stmt_gsi, stmt, false);
4945       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4946       /* Append a clobber for id->retvar if easily possible.  */
4947       if (flag_stack_reuse != SR_NONE
4948 	  && (flag_sanitize & SANITIZE_KERNEL_ADDRESS) != 0
4949 	  && id->retvar
4950 	  && VAR_P (id->retvar)
4951 	  && id->retvar != return_slot
4952 	  && id->retvar != modify_dest
4953 	  && !TREE_THIS_VOLATILE (id->retvar)
4954 	  && !is_gimple_reg (id->retvar)
4955 	  && !stmt_ends_bb_p (stmt))
4956 	{
4957 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4958 	  gimple *clobber_stmt;
4959 	  TREE_THIS_VOLATILE (clobber) = 1;
4960 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4961 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4962 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4963 	}
4964 
4965       /* Copy bounds if we copy structure with bounds.  */
4966       if (chkp_function_instrumented_p (id->dst_fn)
4967 	  && !BOUNDED_P (use_retvar)
4968 	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4969 	id->assign_stmts.safe_push (stmt);
4970     }
4971   else
4972     {
4973       /* Handle the case of inlining a function with no return
4974 	 statement, which causes the return value to become undefined.  */
4975       if (gimple_call_lhs (stmt)
4976 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4977 	{
4978 	  tree name = gimple_call_lhs (stmt);
4979 	  tree var = SSA_NAME_VAR (name);
4980 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4981 
4982 	  if (def)
4983 	    {
4984 	      /* If the variable is used undefined, make this name
4985 		 undefined via a move.  */
4986 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4987 	      gsi_replace (&stmt_gsi, stmt, true);
4988 	    }
4989 	  else
4990 	    {
4991 	      if (!var)
4992 		{
4993 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4994 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4995 		}
4996 	      /* Otherwise make this variable undefined.  */
4997 	      gsi_remove (&stmt_gsi, true);
4998 	      set_ssa_default_def (cfun, var, name);
4999 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5000 	    }
5001 	}
5002       /* Replace with a clobber for id->retvar.  */
5003       else if (flag_stack_reuse != SR_NONE
5004 	       && (flag_sanitize & SANITIZE_KERNEL_ADDRESS) != 0
5005 	       && id->retvar
5006 	       && VAR_P (id->retvar)
5007 	       && id->retvar != return_slot
5008 	       && id->retvar != modify_dest
5009 	       && !TREE_THIS_VOLATILE (id->retvar)
5010 	       && !is_gimple_reg (id->retvar))
5011 	{
5012 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
5013 	  gimple *clobber_stmt;
5014 	  TREE_THIS_VOLATILE (clobber) = 1;
5015 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
5016 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
5017 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
5018 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5019 	}
5020       else
5021 	gsi_remove (&stmt_gsi, true);
5022     }
5023 
5024   /* Put returned bounds into the correct place if required.  */
5025   if (return_bounds)
5026     {
5027       gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
5028       gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
5029       gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
5030       unlink_stmt_vdef (old_stmt);
5031       gsi_replace (&bnd_gsi, new_stmt, false);
5032       maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
5033       cgraph_update_edges_for_call_stmt (old_stmt,
5034 					 gimple_call_fndecl (old_stmt),
5035 					 new_stmt);
5036     }
5037 
5038   if (purge_dead_abnormal_edges)
5039     {
5040       gimple_purge_dead_eh_edges (return_block);
5041       gimple_purge_dead_abnormal_call_edges (return_block);
5042     }
5043 
5044   /* If the value of the new expression is ignored, that's OK.  We
5045      don't warn about this for CALL_EXPRs, so we shouldn't warn about
5046      the equivalent inlined version either.  */
5047   if (is_gimple_assign (stmt))
5048     {
5049       gcc_assert (gimple_assign_single_p (stmt)
5050 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5051       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5052     }
5053 
5054   /* Copy bounds for all generated assigns that need it.  */
5055   for (i = 0; i < id->assign_stmts.length (); i++)
5056     chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
5057   id->assign_stmts.release ();
5058 
5059   /* Output the inlining info for this abstract function, since it has been
5060      inlined.  If we don't do this now, we can lose the information about the
5061      variables in the function when the blocks get blown away as soon as we
5062      remove the cgraph node.  */
5063   if (gimple_block (stmt))
5064     (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
5065 
5066   /* Update callgraph if needed.  */
5067   cg_edge->callee->remove ();
5068 
5069   id->block = NULL_TREE;
5070   id->retvar = NULL_TREE;
5071   id->retbnd = NULL_TREE;
5072   successfully_inlined = true;
5073 
5074  egress:
5075   input_location = saved_location;
5076   return successfully_inlined;
5077 }
5078 
5079 /* Expand inlinable call statements in basic block BB.
5080    We can only have CALL_EXPRs as the "toplevel" tree code or nested
5081    in a MODIFY_EXPR.  */
5082 
5083 static bool
5084 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
5085 {
5086   gimple_stmt_iterator gsi;
5087   bool inlined = false;
5088 
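  /* Walk the statements backwards: expand_call_inline splits BB at an
     inlined call, so iterating from the end means the iterator always
     refers to a statement that remains in BB.  */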
5089   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5090     {
5091       gimple *stmt = gsi_stmt (gsi);
5092       gsi_prev (&gsi);
5093 
5094       if (is_gimple_call (stmt)
5095 	  && !gimple_call_internal_p (stmt))
5096 	inlined |= expand_call_inline (bb, stmt, id);
5097     }
5098 
5099   return inlined;
5100 }
5101 
5102 
5103 /* Walk all basic blocks whose index is FIRST or above (i.e. those created by
5104    inlining) and try to fold every statement in the STATEMENTS pointer set.  */
5105 
5106 static void
5107 fold_marked_statements (int first, hash_set<gimple *> *statements)
5108 {
5109   for (; first < n_basic_blocks_for_fn (cfun); first++)
5110     if (BASIC_BLOCK_FOR_FN (cfun, first))
5111       {
5112         gimple_stmt_iterator gsi;
5113 
5114 	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5115 	     !gsi_end_p (gsi);
5116 	     gsi_next (&gsi))
5117 	  if (statements->contains (gsi_stmt (gsi)))
5118 	    {
5119 	      gimple *old_stmt = gsi_stmt (gsi);
5120 	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5121 
5122 	      if (old_decl && DECL_BUILT_IN (old_decl))
5123 		{
5124 		  /* Folding builtins can create multiple instructions;
5125 		     we need to look at all of them.  */
5126 		  gimple_stmt_iterator i2 = gsi;
5127 		  gsi_prev (&i2);
5128 		  if (fold_stmt (&gsi))
5129 		    {
5130 		      gimple *new_stmt;
5131 		      /* If a builtin at the end of a bb folded into nothing,
5132 			 the following loop won't work.  */
5133 		      if (gsi_end_p (gsi))
5134 			{
5135 			  cgraph_update_edges_for_call_stmt (old_stmt,
5136 							     old_decl, NULL);
5137 			  break;
5138 			}
5139 		      if (gsi_end_p (i2))
5140 			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5141 		      else
5142 			gsi_next (&i2);
5143 		      while (1)
5144 			{
5145 			  new_stmt = gsi_stmt (i2);
5146 			  update_stmt (new_stmt);
5147 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5148 							     new_stmt);
5149 
5150 			  if (new_stmt == gsi_stmt (gsi))
5151 			    {
5152 			      /* It is okay to check only the very last
5153 				 of these statements.  If it is a throwing
5154 				 statement nothing will change.  If it isn't,
5155 				 this can remove EH edges.  The only way that
5156 				 could be wrong is if some intermediate stmt
5157 				 throws but the last one doesn't; that would
5158 				 mean we'd have to split the block, which we
5159 				 can't do here and we'd lose anyway.  And as
5160 				 builtins probably never throw, this is all
5161 				 moot anyway.  */
5162 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
5163 								  new_stmt))
5164 				gimple_purge_dead_eh_edges (
5165 				  BASIC_BLOCK_FOR_FN (cfun, first));
5166 			      break;
5167 			    }
5168 			  gsi_next (&i2);
5169 			}
5170 		    }
5171 		}
5172 	      else if (fold_stmt (&gsi))
5173 		{
5174 		  /* Re-read the statement from GSI as fold_stmt() may
5175 		     have changed it.  */
5176 		  gimple *new_stmt = gsi_stmt (gsi);
5177 		  update_stmt (new_stmt);
5178 
5179 		  if (is_gimple_call (old_stmt)
5180 		      || is_gimple_call (new_stmt))
5181 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5182 						       new_stmt);
5183 
5184 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5185 		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5186 								    first));
5187 		}
5188 	    }
5189       }
5190 }
5191 
5192 /* Expand calls to inline functions in the body of FN.  */
5193 
5194 unsigned int
5195 optimize_inline_calls (tree fn)
5196 {
5197   copy_body_data id;
5198   basic_block bb;
5199   int last = n_basic_blocks_for_fn (cfun);
5200   bool inlined_p = false;
5201 
5202   /* Clear out ID.  */
5203   memset (&id, 0, sizeof (id));
5204 
5205   id.src_node = id.dst_node = cgraph_node::get (fn);
5206   gcc_assert (id.dst_node->definition);
5207   id.dst_fn = fn;
5208   /* Or any functions that aren't finished yet.  */
5209   if (current_function_decl)
5210     id.dst_fn = current_function_decl;
5211 
5212   id.copy_decl = copy_decl_maybe_to_var;
5213   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5214   id.transform_new_cfg = false;
5215   id.transform_return_to_modify = true;
5216   id.transform_parameter = true;
5217   id.transform_lang_insert_block = NULL;
5218   id.statements_to_fold = new hash_set<gimple *>;
5219 
5220   push_gimplify_context ();
5221 
5222   /* We make no attempts to keep dominance info up-to-date.  */
5223   free_dominance_info (CDI_DOMINATORS);
5224   free_dominance_info (CDI_POST_DOMINATORS);
5225 
5226   /* Register specific gimple functions.  */
5227   gimple_register_cfg_hooks ();
5228 
5229   /* Reach the trees by walking over the CFG, and note the
5230      enclosing basic-blocks in the call edges.  */
5231   /* We walk the blocks going forward, because inlined function bodies
5232      will split id->current_basic_block, and the new blocks will
5233      follow it; we'll trudge through them, processing their CALL_EXPRs
5234      along the way.  */
5235   FOR_EACH_BB_FN (bb, cfun)
5236     inlined_p |= gimple_expand_calls_inline (bb, &id);
5237 
5238   pop_gimplify_context (NULL);
5239 
5240   if (flag_checking)
5241     {
5242       struct cgraph_edge *e;
5243 
5244       id.dst_node->verify ();
5245 
5246       /* Double check that we inlined everything we are supposed to inline.  */
5247       for (e = id.dst_node->callees; e; e = e->next_callee)
5248 	gcc_assert (e->inline_failed);
5249     }
5250 
5251   /* Fold queued statements.  */
5252   fold_marked_statements (last, id.statements_to_fold);
5253   delete id.statements_to_fold;
5254 
5255   gcc_assert (!id.debug_stmts.exists ());
5256 
5257   /* If we didn't inline into the function there is nothing to do.  */
5258   if (!inlined_p)
5259     return 0;
5260 
5261   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5262   number_blocks (fn);
5263 
5264   delete_unreachable_blocks_update_callgraph (&id);
5265   if (flag_checking)
5266     id.dst_node->verify ();
5267 
5268   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5269      not possible yet - the IPA passes might mark various functions as not
5270      throwing, and they don't care to proactively update local EH info.  This
5271      is done later in the fixup_cfg pass, which also executes the verification.  */
5272   return (TODO_update_ssa
5273 	  | TODO_cleanup_cfg
5274 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5275 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5276 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5277 	     ? TODO_rebuild_frequencies : 0));
5278 }
5279 
5280 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5281 
5282 tree
5283 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5284 {
5285   enum tree_code code = TREE_CODE (*tp);
5286   enum tree_code_class cl = TREE_CODE_CLASS (code);
5287 
5288   /* We make copies of most nodes.  */
5289   if (IS_EXPR_CODE_CLASS (cl)
5290       || code == TREE_LIST
5291       || code == TREE_VEC
5292       || code == TYPE_DECL
5293       || code == OMP_CLAUSE)
5294     {
5295       /* Because the chain gets clobbered when we make a copy, we save it
5296 	 here.  */
5297       tree chain = NULL_TREE, new_tree;
5298 
5299       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5300 	chain = TREE_CHAIN (*tp);
5301 
5302       /* Copy the node.  */
5303       new_tree = copy_node (*tp);
5304 
5305       *tp = new_tree;
5306 
5307       /* Now, restore the chain, if appropriate.  That will cause
5308 	 walk_tree to walk into the chain as well.  */
5309       if (code == PARM_DECL
5310 	  || code == TREE_LIST
5311 	  || code == OMP_CLAUSE)
5312 	TREE_CHAIN (*tp) = chain;
5313 
5314       /* For now, we don't update BLOCKs when we make copies.  So, we
5315 	 have to nullify all BIND_EXPRs.  */
5316       if (TREE_CODE (*tp) == BIND_EXPR)
5317 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5318     }
5319   else if (code == CONSTRUCTOR)
5320     {
5321       /* CONSTRUCTOR nodes need special handling because
5322          we need to duplicate the vector of elements.  */
5323       tree new_tree;
5324 
5325       new_tree = copy_node (*tp);
5326       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5327       *tp = new_tree;
5328     }
5329   else if (code == STATEMENT_LIST)
5330     /* We used to just abort on STATEMENT_LIST, but we can run into them
5331        with statement-expressions (c++/40975).  */
5332     copy_statement_list (tp);
5333   else if (TREE_CODE_CLASS (code) == tcc_type)
5334     *walk_subtrees = 0;
5335   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5336     *walk_subtrees = 0;
5337   else if (TREE_CODE_CLASS (code) == tcc_constant)
5338     *walk_subtrees = 0;
5339   return NULL_TREE;
5340 }
5341 
5342 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5343    information indicating to what new SAVE_EXPR this one should be mapped,
5344    use that one.  Otherwise, create a new node and enter it in ST so that
5345    later encounters of the same SAVE_EXPR reuse the copy.  */
5346 
5347 static void
5348 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5349 {
5350   tree *n;
5351   tree t;
5352 
5353   /* See if we already encountered this SAVE_EXPR.  */
5354   n = st->get (*tp);
5355 
5356   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5357   if (!n)
5358     {
5359       t = copy_node (*tp);
5360 
5361       /* Remember this SAVE_EXPR.  */
5362       st->put (*tp, t);
5363       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5364       st->put (t, t);
5365     }
5366   else
5367     {
5368       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5369       *walk_subtrees = 0;
5370       t = *n;
5371     }
5372 
5373   /* Replace this SAVE_EXPR with the copy.  */
5374   *tp = t;
5375 }
5376 
5377 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5378    label, copies the declaration and enters it in the decl map of WI->info
5379    (which is really a 'copy_body_data *').  */
5380 
5381 static tree
5382 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5383 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5384 		        struct walk_stmt_info *wi)
5385 {
5386   copy_body_data *id = (copy_body_data *) wi->info;
5387   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5388 
5389   if (stmt)
5390     {
5391       tree decl = gimple_label_label (stmt);
5392 
5393       /* Copy the decl and remember the copy.  */
5394       insert_decl_map (id, decl, id->copy_decl (decl, id));
5395     }
5396 
5397   return NULL_TREE;
5398 }
5399 
5400 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5401 						  struct walk_stmt_info *wi);
5402 
5403 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5404    Using the decl map of the copy_body_data pointed to by WI->info,
5405    remaps all local declarations to appropriate replacements in gimple
5406    operands.  */
5407 
5408 static tree
5409 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5410 {
5411   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5412   copy_body_data *id = (copy_body_data *) wi->info;
5413   hash_map<tree, tree> *st = id->decl_map;
5414   tree *n;
5415   tree expr = *tp;
5416 
5417   /* For recursive invocations this is no longer the LHS itself.  */
5418   bool is_lhs = wi->is_lhs;
5419   wi->is_lhs = false;
5420 
5421   if (TREE_CODE (expr) == SSA_NAME)
5422     {
5423       *tp = remap_ssa_name (*tp, id);
5424       *walk_subtrees = 0;
5425       if (is_lhs)
5426 	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5427     }
5428   /* Only a local declaration (variable or label).  */
5429   else if ((VAR_P (expr) && !TREE_STATIC (expr))
5430 	   || TREE_CODE (expr) == LABEL_DECL)
5431     {
5432       /* Lookup the declaration.  */
5433       n = st->get (expr);
5434 
5435       /* If it's there, remap it.  */
5436       if (n)
5437 	*tp = *n;
5438       *walk_subtrees = 0;
5439     }
5440   else if (TREE_CODE (expr) == STATEMENT_LIST
5441 	   || TREE_CODE (expr) == BIND_EXPR
5442 	   || TREE_CODE (expr) == SAVE_EXPR)
5443     gcc_unreachable ();
5444   else if (TREE_CODE (expr) == TARGET_EXPR)
5445     {
5446       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5447          It's OK for this to happen if it was part of a subtree that
5448          isn't immediately expanded, such as operand 2 of another
5449          TARGET_EXPR.  */
5450       if (!TREE_OPERAND (expr, 1))
5451 	{
5452 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5453 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5454 	}
5455     }
5456   else if (TREE_CODE (expr) == OMP_CLAUSE)
5457     {
5458       /* Before the omplower pass completes, some OMP clauses can contain
5459 	 sequences that are neither copied by gimple_seq_copy nor walked by
5460 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5461 	 in those situations, we have to copy and process them explicitly.  */
5462 
5463       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5464 	{
5465 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5466 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5467 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5468 	}
5469       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5470 	{
5471 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5472 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5473 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5474 	}
5475       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5476 	{
5477 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5478 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5479 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5480 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5481 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5482 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5483 	}
5484     }
5485 
5486   /* Keep iterating.  */
5487   return NULL_TREE;
5488 }
5489 
5490 
5491 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5492    Using the decl map of the copy_body_data pointed to by WI->info,
5493    remaps all local declarations to appropriate replacements in gimple
5494    statements.  */
5495 
5496 static tree
5497 replace_locals_stmt (gimple_stmt_iterator *gsip,
5498 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5499 		     struct walk_stmt_info *wi)
5500 {
5501   copy_body_data *id = (copy_body_data *) wi->info;
5502   gimple *gs = gsi_stmt (*gsip);
5503 
5504   if (gbind *stmt = dyn_cast <gbind *> (gs))
5505     {
5506       tree block = gimple_bind_block (stmt);
5507 
5508       if (block)
5509 	{
5510 	  remap_block (&block, id);
5511 	  gimple_bind_set_block (stmt, block);
5512 	}
5513 
5514       /* This will remap a lot of the same decls again, but this should be
5515 	 harmless.  */
5516       if (gimple_bind_vars (stmt))
5517 	{
5518 	  tree old_var, decls = gimple_bind_vars (stmt);
5519 
5520 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5521 	    if (!can_be_nonlocal (old_var, id)
5522 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5523 	      remap_decl (old_var, id);
5524 
5525 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5526 	  id->prevent_decl_creation_for_types = true;
5527 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5528 	  id->prevent_decl_creation_for_types = false;
5529 	}
5530     }
5531 
5532   /* Keep iterating.  */
5533   return NULL_TREE;
5534 }
5535 
5536 /* Create a copy of SEQ and remap all decls in it.  */
5537 
5538 static gimple_seq
5539 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5540 {
5541   if (!seq)
5542     return NULL;
5543 
5544   /* If there are any labels in OMP sequences, they can only be referred to
5545      from within the sequence itself, so it is safe to do both steps here.  */
5546   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5547   gimple_seq copy = gimple_seq_copy (seq);
5548   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5549   return copy;
5550 }
5551 
5552 /* Copies everything in SEQ and replaces variables and labels local to
5553    current_function_decl.  */
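/* The returned copy shares no local declarations or labels with SEQ, so the
   original sequence and the copy can subsequently be modified independently.  */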
5554 
5555 gimple_seq
5556 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5557 {
5558   copy_body_data id;
5559   struct walk_stmt_info wi;
5560   gimple_seq copy;
5561 
5562   /* There's nothing to do for NULL_TREE.  */
5563   if (seq == NULL)
5564     return seq;
5565 
5566   /* Set up ID.  */
5567   memset (&id, 0, sizeof (id));
5568   id.src_fn = current_function_decl;
5569   id.dst_fn = current_function_decl;
5570   id.src_cfun = cfun;
5571   id.decl_map = new hash_map<tree, tree>;
5572   id.debug_map = NULL;
5573 
5574   id.copy_decl = copy_decl_no_change;
5575   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5576   id.transform_new_cfg = false;
5577   id.transform_return_to_modify = false;
5578   id.transform_parameter = false;
5579   id.transform_lang_insert_block = NULL;
5580 
5581   /* Walk the tree once to find local labels.  */
5582   memset (&wi, 0, sizeof (wi));
5583   hash_set<tree> visited;
5584   wi.info = &id;
5585   wi.pset = &visited;
5586   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5587 
5588   copy = gimple_seq_copy (seq);
5589 
5590   /* Walk the copy, remapping decls.  */
5591   memset (&wi, 0, sizeof (wi));
5592   wi.info = &id;
5593   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5594 
5595   /* Clean up.  */
5596   delete id.decl_map;
5597   if (id.debug_map)
5598     delete id.debug_map;
5599   if (id.dependence_map)
5600     {
5601       delete id.dependence_map;
5602       id.dependence_map = NULL;
5603     }
5604 
5605   return copy;
5606 }
5607 
5608 
5609 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5610 
5611 static tree
5612 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5613 {
5614   if (*tp == data)
5615     return (tree) data;
5616   else
5617     return NULL;
5618 }
5619 
5620 DEBUG_FUNCTION bool
5621 debug_find_tree (tree top, tree search)
5622 {
5623   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5624 }
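/* Example use from gdb while debugging GCC itself (the argument names here
   are hypothetical convenience variables):
     (gdb) call debug_find_tree (some_block_tree, suspect_decl)
   which returns true iff SUSPECT_DECL occurs somewhere within
   SOME_BLOCK_TREE.  */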
5625 
5626 
5627 /* Declare the variables created by the inliner.  Add all the variables in
5628    VARS to BIND_EXPR.  */
5629 
5630 static void
5631 declare_inline_vars (tree block, tree vars)
5632 {
5633   tree t;
5634   for (t = vars; t; t = DECL_CHAIN (t))
5635     {
5636       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5637       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5638       add_local_decl (cfun, t);
5639     }
5640 
5641   if (block)
5642     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5643 }
5644 
5645 /* Finish setting up COPY, a copy of DECL.  DECL originally lived in
5646    ID->src_fn; the copy will be used in ID->dst_fn.  This is the common
5647    tail of the copy_decl_* helpers below.  */
5648 
5649 tree
5650 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5651 {
5652   /* Don't generate debug information for the copy if we wouldn't have
5653      generated it for the original either.  */
5654   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5655   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5656 
5657   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5658      declaration inspired this copy.  */
5659   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5660 
5661   /* The new variable/label has no RTL, yet.  */
5662   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5663       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5664     SET_DECL_RTL (copy, 0);
5665 
5666   /* These args would always appear unused, if not for this.  */
5667   TREE_USED (copy) = 1;
5668 
5669   /* Set the context for the new declaration.  */
5670   if (!DECL_CONTEXT (decl))
5671     /* Globals stay global.  */
5672     ;
5673   else if (DECL_CONTEXT (decl) != id->src_fn)
5674     /* Things that weren't in the scope of the function we're inlining
5675        from aren't in the scope we're inlining to, either.  */
5676     ;
5677   else if (TREE_STATIC (decl))
5678     /* Function-scoped static variables should stay in the original
5679        function.  */
5680     ;
5681   else
5682     {
5683       /* Ordinary automatic local variables are now in the scope of the
5684 	 new function.  */
5685       DECL_CONTEXT (copy) = id->dst_fn;
5686       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5687 	{
5688 	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5689 	    DECL_ATTRIBUTES (copy)
5690 	      = tree_cons (get_identifier ("omp simt private"), NULL,
5691 			   DECL_ATTRIBUTES (copy));
5692 	  id->dst_simt_vars->safe_push (copy);
5693 	}
5694     }
5695 
5696   return copy;
5697 }
5698 
5699 static tree
5700 copy_decl_to_var (tree decl, copy_body_data *id)
5701 {
5702   tree copy, type;
5703 
5704   gcc_assert (TREE_CODE (decl) == PARM_DECL
5705 	      || TREE_CODE (decl) == RESULT_DECL);
5706 
5707   type = TREE_TYPE (decl);
5708 
5709   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5710 		     VAR_DECL, DECL_NAME (decl), type);
5711   if (DECL_PT_UID_SET_P (decl))
5712     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5713   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5714   TREE_READONLY (copy) = TREE_READONLY (decl);
5715   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5716   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5717 
5718   return copy_decl_for_dup_finish (id, decl, copy);
5719 }
5720 
5721 /* Like copy_decl_to_var, but create a return slot object instead of a
5722    pointer variable for return by invisible reference.  */
5723 
5724 static tree
5725 copy_result_decl_to_var (tree decl, copy_body_data *id)
5726 {
5727   tree copy, type;
5728 
5729   gcc_assert (TREE_CODE (decl) == PARM_DECL
5730 	      || TREE_CODE (decl) == RESULT_DECL);
5731 
5732   type = TREE_TYPE (decl);
5733   if (DECL_BY_REFERENCE (decl))
5734     type = TREE_TYPE (type);
5735 
5736   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5737 		     VAR_DECL, DECL_NAME (decl), type);
5738   if (DECL_PT_UID_SET_P (decl))
5739     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5740   TREE_READONLY (copy) = TREE_READONLY (decl);
5741   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5742   if (!DECL_BY_REFERENCE (decl))
5743     {
5744       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5745       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5746     }
5747 
5748   return copy_decl_for_dup_finish (id, decl, copy);
5749 }
5750 
5751 tree
5752 copy_decl_no_change (tree decl, copy_body_data *id)
5753 {
5754   tree copy;
5755 
5756   copy = copy_node (decl);
5757 
5758   /* The COPY is not abstract; it will be generated in DST_FN.  */
5759   DECL_ABSTRACT_P (copy) = false;
5760   lang_hooks.dup_lang_specific_decl (copy);
5761 
5762   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5763      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5764   if (TREE_CODE (copy) == LABEL_DECL)
5765     {
5766       TREE_ADDRESSABLE (copy) = 0;
5767       LABEL_DECL_UID (copy) = -1;
5768     }
5769 
5770   return copy_decl_for_dup_finish (id, decl, copy);
5771 }
5772 
5773 static tree
5774 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5775 {
5776   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5777     return copy_decl_to_var (decl, id);
5778   else
5779     return copy_decl_no_change (decl, id);
5780 }
5781 
5782 /* Return a copy of the function's argument tree.  */
5783 static tree
5784 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5785 			       bitmap args_to_skip, tree *vars)
5786 {
5787   tree arg, *parg;
5788   tree new_parm = NULL;
5789   int i = 0;
5790 
5791   parg = &new_parm;
5792 
5793   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5794     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5795       {
5796         tree new_tree = remap_decl (arg, id);
5797 	if (TREE_CODE (new_tree) != PARM_DECL)
5798 	  new_tree = id->copy_decl (arg, id);
5799         lang_hooks.dup_lang_specific_decl (new_tree);
5800         *parg = new_tree;
5801 	parg = &DECL_CHAIN (new_tree);
5802       }
5803     else if (!id->decl_map->get (arg))
5804       {
5805 	/* Make an equivalent VAR_DECL.  If the argument was used
5806 	   as a temporary variable later in the function, the uses will be
5807 	   replaced by the local variable.  */
5808 	tree var = copy_decl_to_var (arg, id);
5809 	insert_decl_map (id, arg, var);
5810         /* Declare this new variable.  */
5811         DECL_CHAIN (var) = *vars;
5812         *vars = var;
5813       }
5814   return new_parm;
5815 }
5816 
5817 /* Return a copy of the function's static chain.  */
5818 static tree
5819 copy_static_chain (tree static_chain, copy_body_data * id)
5820 {
5821   tree *chain_copy, *pvar;
5822 
5823   chain_copy = &static_chain;
5824   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5825     {
5826       tree new_tree = remap_decl (*pvar, id);
5827       lang_hooks.dup_lang_specific_decl (new_tree);
5828       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5829       *pvar = new_tree;
5830     }
5831   return static_chain;
5832 }
5833 
5834 /* Return true if the function is allowed to be versioned.
5835    This is a guard for the versioning functionality.  */
5836 
5837 bool
5838 tree_versionable_function_p (tree fndecl)
5839 {
5840   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5841 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5842 }
5843 
5844 /* Delete all unreachable basic blocks and update callgraph.
5845    Doing so is somewhat nontrivial because we need to update all clones and
5846    remove inline functions that become unreachable.  */
5847 
5848 static bool
5849 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5850 {
5851   bool changed = false;
5852   basic_block b, next_bb;
5853 
5854   find_unreachable_blocks ();
5855 
5856   /* Delete all unreachable basic blocks.  */
5857 
5858   for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5859        != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5860     {
5861       next_bb = b->next_bb;
5862 
5863       if (!(b->flags & BB_REACHABLE))
5864 	{
5865           gimple_stmt_iterator bsi;
5866 
5867           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5868 	    {
5869 	      struct cgraph_edge *e;
5870 	      struct cgraph_node *node;
5871 
5872 	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5873 
5874 	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5875 		  &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5876 		{
5877 		  if (!e->inline_failed)
5878 		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
5879 		  else
5880 		    e->remove ();
5881 		}
5882 	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5883 		  && id->dst_node->clones)
5884 		for (node = id->dst_node->clones; node != id->dst_node;)
5885 		  {
5886 		    node->remove_stmt_references (gsi_stmt (bsi));
5887 		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5888 			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5889 		      {
5890 			if (!e->inline_failed)
5891 			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
5892 			else
5893 			  e->remove ();
5894 		      }
5895 
5896 		    if (node->clones)
5897 		      node = node->clones;
5898 		    else if (node->next_sibling_clone)
5899 		      node = node->next_sibling_clone;
5900 		    else
5901 		      {
5902 			while (node != id->dst_node && !node->next_sibling_clone)
5903 			  node = node->clone_of;
5904 			if (node != id->dst_node)
5905 			  node = node->next_sibling_clone;
5906 		      }
5907 		  }
5908 	    }
5909 	  delete_basic_block (b);
5910 	  changed = true;
5911 	}
5912     }
5913 
5914   return changed;
5915 }
5916 
5917 /* Update clone info after duplication.  */
5918 
5919 static void
5920 update_clone_info (copy_body_data * id)
5921 {
5922   struct cgraph_node *node;
5923   if (!id->dst_node->clones)
5924     return;
5925   for (node = id->dst_node->clones; node != id->dst_node;)
5926     {
5927       /* First update replace maps to match the new body.  */
5928       if (node->clone.tree_map)
5929         {
5930 	  unsigned int i;
5931           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5932 	    {
5933 	      struct ipa_replace_map *replace_info;
5934 	      replace_info = (*node->clone.tree_map)[i];
5935 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5936 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5937 	    }
5938 	}
5939       if (node->clones)
5940 	node = node->clones;
5941       else if (node->next_sibling_clone)
5942 	node = node->next_sibling_clone;
5943       else
5944 	{
5945 	  while (node != id->dst_node && !node->next_sibling_clone)
5946 	    node = node->clone_of;
5947 	  if (node != id->dst_node)
5948 	    node = node->next_sibling_clone;
5949 	}
5950     }
5951 }
5952 
5953 /* Create a copy of a function's tree.
5954    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5955    of the original function and the new copied function
5956    respectively.  In case we want to replace a DECL
5957    tree with another tree while duplicating the function's
5958    body, TREE_MAP represents the mapping between these
5959    trees. If UPDATE_CLONES is set, the call_stmt fields
5960    of edges of clones of the function will be updated.
5961 
5962    If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5963    from the new version.
5964    If SKIP_RETURN is true, the new version will return void.
5965    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5966    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5967 */
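/* As an illustrative baseline (not an exhaustive description of the callers):
   calling this with a NULL TREE_MAP, ARGS_TO_SKIP, BLOCKS_TO_COPY and
   NEW_ENTRY, and with UPDATE_CLONES and SKIP_RETURN false, simply materializes
   a complete copy of OLD_DECL's body into NEW_DECL.  */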
5968 void
5969 tree_function_versioning (tree old_decl, tree new_decl,
5970 			  vec<ipa_replace_map *, va_gc> *tree_map,
5971 			  bool update_clones, bitmap args_to_skip,
5972 			  bool skip_return, bitmap blocks_to_copy,
5973 			  basic_block new_entry)
5974 {
5975   struct cgraph_node *old_version_node;
5976   struct cgraph_node *new_version_node;
5977   copy_body_data id;
5978   tree p;
5979   unsigned i;
5980   struct ipa_replace_map *replace_info;
5981   basic_block old_entry_block, bb;
5982   auto_vec<gimple *, 10> init_stmts;
5983   tree vars = NULL_TREE;
5984   bitmap debug_args_to_skip = args_to_skip;
5985 
5986   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5987 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5988   DECL_POSSIBLY_INLINED (old_decl) = 1;
5989 
5990   old_version_node = cgraph_node::get (old_decl);
5991   gcc_checking_assert (old_version_node);
5992   new_version_node = cgraph_node::get (new_decl);
5993   gcc_checking_assert (new_version_node);
5994 
5995   /* Copy over debug args.  */
5996   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5997     {
5998       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5999       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6000       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6001       old_debug_args = decl_debug_args_lookup (old_decl);
6002       if (old_debug_args)
6003 	{
6004 	  new_debug_args = decl_debug_args_insert (new_decl);
6005 	  *new_debug_args = vec_safe_copy (*old_debug_args);
6006 	}
6007     }
6008 
6009   /* Output the inlining info for this abstract function, since it has been
6010      inlined.  If we don't do this now, we can lose the information about the
6011      variables in the function when the blocks get blown away as soon as we
6012      remove the cgraph node.  */
6013   (*debug_hooks->outlining_inline_function) (old_decl);
6014 
6015   DECL_ARTIFICIAL (new_decl) = 1;
6016   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6017   if (DECL_ORIGIN (old_decl) == old_decl)
6018     old_version_node->used_as_abstract_origin = true;
6019   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6020 
6021   /* Prepare the data structures for the tree copy.  */
6022   memset (&id, 0, sizeof (id));
6023 
6024   /* Generate a new name for the new version. */
6025   id.statements_to_fold = new hash_set<gimple *>;
6026 
6027   id.decl_map = new hash_map<tree, tree>;
6028   id.debug_map = NULL;
6029   id.src_fn = old_decl;
6030   id.dst_fn = new_decl;
6031   id.src_node = old_version_node;
6032   id.dst_node = new_version_node;
6033   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6034   id.blocks_to_copy = blocks_to_copy;
6035 
6036   id.copy_decl = copy_decl_no_change;
6037   id.transform_call_graph_edges
6038     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6039   id.transform_new_cfg = true;
6040   id.transform_return_to_modify = false;
6041   id.transform_parameter = false;
6042   id.transform_lang_insert_block = NULL;
6043 
6044   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6045     (DECL_STRUCT_FUNCTION (old_decl));
6046   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6047   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6048   initialize_cfun (new_decl, old_decl,
6049 		   old_entry_block->count);
6050   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6051     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6052       = id.src_cfun->gimple_df->ipa_pta;
6053 
6054   /* Copy the function's static chain.  */
6055   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6056   if (p)
6057     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6058       = copy_static_chain (p, &id);
6059 
6060   /* If there's a tree_map, prepare for substitution.  */
6061   if (tree_map)
6062     for (i = 0; i < tree_map->length (); i++)
6063       {
6064 	gimple *init;
6065 	replace_info = (*tree_map)[i];
6066 	if (replace_info->replace_p)
6067 	  {
6068 	    int parm_num = -1;
6069 	    if (!replace_info->old_tree)
6070 	      {
6071 		int p = replace_info->parm_num;
6072 		tree parm;
6073 		tree req_type, new_type;
6074 
6075 		for (parm = DECL_ARGUMENTS (old_decl); p;
6076 		     parm = DECL_CHAIN (parm))
6077 		  p--;
6078 		replace_info->old_tree = parm;
6079 		parm_num = replace_info->parm_num;
6080 		req_type = TREE_TYPE (parm);
6081 		new_type = TREE_TYPE (replace_info->new_tree);
6082 		if (!useless_type_conversion_p (req_type, new_type))
6083 		  {
6084 		    if (fold_convertible_p (req_type, replace_info->new_tree))
6085 		      replace_info->new_tree
6086 			= fold_build1 (NOP_EXPR, req_type,
6087 				       replace_info->new_tree);
6088 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6089 		      replace_info->new_tree
6090 			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
6091 				       replace_info->new_tree);
6092 		    else
6093 		      {
6094 			if (dump_file)
6095 			  {
6096 			    fprintf (dump_file, "    const ");
6097 			    print_generic_expr (dump_file,
6098 						replace_info->new_tree, 0);
6099 			    fprintf (dump_file,
6100 				     "  can't be converted to param ");
6101 			    print_generic_expr (dump_file, parm, 0);
6102 			    fprintf (dump_file, "\n");
6103 			  }
6104 			replace_info->old_tree = NULL;
6105 		      }
6106 		  }
6107 	      }
6108 	    else
6109 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6110 	    if (replace_info->old_tree)
6111 	      {
6112 		init = setup_one_parameter (&id, replace_info->old_tree,
6113 					    replace_info->new_tree, id.src_fn,
6114 					    NULL,
6115 					    &vars);
6116 		if (init)
6117 		  init_stmts.safe_push (init);
6118 		if (MAY_HAVE_DEBUG_STMTS && args_to_skip)
6119 		  {
6120 		    if (parm_num == -1)
6121 		      {
6122 			tree parm;
6123 			int p;
6124 			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6125 			     parm = DECL_CHAIN (parm), p++)
6126 			  if (parm == replace_info->old_tree)
6127 			    {
6128 			      parm_num = p;
6129 			      break;
6130 			    }
6131 		      }
6132 		    if (parm_num != -1)
6133 		      {
6134 			if (debug_args_to_skip == args_to_skip)
6135 			  {
6136 			    debug_args_to_skip = BITMAP_ALLOC (NULL);
6137 			    bitmap_copy (debug_args_to_skip, args_to_skip);
6138 			  }
6139 			bitmap_clear_bit (debug_args_to_skip, parm_num);
6140 		      }
6141 		  }
6142 	      }
6143 	  }
6144       }
6145   /* Copy the function's arguments.  */
6146   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6147     DECL_ARGUMENTS (new_decl)
6148       = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6149 				       args_to_skip, &vars);
6150 
6151   /* Remove any "omp declare simd" attributes from the new decl's attributes.  */
6152   if (tree a = lookup_attribute ("omp declare simd",
6153 				 DECL_ATTRIBUTES (new_decl)))
6154     {
6155       while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
6156 	a = a2;
6157       a = TREE_CHAIN (a);
6158       for (tree *p = &DECL_ATTRIBUTES (new_decl); *p != a;)
6159 	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
6160 	  *p = TREE_CHAIN (*p);
6161 	else
6162 	  {
6163 	    tree chain = TREE_CHAIN (*p);
6164 	    *p = copy_node (*p);
6165 	    p = &TREE_CHAIN (*p);
6166 	    *p = chain;
6167 	  }
6168     }
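  /* The loop above unshares the leading part of the attribute list (it may be
     shared with the original decl) while dropping every "omp declare simd"
     attribute from the copy; attributes after the last such entry remain
     shared.  */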
6169 
6170   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6171   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6172 
6173   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6174 
6175   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6176     /* Add local vars.  */
6177     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6178 
6179   if (DECL_RESULT (old_decl) == NULL_TREE)
6180     ;
6181   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6182     {
6183       DECL_RESULT (new_decl)
6184 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6185 		      RESULT_DECL, NULL_TREE, void_type_node);
6186       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6187       cfun->returns_struct = 0;
6188       cfun->returns_pcc_struct = 0;
6189     }
6190   else
6191     {
6192       tree old_name;
6193       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6194       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6195       if (gimple_in_ssa_p (id.src_cfun)
6196 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6197 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6198 	{
6199 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6200 	  insert_decl_map (&id, old_name, new_name);
6201 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6202 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6203 	}
6204     }
6205 
6206   /* Set up the destination function's loop tree.  */
6207   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6208     {
6209       cfun->curr_properties &= ~PROP_loops;
6210       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6211       cfun->curr_properties |= PROP_loops;
6212     }
6213 
6214   /* Copy the function's body.  */
6215   copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
6216 	     ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6217 	     new_entry);
6218 
6219   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6220   number_blocks (new_decl);
6221 
6222   /* We want to create the BB unconditionally, so that the addition of
6223      debug stmts doesn't affect BB count, which may in the end cause
6224      codegen differences.  */
6225   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6226   while (init_stmts.length ())
6227     insert_init_stmt (&id, bb, init_stmts.pop ());
6228   update_clone_info (&id);
6229 
6230   /* Remap the nonlocal_goto_save_area, if any.  */
6231   if (cfun->nonlocal_goto_save_area)
6232     {
6233       struct walk_stmt_info wi;
6234 
6235       memset (&wi, 0, sizeof (wi));
6236       wi.info = &id;
6237       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6238     }
6239 
6240   /* Clean up.  */
6241   delete id.decl_map;
6242   if (id.debug_map)
6243     delete id.debug_map;
6244   free_dominance_info (CDI_DOMINATORS);
6245   free_dominance_info (CDI_POST_DOMINATORS);
6246 
6247   fold_marked_statements (0, id.statements_to_fold);
6248   delete id.statements_to_fold;
6249   delete_unreachable_blocks_update_callgraph (&id);
6250   if (id.dst_node->definition)
6251     cgraph_edge::rebuild_references ();
6252   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6253     {
6254       calculate_dominance_info (CDI_DOMINATORS);
6255       fix_loop_structure (NULL);
6256     }
6257   update_ssa (TODO_update_ssa);
6258 
6259   /* After partial cloning we need to rescale frequencies, so they are
6260      within proper range in the cloned function.  */
6261   if (new_entry)
6262     {
6263       struct cgraph_edge *e;
6264       rebuild_frequencies ();
6265 
6266       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6267       for (e = new_version_node->callees; e; e = e->next_callee)
6268 	{
6269 	  basic_block bb = gimple_bb (e->call_stmt);
6270 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
6271 							 bb);
6272 	  e->count = bb->count;
6273 	}
6274       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6275 	{
6276 	  basic_block bb = gimple_bb (e->call_stmt);
6277 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
6278 							 bb);
6279 	  e->count = bb->count;
6280 	}
6281     }
6282 
6283   if (debug_args_to_skip && MAY_HAVE_DEBUG_STMTS)
6284     {
6285       tree parm;
6286       vec<tree, va_gc> **debug_args = NULL;
6287       unsigned int len = 0;
6288       for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6289 	   parm; parm = DECL_CHAIN (parm), i++)
6290 	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6291 	  {
6292 	    tree ddecl;
6293 
6294 	    if (debug_args == NULL)
6295 	      {
6296 		debug_args = decl_debug_args_insert (new_decl);
6297 		len = vec_safe_length (*debug_args);
6298 	      }
6299 	    ddecl = make_node (DEBUG_EXPR_DECL);
6300 	    DECL_ARTIFICIAL (ddecl) = 1;
6301 	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
6302 	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
6303 	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6304 	    vec_safe_push (*debug_args, ddecl);
6305 	  }
6306       if (debug_args != NULL)
6307 	{
6308 	  /* On the callee side, add
6309 	     DEBUG D#Y s=> parm
6310 	     DEBUG var => D#Y
6311 	     stmts to the first bb where var is a VAR_DECL created for the
6312 	     optimized-away parameter in the DECL_INITIAL block.  This hints
6313 	     in the debug info that var (whose DECL_ORIGIN is the parm
6314 	     PARM_DECL) is optimized away, but could be looked up at the
6315 	     call site as value of D#X there.  */
6316 	  tree var = vars, vexpr;
6317 	  gimple_stmt_iterator cgsi
6318 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6319 	  gimple *def_temp;
6320 	  var = vars;
6321 	  i = vec_safe_length (*debug_args);
6322 	  do
6323 	    {
6324 	      i -= 2;
6325 	      while (var != NULL_TREE
6326 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6327 		var = TREE_CHAIN (var);
6328 	      if (var == NULL_TREE)
6329 		break;
6330 	      vexpr = make_node (DEBUG_EXPR_DECL);
6331 	      parm = (**debug_args)[i];
6332 	      DECL_ARTIFICIAL (vexpr) = 1;
6333 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6334 	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
6335 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6336 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6337 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6338 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6339 	    }
6340 	  while (i > len);
6341 	}
6342     }
6343 
6344   if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6345     BITMAP_FREE (debug_args_to_skip);
6346   free_dominance_info (CDI_DOMINATORS);
6347   free_dominance_info (CDI_POST_DOMINATORS);
6348 
6349   gcc_assert (!id.debug_stmts.exists ());
6350   pop_cfun ();
6351   return;
6352 }
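
/* A hedged usage sketch, not part of the original file: the tail above
   belongs to the function versioning entry point (tree_function_versioning),
   which the cgraph cloning machinery drives roughly as follows, with all
   argument names purely illustrative:

     tree_function_versioning (old_decl, new_decl, tree_map, update_clones,
			       args_to_skip, skip_return, blocks_to_copy,
			       new_entry);

   tree_map supplies ipa_replace_map entries for parameters that become
   constants, args_to_skip marks parameters dropped from the clone, and a
   non-NULL new_entry requests a partial clone.  Dropped parameters can still
   be described in the debug info through the DEBUG bindings emitted above
   when MAY_HAVE_DEBUG_STMTS holds.  */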
6353 
6354 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6355    the callee and return the inlined body on success.  */
6356 
6357 tree
6358 maybe_inline_call_in_expr (tree exp)
6359 {
6360   tree fn = get_callee_fndecl (exp);
6361 
6362   /* We can only try to inline "const" functions.  */
6363   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6364     {
6365       call_expr_arg_iterator iter;
6366       copy_body_data id;
6367       tree param, arg, t;
6368       hash_map<tree, tree> decl_map;
6369 
6370       /* Remap the parameters.  */
6371       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6372 	   param;
6373 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6374 	decl_map.put (param, arg);
6375 
6376       memset (&id, 0, sizeof (id));
6377       id.src_fn = fn;
6378       id.dst_fn = current_function_decl;
6379       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6380       id.decl_map = &decl_map;
6381 
6382       id.copy_decl = copy_decl_no_change;
6383       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6384       id.transform_new_cfg = false;
6385       id.transform_return_to_modify = true;
6386       id.transform_parameter = true;
6387       id.transform_lang_insert_block = NULL;
6388 
6389       /* Make sure not to unshare trees behind the front-end's back
6390 	 since front-end specific mechanisms may rely on sharing.  */
6391       id.regimplify = false;
6392       id.do_not_unshare = true;
6393 
6394       /* We're not inside any EH region.  */
6395       id.eh_lp_nr = 0;
6396 
6397       t = copy_tree_body (&id);
6398 
6399       /* We can only return something suitable for use in a GENERIC
6400 	 expression tree.  */
6401       if (TREE_CODE (t) == MODIFY_EXPR)
6402 	return TREE_OPERAND (t, 1);
6403     }
6404 
6405   return NULL_TREE;
6406 }
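
/* A minimal usage sketch, not part of the original file, with illustrative
   names: a caller folding a GENERIC expression can try the helper above and
   fall back to the original CALL_EXPR when inlining is not possible:

     tree folded = maybe_inline_call_in_expr (call);
     if (folded == NULL_TREE)
       folded = call;

   NULL_TREE is returned when the callee is not "const", has no saved body,
   or when the copied body does not reduce to a single MODIFY_EXPR.  */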
6407 
6408 /* Duplicate a type, fields and all.  */
6409 
6410 tree
6411 build_duplicate_type (tree type)
6412 {
6413   struct copy_body_data id;
6414 
6415   memset (&id, 0, sizeof (id));
6416   id.src_fn = current_function_decl;
6417   id.dst_fn = current_function_decl;
6418   id.src_cfun = cfun;
6419   id.decl_map = new hash_map<tree, tree>;
6420   id.debug_map = NULL;
6421   id.copy_decl = copy_decl_no_change;
6422 
6423   type = remap_type_1 (type, &id);
6424 
6425   delete id.decl_map;
6426   if (id.debug_map)
6427     delete id.debug_map;
6428 
6429   TYPE_CANONICAL (type) = type;
6430 
6431   return type;
6432 }
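
/* Illustration, not part of the original file: because TYPE_CANONICAL of
   the copy is set to the copy itself above, the duplicate is structurally
   identical to yet canonically distinct from the original, e.g.:

     tree copy = build_duplicate_type (orig_type);
     gcc_checking_assert (TYPE_CANONICAL (copy) == copy);

   Here orig_type stands for any existing type the caller wants duplicated.  */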
6433 
6434 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6435    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6436    evaluation.  */
6437 
6438 tree
6439 copy_fn (tree fn, tree& parms, tree& result)
6440 {
6441   copy_body_data id;
6442   tree param;
6443   hash_map<tree, tree> decl_map;
6444 
6445   tree *p = &parms;
6446   *p = NULL_TREE;
6447 
6448   memset (&id, 0, sizeof (id));
6449   id.src_fn = fn;
6450   id.dst_fn = current_function_decl;
6451   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6452   id.decl_map = &decl_map;
6453 
6454   id.copy_decl = copy_decl_no_change;
6455   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6456   id.transform_new_cfg = false;
6457   id.transform_return_to_modify = false;
6458   id.transform_parameter = true;
6459   id.transform_lang_insert_block = NULL;
6460 
6461   /* Make sure not to unshare trees behind the front-end's back
6462      since front-end specific mechanisms may rely on sharing.  */
6463   id.regimplify = false;
6464   id.do_not_unshare = true;
6465 
6466   /* We're not inside any EH region.  */
6467   id.eh_lp_nr = 0;
6468 
6469   /* Remap the parameters and result and return them to the caller.  */
6470   for (param = DECL_ARGUMENTS (fn);
6471        param;
6472        param = DECL_CHAIN (param))
6473     {
6474       *p = remap_decl (param, &id);
6475       p = &DECL_CHAIN (*p);
6476     }
6477 
6478   if (DECL_RESULT (fn))
6479     result = remap_decl (DECL_RESULT (fn), &id);
6480   else
6481     result = NULL_TREE;
6482 
6483   return copy_tree_body (&id);
6484 }
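
/* A minimal usage sketch, not part of the original file, with illustrative
   names: the C++ constexpr evaluator obtains a private, unshared copy of a
   function before interpreting it:

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   On return, parms chains the remapped PARM_DECL copies, result is the
   remapped RESULT_DECL (or NULL_TREE when the function has none), and body
   is an unshared copy of DECL_SAVED_TREE whose references point at those
   copies.  */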
6485