xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-inline.c (revision e6c7e151de239c49d2e38720a061ed9d1fa99309)
1 /* Tree inlining.
2    Copyright (C) 2001-2017 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "tree-chkp.h"
60 #include "attribs.h"
61 
62 
63 /* I'm not really happy about this, but we need to handle gimple and
64    non-gimple trees.  */
65 
66 /* Inlining, Cloning, Versioning, Parallelization
67 
68    Inlining: a function body is duplicated, but the PARM_DECLs are
69    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
70    MODIFY_EXPRs that store to a dedicated returned-value variable.
71    The duplicated eh_region info of the copy will later be appended
72    to the info for the caller; the eh_region info in copied throwing
73    statements and RESX statements is adjusted accordingly.
74 
75    Cloning: (only in C++) We have one body for a con/de/structor, and
76    multiple function decls, each with a unique parameter list.
77    Duplicate the body, using the given splay tree; some parameters
78    will become constants (like 0 or 1).
79 
80    Versioning: a function body is duplicated and the result is a new
81    function, rather than being inserted into blocks of an existing
82    function as with inlining.  Some parameters will become constants.
83 
84    Parallelization: a region of a function is duplicated resulting in
85    a new function.  Variables may be replaced with complex expressions
86    to enable shared variable semantics.
87 
88    All of these will simultaneously look up any callgraph edges.  If
89    we're going to inline the duplicated function body, and the given
90    function has some cloned callgraph nodes (one for each place this
91    function will be inlined) those callgraph edges will be duplicated.
92    If we're cloning the body, those callgraph edges will be
93    updated to point into the new body.  (Note that the original
94    callgraph node and edge list will not be altered.)
95 
96    See the CALL_EXPR handling case in copy_tree_body_r ().  */
97 
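/* Illustrative sketch (not part of this file): conceptually, inlining a
   call such as

     int callee (int x) { return x + 1; }
     ...
     y = callee (a);

   duplicates the callee body into the caller, remaps the PARM_DECL X
   into a local VAR_DECL initialized from A, and rewrites the non-void
   RETURN_EXPR into an assignment to a dedicated returned-value
   variable, roughly:

     x.copy = a;
     retval.tmp = x.copy + 1;
     y = retval.tmp;

   The names x.copy and retval.tmp are hypothetical, chosen only to show
   the shape of the transformation described above.  */
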
98 /* To Do:
99 
100    o In order to make inlining-on-trees work, we pessimized
101      function-local static constants.  In particular, they are now
102      always output, even when not addressed.  Fix this by treating
103      function-local static constants just like global static
104      constants; the back-end already knows not to output them if they
105      are not needed.
106 
107    o Provide heuristics to clamp inlining of recursive template
108      calls?  */
109 
110 
111 /* Weights that estimate_num_insns uses to estimate the size of the
112    produced code.  */
113 
114 eni_weights eni_size_weights;
115 
116 /* Weights that estimate_num_insns uses to estimate the time necessary
117    to execute the produced code.  */
118 
119 eni_weights eni_time_weights;
120 
121 /* Prototypes.  */
122 
123 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
124 				     basic_block);
125 static void remap_block (tree *, copy_body_data *);
126 static void copy_bind_expr (tree *, int *, copy_body_data *);
127 static void declare_inline_vars (tree, tree);
128 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
129 static void prepend_lexical_block (tree current_block, tree new_block);
130 static tree copy_decl_to_var (tree, copy_body_data *);
131 static tree copy_result_decl_to_var (tree, copy_body_data *);
132 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
133 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
134 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
135 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
136 
137 /* Insert a tree->tree mapping for ID.  Although the name suggests
138    that the trees should be variables, this is used for more than that.  */
139 
140 void
141 insert_decl_map (copy_body_data *id, tree key, tree value)
142 {
143   id->decl_map->put (key, value);
144 
145   /* Always insert an identity map as well.  If we see this same new
146      node again, we won't want to duplicate it a second time.  */
147   if (key != value)
148     id->decl_map->put (value, value);
149 }
150 
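/* Usage sketch (illustrative only): when inlining replaces a PARM_DECL
   P of the callee with a fresh VAR_DECL V in the caller,

     insert_decl_map (id, p, v);

   records both the mapping P -> V and the identity mapping V -> V, so
   that a later lookup of V returns V itself rather than triggering the
   creation of yet another copy.  */
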
151 /* Insert a tree->tree mapping for ID.  This is only used for
152    variables.  */
153 
154 static void
155 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 {
157   if (!gimple_in_ssa_p (id->src_cfun))
158     return;
159 
160   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
161     return;
162 
163   if (!target_for_debug_bind (key))
164     return;
165 
166   gcc_assert (TREE_CODE (key) == PARM_DECL);
167   gcc_assert (VAR_P (value));
168 
169   if (!id->debug_map)
170     id->debug_map = new hash_map<tree, tree>;
171 
172   id->debug_map->put (key, value);
173 }
174 
175 /* If nonzero, we're remapping the contents of inlined debug
176    statements.  If negative, an error has occurred, such as a
177    reference to a variable that isn't available in the inlined
178    context.  */
179 static int processing_debug_stmt = 0;
180 
181 /* Construct new SSA name for old NAME. ID is the inline context.  */
182 
183 static tree
184 remap_ssa_name (tree name, copy_body_data *id)
185 {
186   tree new_tree, var;
187   tree *n;
188 
189   gcc_assert (TREE_CODE (name) == SSA_NAME);
190 
191   n = id->decl_map->get (name);
192   if (n)
193     return unshare_expr (*n);
194 
195   if (processing_debug_stmt)
196     {
197       if (SSA_NAME_IS_DEFAULT_DEF (name)
198 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
199 	  && id->entry_bb == NULL
200 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
201 	{
202 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
203 	  gimple *def_temp;
204 	  gimple_stmt_iterator gsi;
205 	  tree val = SSA_NAME_VAR (name);
206 
207 	  n = id->decl_map->get (val);
208 	  if (n != NULL)
209 	    val = *n;
210 	  if (TREE_CODE (val) != PARM_DECL)
211 	    {
212 	      processing_debug_stmt = -1;
213 	      return name;
214 	    }
215 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
216 	  DECL_ARTIFICIAL (vexpr) = 1;
217 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
218 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
219 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
220 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
221 	  return vexpr;
222 	}
223 
224       processing_debug_stmt = -1;
225       return name;
226     }
227 
228   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
229   var = SSA_NAME_VAR (name);
230   if (!var
231       || (!SSA_NAME_IS_DEFAULT_DEF (name)
232 	  && VAR_P (var)
233 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
234 	  && DECL_ARTIFICIAL (var)
235 	  && DECL_IGNORED_P (var)
236 	  && !DECL_NAME (var)))
237     {
238       struct ptr_info_def *pi;
239       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
240       if (!var && SSA_NAME_IDENTIFIER (name))
241 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
242       insert_decl_map (id, name, new_tree);
243       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
244 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
245       /* At least IPA points-to info can be directly transferred.  */
246       if (id->src_cfun->gimple_df
247 	  && id->src_cfun->gimple_df->ipa_pta
248 	  && POINTER_TYPE_P (TREE_TYPE (name))
249 	  && (pi = SSA_NAME_PTR_INFO (name))
250 	  && !pi->pt.anything)
251 	{
252 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
253 	  new_pi->pt = pi->pt;
254 	}
255       return new_tree;
256     }
257 
258   /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
259      that in copy_bb.  */
260   new_tree = remap_decl (var, id);
261 
262   /* We might have substituted a constant or another SSA_NAME for
263      the variable.
264 
265      Replace the SSA name representing RESULT_DECL by the variable during
266      inlining: this saves us from the need to introduce a PHI node in case
267      the return value is only partly initialized.  */
268   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
269       && (!SSA_NAME_VAR (name)
270 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
271 	  || !id->transform_return_to_modify))
272     {
273       struct ptr_info_def *pi;
274       new_tree = make_ssa_name (new_tree);
275       insert_decl_map (id, name, new_tree);
276       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
277 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
278       /* At least IPA points-to info can be directly transferred.  */
279       if (id->src_cfun->gimple_df
280 	  && id->src_cfun->gimple_df->ipa_pta
281 	  && POINTER_TYPE_P (TREE_TYPE (name))
282 	  && (pi = SSA_NAME_PTR_INFO (name))
283 	  && !pi->pt.anything)
284 	{
285 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
286 	  new_pi->pt = pi->pt;
287 	}
288       if (SSA_NAME_IS_DEFAULT_DEF (name))
289 	{
290 	  /* By inlining a function having an uninitialized variable, we might
291 	     extend its lifetime (the variable might get reused).  This causes
292 	     an ICE when we end up extending the lifetime of an SSA name across
293 	     an abnormal edge, and also increases register pressure.
294 
295 	     We simply initialize all uninitialized vars to 0, except for the
296 	     case where we are inlining into the very first BB.  We can avoid
297 	     this for all BBs that are not inside strongly connected
298 	     regions of the CFG, but this is expensive to test.  */
299 	  if (id->entry_bb
300 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
301 	      && (!SSA_NAME_VAR (name)
302 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
303 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
304 					     0)->dest
305 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
306 	    {
307 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
308 	      gimple *init_stmt;
309 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
310 
311 	      init_stmt = gimple_build_assign (new_tree, zero);
312 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
313 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
314 	    }
315 	  else
316 	    {
317 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
318 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
319 	    }
320 	}
321     }
322   else
323     insert_decl_map (id, name, new_tree);
324   return new_tree;
325 }
326 
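/* Illustrative sketch of the zero-initialization above: if the inlined
   body uses the default definition of an uninitialized local, say
   x_1(D), and that name occurs in an abnormal PHI, remap_ssa_name emits
   roughly

     x_copy = 0;

   at the end of id->entry_bb instead of creating a new default
   definition, so the copied name has a real defining statement and its
   lifetime is not extended across an abnormal edge.  The name x_copy is
   hypothetical.  */
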
327 /* Remap DECL during the copying of the BLOCK tree for the function.  */
328 
329 tree
330 remap_decl (tree decl, copy_body_data *id)
331 {
332   tree *n;
333 
334   /* We only remap local variables in the current function.  */
335 
336   /* See if we have remapped this declaration.  */
337 
338   n = id->decl_map->get (decl);
339 
340   if (!n && processing_debug_stmt)
341     {
342       processing_debug_stmt = -1;
343       return decl;
344     }
345 
346   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
347      necessary DECLs have already been remapped and we do not want to duplicate
348      a decl coming from outside of the sequence we are copying.  */
349   if (!n
350       && id->prevent_decl_creation_for_types
351       && id->remapping_type_depth > 0
352       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
353     return decl;
354 
355   /* If we didn't already have an equivalent for this declaration, create one
356      now.  */
357   if (!n)
358     {
359       /* Make a copy of the variable or label.  */
360       tree t = id->copy_decl (decl, id);
361 
362       /* Remember it, so that if we encounter this local entity again
363 	 we can reuse this copy.  Do this early because remap_type may
364 	 need this decl for TYPE_STUB_DECL.  */
365       insert_decl_map (id, decl, t);
366 
367       if (!DECL_P (t))
368 	return t;
369 
370       /* Remap types, if necessary.  */
371       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
372       if (TREE_CODE (t) == TYPE_DECL)
373 	{
374 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
375 
376 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
377 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
378 	     is not set on the TYPE_DECL, for example in LTO mode.  */
379 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
380 	    {
381 	      tree x = build_variant_type_copy (TREE_TYPE (t));
382 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
383 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
384 	      DECL_ORIGINAL_TYPE (t) = x;
385 	    }
386 	}
387 
388       /* Remap sizes as necessary.  */
389       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
390       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
391 
392       /* For fields, do likewise for the offset and qualifier.  */
393       if (TREE_CODE (t) == FIELD_DECL)
394 	{
395 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
396 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
397 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
398 	}
399 
400       return t;
401     }
402 
403   if (id->do_not_unshare)
404     return *n;
405   else
406     return unshare_expr (*n);
407 }
408 
409 static tree
410 remap_type_1 (tree type, copy_body_data *id)
411 {
412   tree new_tree, t;
413 
414   /* We do need a copy.  Build and register it now.  If this is a pointer or
415      reference type, remap the designated type and make a new pointer or
416      reference type.  */
417   if (TREE_CODE (type) == POINTER_TYPE)
418     {
419       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
420 					 TYPE_MODE (type),
421 					 TYPE_REF_CAN_ALIAS_ALL (type));
422       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
423 	new_tree = build_type_attribute_qual_variant (new_tree,
424 						      TYPE_ATTRIBUTES (type),
425 						      TYPE_QUALS (type));
426       insert_decl_map (id, type, new_tree);
427       return new_tree;
428     }
429   else if (TREE_CODE (type) == REFERENCE_TYPE)
430     {
431       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
432 					    TYPE_MODE (type),
433 					    TYPE_REF_CAN_ALIAS_ALL (type));
434       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
435 	new_tree = build_type_attribute_qual_variant (new_tree,
436 						      TYPE_ATTRIBUTES (type),
437 						      TYPE_QUALS (type));
438       insert_decl_map (id, type, new_tree);
439       return new_tree;
440     }
441   else
442     new_tree = copy_node (type);
443 
444   insert_decl_map (id, type, new_tree);
445 
446   /* This is a new type, not a copy of an old type.  Need to reassociate
447      variants.  We can handle everything except the main variant lazily.  */
448   t = TYPE_MAIN_VARIANT (type);
449   if (type != t)
450     {
451       t = remap_type (t, id);
452       TYPE_MAIN_VARIANT (new_tree) = t;
453       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
454       TYPE_NEXT_VARIANT (t) = new_tree;
455     }
456   else
457     {
458       TYPE_MAIN_VARIANT (new_tree) = new_tree;
459       TYPE_NEXT_VARIANT (new_tree) = NULL;
460     }
461 
462   if (TYPE_STUB_DECL (type))
463     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
464 
465   /* Lazily create pointer and reference types.  */
466   TYPE_POINTER_TO (new_tree) = NULL;
467   TYPE_REFERENCE_TO (new_tree) = NULL;
468 
469   /* Copy all types that may contain references to local variables; be sure to
470      preserve sharing between the type and its main variant when possible.  */
471   switch (TREE_CODE (new_tree))
472     {
473     case INTEGER_TYPE:
474     case REAL_TYPE:
475     case FIXED_POINT_TYPE:
476     case ENUMERAL_TYPE:
477     case BOOLEAN_TYPE:
478       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
479 	{
480 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
481 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
482 
483 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
484 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
485 	}
486       else
487 	{
488 	  t = TYPE_MIN_VALUE (new_tree);
489 	  if (t && TREE_CODE (t) != INTEGER_CST)
490 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
491 
492 	  t = TYPE_MAX_VALUE (new_tree);
493 	  if (t && TREE_CODE (t) != INTEGER_CST)
494 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
495 	}
496       return new_tree;
497 
498     case FUNCTION_TYPE:
499       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
500 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
501 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
502       else
503         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
504       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
505 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
506 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
507       else
508         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
509       return new_tree;
510 
511     case ARRAY_TYPE:
512       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
513 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
514 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
515       else
516 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
517 
518       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
519 	{
520 	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
521 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
522 	}
523       else
524 	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
525       break;
526 
527     case RECORD_TYPE:
528     case UNION_TYPE:
529     case QUAL_UNION_TYPE:
530       if (TYPE_MAIN_VARIANT (type) != type
531 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
532 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
533       else
534 	{
535 	  tree f, nf = NULL;
536 
537 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
538 	    {
539 	      t = remap_decl (f, id);
540 	      DECL_CONTEXT (t) = new_tree;
541 	      DECL_CHAIN (t) = nf;
542 	      nf = t;
543 	    }
544 	  TYPE_FIELDS (new_tree) = nreverse (nf);
545 	}
546       break;
547 
548     case OFFSET_TYPE:
549     default:
550       /* Shouldn't have been thought variable sized.  */
551       gcc_unreachable ();
552     }
553 
554   /* All variants of a type share the same size, so use the already remapped data.  */
555   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
556     {
557       gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
558       gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
559 
560       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
561       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
562     }
563   else
564     {
565       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
566       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
567     }
568 
569   return new_tree;
570 }
571 
572 tree
573 remap_type (tree type, copy_body_data *id)
574 {
575   tree *node;
576   tree tmp;
577 
578   if (type == NULL)
579     return type;
580 
581   /* See if we have remapped this type.  */
582   node = id->decl_map->get (type);
583   if (node)
584     return *node;
585 
586   /* The type only needs remapping if it's variably modified.  */
587   if (! variably_modified_type_p (type, id->src_fn))
588     {
589       insert_decl_map (id, type, type);
590       return type;
591     }
592 
593   id->remapping_type_depth++;
594   tmp = remap_type_1 (type, id);
595   id->remapping_type_depth--;
596 
597   return tmp;
598 }
599 
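/* Illustrative sketch: for an ordinary type such as "int", remap_type
   records an identity mapping and returns the type unchanged.  For a
   variably modified type, e.g. the array type of A in

     void f (int n) { int a[n]; ... }

   the type depends on the local value N, so remap_type_1 builds a fresh
   ARRAY_TYPE, remaps its TYPE_DOMAIN, and walks its TYPE_SIZE with
   copy_tree_body_r so that the bounds refer to the remapped copy of N.  */
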
600 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
601 
602 static bool
603 can_be_nonlocal (tree decl, copy_body_data *id)
604 {
605   /* We cannot duplicate function decls.  */
606   if (TREE_CODE (decl) == FUNCTION_DECL)
607     return true;
608 
609   /* Local static vars must be non-local or we get multiple declaration
610      problems.  */
611   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
612     return true;
613 
614   return false;
615 }
616 
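/* Illustrative sketch: for a callee containing

     static int counter;
     counter++;

   can_be_nonlocal returns true for COUNTER, so remap_decls below keeps
   the original decl (adding it to the caller's local decls) instead of
   duplicating it, which would otherwise introduce a second definition
   of the same static variable.  */
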
617 static tree
618 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
619 	     copy_body_data *id)
620 {
621   tree old_var;
622   tree new_decls = NULL_TREE;
623 
624   /* Remap its variables.  */
625   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
626     {
627       tree new_var;
628 
629       if (can_be_nonlocal (old_var, id))
630 	{
631 	  /* We need to add this variable to the local decls as otherwise
632 	     nothing else will do so.  */
633 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
634 	    add_local_decl (cfun, old_var);
635 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
636 	      && !DECL_IGNORED_P (old_var)
637 	      && nonlocalized_list)
638 	    vec_safe_push (*nonlocalized_list, old_var);
639 	  continue;
640 	}
641 
642       /* Remap the variable.  */
643       new_var = remap_decl (old_var, id);
644 
645       /* If we didn't remap this variable, we can't mess with its
646 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
647 	 already declared somewhere else, so don't declare it here.  */
648 
649       if (new_var == id->retvar)
650 	;
651       else if (!new_var)
652         {
653 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
654 	      && !DECL_IGNORED_P (old_var)
655 	      && nonlocalized_list)
656 	    vec_safe_push (*nonlocalized_list, old_var);
657 	}
658       else
659 	{
660 	  gcc_assert (DECL_P (new_var));
661 	  DECL_CHAIN (new_var) = new_decls;
662 	  new_decls = new_var;
663 
664 	  /* Also copy value-expressions.  */
665 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
666 	    {
667 	      tree tem = DECL_VALUE_EXPR (new_var);
668 	      bool old_regimplify = id->regimplify;
669 	      id->remapping_type_depth++;
670 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
671 	      id->remapping_type_depth--;
672 	      id->regimplify = old_regimplify;
673 	      SET_DECL_VALUE_EXPR (new_var, tem);
674 	    }
675 	}
676     }
677 
678   return nreverse (new_decls);
679 }
680 
681 /* Copy the BLOCK to contain remapped versions of the variables
682    therein.  And hook the new block into the block-tree.  */
683 
684 static void
685 remap_block (tree *block, copy_body_data *id)
686 {
687   tree old_block;
688   tree new_block;
689 
690   /* Make the new block.  */
691   old_block = *block;
692   new_block = make_node (BLOCK);
693   TREE_USED (new_block) = TREE_USED (old_block);
694   BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
695   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
696   BLOCK_NONLOCALIZED_VARS (new_block)
697     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
698   *block = new_block;
699 
700   /* Remap its variables.  */
701   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
702   					&BLOCK_NONLOCALIZED_VARS (new_block),
703 					id);
704 
705   if (id->transform_lang_insert_block)
706     id->transform_lang_insert_block (new_block);
707 
708   /* Remember the remapped block.  */
709   insert_decl_map (id, old_block, new_block);
710 }
711 
712 /* Copy the whole block tree and root it in id->block.  */
713 static tree
714 remap_blocks (tree block, copy_body_data *id)
715 {
716   tree t;
717   tree new_tree = block;
718 
719   if (!block)
720     return NULL;
721 
722   remap_block (&new_tree, id);
723   gcc_assert (new_tree != block);
724   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
725     prepend_lexical_block (new_tree, remap_blocks (t, id));
726   /* Blocks are in arbitrary order, but make things slightly prettier and do
727      not swap order when producing a copy.  */
728   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
729   return new_tree;
730 }
731 
732 /* Remap the block tree rooted at BLOCK to nothing.  */
733 static void
734 remap_blocks_to_null (tree block, copy_body_data *id)
735 {
736   tree t;
737   insert_decl_map (id, block, NULL_TREE);
738   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
739     remap_blocks_to_null (t, id);
740 }
741 
742 static void
743 copy_statement_list (tree *tp)
744 {
745   tree_stmt_iterator oi, ni;
746   tree new_tree;
747 
748   new_tree = alloc_stmt_list ();
749   ni = tsi_start (new_tree);
750   oi = tsi_start (*tp);
751   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
752   *tp = new_tree;
753 
754   for (; !tsi_end_p (oi); tsi_next (&oi))
755     {
756       tree stmt = tsi_stmt (oi);
757       if (TREE_CODE (stmt) == STATEMENT_LIST)
758 	/* This copy is not redundant; tsi_link_after will smash this
759 	   STATEMENT_LIST into the end of the one we're building, and we
760 	   don't want to do that with the original.  */
761 	copy_statement_list (&stmt);
762       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
763     }
764 }
765 
766 static void
767 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
768 {
769   tree block = BIND_EXPR_BLOCK (*tp);
770   /* Copy (and replace) the statement.  */
771   copy_tree_r (tp, walk_subtrees, NULL);
772   if (block)
773     {
774       remap_block (&block, id);
775       BIND_EXPR_BLOCK (*tp) = block;
776     }
777 
778   if (BIND_EXPR_VARS (*tp))
779     /* This will remap a lot of the same decls again, but this should be
780        harmless.  */
781     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
782 }
783 
784 
785 /* Create a new gimple_seq by remapping all the statements in BODY
786    using the inlining information in ID.  */
787 
788 static gimple_seq
789 remap_gimple_seq (gimple_seq body, copy_body_data *id)
790 {
791   gimple_stmt_iterator si;
792   gimple_seq new_body = NULL;
793 
794   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
795     {
796       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
797       gimple_seq_add_seq (&new_body, new_stmts);
798     }
799 
800   return new_body;
801 }
802 
803 
804 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
805    block using the mapping information in ID.  */
806 
807 static gimple *
808 copy_gimple_bind (gbind *stmt, copy_body_data *id)
809 {
810   gimple *new_bind;
811   tree new_block, new_vars;
812   gimple_seq body, new_body;
813 
814   /* Copy the statement.  Note that we purposely don't use copy_stmt
815      here because we need to remap statements as we copy.  */
816   body = gimple_bind_body (stmt);
817   new_body = remap_gimple_seq (body, id);
818 
819   new_block = gimple_bind_block (stmt);
820   if (new_block)
821     remap_block (&new_block, id);
822 
823   /* This will remap a lot of the same decls again, but this should be
824      harmless.  */
825   new_vars = gimple_bind_vars (stmt);
826   if (new_vars)
827     new_vars = remap_decls (new_vars, NULL, id);
828 
829   new_bind = gimple_build_bind (new_vars, new_body, new_block);
830 
831   return new_bind;
832 }
833 
834 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
835 
836 static bool
837 is_parm (tree decl)
838 {
839   if (TREE_CODE (decl) == SSA_NAME)
840     {
841       decl = SSA_NAME_VAR (decl);
842       if (!decl)
843 	return false;
844     }
845 
846   return (TREE_CODE (decl) == PARM_DECL);
847 }
848 
849 /* Remap the dependence CLIQUE from the source to the destination function
850    as specified in ID.  */
851 
852 static unsigned short
853 remap_dependence_clique (copy_body_data *id, unsigned short clique)
854 {
855   if (clique == 0 || processing_debug_stmt)
856     return 0;
857   if (!id->dependence_map)
858     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
859   bool existed;
860   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
861   if (!existed)
862     newc = ++cfun->last_clique;
863   return newc;
864 }
865 
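/* Illustrative sketch: if the source function assigned clique 2 to a
   group of MEM_REFs, the first remap_dependence_clique (id, 2) call
   allocates a fresh clique number in the destination function
   (++cfun->last_clique) and caches it, and every later call with
   clique 2 returns that same number, so the copied references keep
   sharing a clique with each other rather than with unrelated ones.  */
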
866 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
867    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
868    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
869    recursing into the child nodes of *TP.  */
870 
871 static tree
872 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
873 {
874   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
875   copy_body_data *id = (copy_body_data *) wi_p->info;
876   tree fn = id->src_fn;
877 
878   /* For recursive invocations this is no longer the LHS itself.  */
879   bool is_lhs = wi_p->is_lhs;
880   wi_p->is_lhs = false;
881 
882   if (TREE_CODE (*tp) == SSA_NAME)
883     {
884       *tp = remap_ssa_name (*tp, id);
885       *walk_subtrees = 0;
886       if (is_lhs)
887 	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
888       return NULL;
889     }
890   else if (auto_var_in_fn_p (*tp, fn))
891     {
892       /* Local variables and labels need to be replaced by equivalent
893 	 variables.  We don't want to copy static variables; there's
894 	 only one of those, no matter how many times we inline the
895 	 containing function.  Similarly for globals from an outer
896 	 function.  */
897       tree new_decl;
898 
899       /* Remap the declaration.  */
900       new_decl = remap_decl (*tp, id);
901       gcc_assert (new_decl);
902       /* Replace this variable with the copy.  */
903       STRIP_TYPE_NOPS (new_decl);
904       /* ???  The C++ frontend uses a zero void * pointer to initialize
905          values of any other type.  This confuses the middle-end type
906 	 verification.  As cloned bodies do not go through gimplification
907 	 again, the fixup there doesn't trigger.  */
908       if (TREE_CODE (new_decl) == INTEGER_CST
909 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
910 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
911       *tp = new_decl;
912       *walk_subtrees = 0;
913     }
914   else if (TREE_CODE (*tp) == STATEMENT_LIST)
915     gcc_unreachable ();
916   else if (TREE_CODE (*tp) == SAVE_EXPR)
917     gcc_unreachable ();
918   else if (TREE_CODE (*tp) == LABEL_DECL
919 	   && (!DECL_CONTEXT (*tp)
920 	       || decl_function_context (*tp) == id->src_fn))
921     /* These may need to be remapped for EH handling.  */
922     *tp = remap_decl (*tp, id);
923   else if (TREE_CODE (*tp) == FIELD_DECL)
924     {
925       /* If the enclosing record type is variably_modified_type_p, the field
926 	 has already been remapped.  Otherwise, it need not be.  */
927       tree *n = id->decl_map->get (*tp);
928       if (n)
929 	*tp = *n;
930       *walk_subtrees = 0;
931     }
932   else if (TYPE_P (*tp))
933     /* Types may need remapping as well.  */
934     *tp = remap_type (*tp, id);
935   else if (CONSTANT_CLASS_P (*tp))
936     {
937       /* If this is a constant, we have to copy the node iff the type
938 	 will be remapped.  copy_tree_r will not copy a constant.  */
939       tree new_type = remap_type (TREE_TYPE (*tp), id);
940 
941       if (new_type == TREE_TYPE (*tp))
942 	*walk_subtrees = 0;
943 
944       else if (TREE_CODE (*tp) == INTEGER_CST)
945 	*tp = wide_int_to_tree (new_type, *tp);
946       else
947 	{
948 	  *tp = copy_node (*tp);
949 	  TREE_TYPE (*tp) = new_type;
950 	}
951     }
952   else
953     {
954       /* Otherwise, just copy the node.  Note that copy_tree_r already
955 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
956 
957       if (TREE_CODE (*tp) == MEM_REF)
958 	{
959 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
960 	     that can happen when a pointer argument is an ADDR_EXPR.
961 	     Recurse here manually to allow that.  */
962 	  tree ptr = TREE_OPERAND (*tp, 0);
963 	  tree type = remap_type (TREE_TYPE (*tp), id);
964 	  tree old = *tp;
965 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
966 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
967 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
968 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
969 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
970 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
971 	    {
972 	      MR_DEPENDENCE_CLIQUE (*tp)
973 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
974 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
975 	    }
976 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
977 	     remapped a parameter as the property might be valid only
978 	     for the parameter itself.  */
979 	  if (TREE_THIS_NOTRAP (old)
980 	      && (!is_parm (TREE_OPERAND (old, 0))
981 		  || (!id->transform_parameter && is_parm (ptr))))
982 	    TREE_THIS_NOTRAP (*tp) = 1;
983 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
984 	  *walk_subtrees = 0;
985 	  return NULL;
986 	}
987 
988       /* Here is the "usual case".  Copy this tree node, and then
989 	 tweak some special cases.  */
990       copy_tree_r (tp, walk_subtrees, NULL);
991 
992       if (TREE_CODE (*tp) != OMP_CLAUSE)
993 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
994 
995       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
996 	{
997 	  /* The copied TARGET_EXPR has never been expanded, even if the
998 	     original node was expanded already.  */
999 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1000 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1001 	}
1002       else if (TREE_CODE (*tp) == ADDR_EXPR)
1003 	{
1004 	  /* Variable substitution need not be simple.  In particular,
1005 	     the MEM_REF substitution above.  Make sure that
1006 	     TREE_CONSTANT and friends are up-to-date.  */
1007 	  int invariant = is_gimple_min_invariant (*tp);
1008 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1009 	  recompute_tree_invariant_for_addr_expr (*tp);
1010 
1011 	  /* If this used to be invariant, but is not any longer,
1012 	     then regimplification is probably needed.  */
1013 	  if (invariant && !is_gimple_min_invariant (*tp))
1014 	    id->regimplify = true;
1015 
1016 	  *walk_subtrees = 0;
1017 	}
1018     }
1019 
1020   /* Update the TREE_BLOCK for the cloned expr.  */
1021   if (EXPR_P (*tp))
1022     {
1023       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1024       tree old_block = TREE_BLOCK (*tp);
1025       if (old_block)
1026 	{
1027 	  tree *n;
1028 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1029 	  if (n)
1030 	    new_block = *n;
1031 	}
1032       TREE_SET_BLOCK (*tp, new_block);
1033     }
1034 
1035   /* Keep iterating.  */
1036   return NULL_TREE;
1037 }
1038 
1039 
1040 /* Called from copy_body_id via walk_tree.  DATA is really a
1041    `copy_body_data *'.  */
1042 
1043 tree
1044 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1045 {
1046   copy_body_data *id = (copy_body_data *) data;
1047   tree fn = id->src_fn;
1048   tree new_block;
1049 
1050   /* Begin by recognizing trees that we'll completely rewrite for the
1051      inlining context.  Our output for these trees is completely
1052      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1053      into an edge).  Further down, we'll handle trees that get
1054      duplicated and/or tweaked.  */
1055 
1056   /* When requested, RETURN_EXPRs should be transformed to just the
1057      contained MODIFY_EXPR.  The branch semantics of the return will
1058      be handled elsewhere by manipulating the CFG rather than a statement.  */
1059   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1060     {
1061       tree assignment = TREE_OPERAND (*tp, 0);
1062 
1063       /* If we're returning something, just turn that into an
1064 	 assignment into the equivalent of the original RESULT_DECL.
1065 	 If the "assignment" is just the result decl, the result
1066 	 decl has already been set (e.g. a recent "foo (&result_decl,
1067 	 ...)"); just toss the entire RETURN_EXPR.  */
1068       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1069 	{
1070 	  /* Replace the RETURN_EXPR with (a copy of) the
1071 	     MODIFY_EXPR hanging underneath.  */
1072 	  *tp = copy_node (assignment);
1073 	}
1074       else /* Else the RETURN_EXPR returns no value.  */
1075 	{
1076 	  *tp = NULL;
1077 	  return (tree) (void *)1;
1078 	}
1079     }
1080   else if (TREE_CODE (*tp) == SSA_NAME)
1081     {
1082       *tp = remap_ssa_name (*tp, id);
1083       *walk_subtrees = 0;
1084       return NULL;
1085     }
1086 
1087   /* Local variables and labels need to be replaced by equivalent
1088      variables.  We don't want to copy static variables; there's only
1089      one of those, no matter how many times we inline the containing
1090      function.  Similarly for globals from an outer function.  */
1091   else if (auto_var_in_fn_p (*tp, fn))
1092     {
1093       tree new_decl;
1094 
1095       /* Remap the declaration.  */
1096       new_decl = remap_decl (*tp, id);
1097       gcc_assert (new_decl);
1098       /* Replace this variable with the copy.  */
1099       STRIP_TYPE_NOPS (new_decl);
1100       *tp = new_decl;
1101       *walk_subtrees = 0;
1102     }
1103   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1104     copy_statement_list (tp);
1105   else if (TREE_CODE (*tp) == SAVE_EXPR
1106 	   || TREE_CODE (*tp) == TARGET_EXPR)
1107     remap_save_expr (tp, id->decl_map, walk_subtrees);
1108   else if (TREE_CODE (*tp) == LABEL_DECL
1109 	   && (! DECL_CONTEXT (*tp)
1110 	       || decl_function_context (*tp) == id->src_fn))
1111     /* These may need to be remapped for EH handling.  */
1112     *tp = remap_decl (*tp, id);
1113   else if (TREE_CODE (*tp) == BIND_EXPR)
1114     copy_bind_expr (tp, walk_subtrees, id);
1115   /* Types may need remapping as well.  */
1116   else if (TYPE_P (*tp))
1117     *tp = remap_type (*tp, id);
1118 
1119   /* If this is a constant, we have to copy the node iff the type will be
1120      remapped.  copy_tree_r will not copy a constant.  */
1121   else if (CONSTANT_CLASS_P (*tp))
1122     {
1123       tree new_type = remap_type (TREE_TYPE (*tp), id);
1124 
1125       if (new_type == TREE_TYPE (*tp))
1126 	*walk_subtrees = 0;
1127 
1128       else if (TREE_CODE (*tp) == INTEGER_CST)
1129 	*tp = wide_int_to_tree (new_type, *tp);
1130       else
1131 	{
1132 	  *tp = copy_node (*tp);
1133 	  TREE_TYPE (*tp) = new_type;
1134 	}
1135     }
1136 
1137   /* Otherwise, just copy the node.  Note that copy_tree_r already
1138      knows not to copy VAR_DECLs, etc., so this is safe.  */
1139   else
1140     {
1141       /* Here we handle trees that are not completely rewritten.
1142 	 First we detect some inlining-induced bogosities for
1143 	 discarding.  */
1144       if (TREE_CODE (*tp) == MODIFY_EXPR
1145 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1146 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1147 	{
1148 	  /* Some assignments VAR = VAR; don't generate any rtl code
1149 	     and thus don't count as variable modification.  Avoid
1150 	     keeping bogosities like 0 = 0.  */
1151 	  tree decl = TREE_OPERAND (*tp, 0), value;
1152 	  tree *n;
1153 
1154 	  n = id->decl_map->get (decl);
1155 	  if (n)
1156 	    {
1157 	      value = *n;
1158 	      STRIP_TYPE_NOPS (value);
1159 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1160 		{
1161 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1162 		  return copy_tree_body_r (tp, walk_subtrees, data);
1163 		}
1164 	    }
1165 	}
1166       else if (TREE_CODE (*tp) == INDIRECT_REF)
1167 	{
1168 	  /* Get rid of *& from inline substitutions that can happen when a
1169 	     pointer argument is an ADDR_EXPR.  */
1170 	  tree decl = TREE_OPERAND (*tp, 0);
1171 	  tree *n = id->decl_map->get (decl);
1172 	  if (n)
1173 	    {
1174 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1175 	         it manually here, as we'll eventually get ADDR_EXPRs
1176 		 which lie about the types they point to.  In this case
1177 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1178 		 but we absolutely rely on that.  As fold_indirect_ref
1179 	         does other useful transformations, try that first, though.  */
1180 	      tree type = TREE_TYPE (*tp);
1181 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1182 	      tree old = *tp;
1183 	      *tp = gimple_fold_indirect_ref (ptr);
1184 	      if (! *tp)
1185 	        {
1186 		  type = remap_type (type, id);
1187 		  if (TREE_CODE (ptr) == ADDR_EXPR)
1188 		    {
1189 		      *tp
1190 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1191 		      /* ???  We should either assert here or build
1192 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1193 			 incompatible types to our IL.  */
1194 		      if (! *tp)
1195 			*tp = TREE_OPERAND (ptr, 0);
1196 		    }
1197 	          else
1198 		    {
1199 	              *tp = build1 (INDIRECT_REF, type, ptr);
1200 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1201 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1202 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1203 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1204 			 have remapped a parameter as the property might be
1205 			 valid only for the parameter itself.  */
1206 		      if (TREE_THIS_NOTRAP (old)
1207 			  && (!is_parm (TREE_OPERAND (old, 0))
1208 			      || (!id->transform_parameter && is_parm (ptr))))
1209 		        TREE_THIS_NOTRAP (*tp) = 1;
1210 		    }
1211 		}
1212 	      *walk_subtrees = 0;
1213 	      return NULL;
1214 	    }
1215 	}
1216       else if (TREE_CODE (*tp) == MEM_REF)
1217 	{
1218 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1219 	     that can happen when a pointer argument is an ADDR_EXPR.
1220 	     Recurse here manually to allow that.  */
1221 	  tree ptr = TREE_OPERAND (*tp, 0);
1222 	  tree type = remap_type (TREE_TYPE (*tp), id);
1223 	  tree old = *tp;
1224 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1225 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1226 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1227 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1228 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1229 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1230 	    {
1231 	      MR_DEPENDENCE_CLIQUE (*tp)
1232 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1233 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1234 	    }
1235 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1236 	     remapped a parameter as the property might be valid only
1237 	     for the parameter itself.  */
1238 	  if (TREE_THIS_NOTRAP (old)
1239 	      && (!is_parm (TREE_OPERAND (old, 0))
1240 		  || (!id->transform_parameter && is_parm (ptr))))
1241 	    TREE_THIS_NOTRAP (*tp) = 1;
1242 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1243 	  *walk_subtrees = 0;
1244 	  return NULL;
1245 	}
1246 
1247       /* Here is the "usual case".  Copy this tree node, and then
1248 	 tweak some special cases.  */
1249       copy_tree_r (tp, walk_subtrees, NULL);
1250 
1251       /* If EXPR has a block defined, map it to the newly constructed block.
1252          When inlining we want EXPRs without a block to appear in the block
1253 	 of the function call if we are not remapping a type.  */
1254       if (EXPR_P (*tp))
1255 	{
1256 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1257 	  if (TREE_BLOCK (*tp))
1258 	    {
1259 	      tree *n;
1260 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1261 	      if (n)
1262 		new_block = *n;
1263 	    }
1264 	  TREE_SET_BLOCK (*tp, new_block);
1265 	}
1266 
1267       if (TREE_CODE (*tp) != OMP_CLAUSE)
1268 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1269 
1270       /* The copied TARGET_EXPR has never been expanded, even if the
1271 	 original node was expanded already.  */
1272       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1273 	{
1274 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1275 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1276 	}
1277 
1278       /* Variable substitution need not be simple.  In particular, the
1279 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1280 	 and friends are up-to-date.  */
1281       else if (TREE_CODE (*tp) == ADDR_EXPR)
1282 	{
1283 	  int invariant = is_gimple_min_invariant (*tp);
1284 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1285 
1286 	  /* Handle the case where we substituted an INDIRECT_REF
1287 	     into the operand of the ADDR_EXPR.  */
1288 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1289 	    {
1290 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1291 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1292 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1293 	      *tp = t;
1294 	    }
1295 	  else
1296 	    recompute_tree_invariant_for_addr_expr (*tp);
1297 
1298 	  /* If this used to be invariant, but is not any longer,
1299 	     then regimplification is probably needed.  */
1300 	  if (invariant && !is_gimple_min_invariant (*tp))
1301 	    id->regimplify = true;
1302 
1303 	  *walk_subtrees = 0;
1304 	}
1305     }
1306 
1307   /* Keep iterating.  */
1308   return NULL_TREE;
1309 }
1310 
1311 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1312    source function, map that to the duplicate EH region number in
1313    the destination function.  */
1314 
1315 static int
1316 remap_eh_region_nr (int old_nr, copy_body_data *id)
1317 {
1318   eh_region old_r, new_r;
1319 
1320   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1321   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1322 
1323   return new_r->index;
1324 }
1325 
1326 /* Similar, but operate on INTEGER_CSTs.  */
1327 
1328 static tree
1329 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1330 {
1331   int old_nr, new_nr;
1332 
1333   old_nr = tree_to_shwi (old_t_nr);
1334   new_nr = remap_eh_region_nr (old_nr, id);
1335 
1336   return build_int_cst (integer_type_node, new_nr);
1337 }
1338 
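/* Illustrative sketch: a call such as

     __builtin_eh_pointer (3)

   carries the source function's EH region number 3 as an INTEGER_CST
   argument.  After the EH region tree has been duplicated into the
   destination function, remap_eh_region_tree_nr rewrites that constant
   to the index of the corresponding duplicated region (the region
   number 3 here is purely illustrative).  */
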
1339 /* Helper for copy_bb.  Remap statement STMT using the inlining
1340    information in ID.  Return the new statement copy.  */
1341 
1342 static gimple_seq
1343 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1344 {
1345   gimple *copy = NULL;
1346   struct walk_stmt_info wi;
1347   bool skip_first = false;
1348   gimple_seq stmts = NULL;
1349 
1350   if (is_gimple_debug (stmt)
1351       && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1352     return stmts;
1353 
1354   /* Begin by recognizing trees that we'll completely rewrite for the
1355      inlining context.  Our output for these trees is completely
1356      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1357      into an edge).  Further down, we'll handle trees that get
1358      duplicated and/or tweaked.  */
1359 
1360   /* When requested, GIMPLE_RETURNs should be transformed to just the
1361      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1362      be handled elsewhere by manipulating the CFG rather than the
1363      statement.  */
1364   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1365     {
1366       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1367       tree retbnd = gimple_return_retbnd (stmt);
1368       tree bndslot = id->retbnd;
1369 
1370       if (retbnd && bndslot)
1371 	{
1372 	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
1373 	  memset (&wi, 0, sizeof (wi));
1374 	  wi.info = id;
1375 	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1376 	  gimple_seq_add_stmt (&stmts, bndcopy);
1377 	}
1378 
1379       /* If we're returning something, just turn that into an
1380 	 assignment into the equivalent of the original RESULT_DECL.
1381 	 If RETVAL is just the result decl, the result decl has
1382 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1383 	 just toss the entire GIMPLE_RETURN.  */
1384       if (retval
1385 	  && (TREE_CODE (retval) != RESULT_DECL
1386 	      && (TREE_CODE (retval) != SSA_NAME
1387 		  || ! SSA_NAME_VAR (retval)
1388 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1389         {
1390 	  copy = gimple_build_assign (id->do_not_unshare
1391 				      ? id->retvar : unshare_expr (id->retvar),
1392 				      retval);
1393 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1394 	  skip_first = true;
1395 
1396 	  /* We need to copy bounds if we return a structure with pointers
1397 	     in an instrumented function.  */
1398 	  if (chkp_function_instrumented_p (id->dst_fn)
1399 	      && !bndslot
1400 	      && !BOUNDED_P (id->retvar)
1401 	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1402 	    id->assign_stmts.safe_push (copy);
1403 
1404 	}
1405       else
1406 	return stmts;
1407     }
1408   else if (gimple_has_substatements (stmt))
1409     {
1410       gimple_seq s1, s2;
1411 
1412       /* When cloning bodies from the C++ front end, we will be handed bodies
1413 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1414 	 have embedded statements.  */
1415       switch (gimple_code (stmt))
1416 	{
1417 	case GIMPLE_BIND:
1418 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1419 	  break;
1420 
1421 	case GIMPLE_CATCH:
1422 	  {
1423 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1424 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1425 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1426 	  }
1427 	  break;
1428 
1429 	case GIMPLE_EH_FILTER:
1430 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1431 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1432 	  break;
1433 
1434 	case GIMPLE_TRY:
1435 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1436 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1437 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1438 	  break;
1439 
1440 	case GIMPLE_WITH_CLEANUP_EXPR:
1441 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1442 	  copy = gimple_build_wce (s1);
1443 	  break;
1444 
1445 	case GIMPLE_OMP_PARALLEL:
1446 	  {
1447 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1448 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1449 	    copy = gimple_build_omp_parallel
1450 	             (s1,
1451 		      gimple_omp_parallel_clauses (omp_par_stmt),
1452 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1453 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1454 	  }
1455 	  break;
1456 
1457 	case GIMPLE_OMP_TASK:
1458 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1459 	  copy = gimple_build_omp_task
1460 	           (s1,
1461 		    gimple_omp_task_clauses (stmt),
1462 		    gimple_omp_task_child_fn (stmt),
1463 		    gimple_omp_task_data_arg (stmt),
1464 		    gimple_omp_task_copy_fn (stmt),
1465 		    gimple_omp_task_arg_size (stmt),
1466 		    gimple_omp_task_arg_align (stmt));
1467 	  break;
1468 
1469 	case GIMPLE_OMP_FOR:
1470 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1471 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1472 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1473 				       gimple_omp_for_clauses (stmt),
1474 				       gimple_omp_for_collapse (stmt), s2);
1475 	  {
1476 	    size_t i;
1477 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1478 	      {
1479 		gimple_omp_for_set_index (copy, i,
1480 					  gimple_omp_for_index (stmt, i));
1481 		gimple_omp_for_set_initial (copy, i,
1482 					    gimple_omp_for_initial (stmt, i));
1483 		gimple_omp_for_set_final (copy, i,
1484 					  gimple_omp_for_final (stmt, i));
1485 		gimple_omp_for_set_incr (copy, i,
1486 					 gimple_omp_for_incr (stmt, i));
1487 		gimple_omp_for_set_cond (copy, i,
1488 					 gimple_omp_for_cond (stmt, i));
1489 	      }
1490 	  }
1491 	  break;
1492 
1493 	case GIMPLE_OMP_MASTER:
1494 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1495 	  copy = gimple_build_omp_master (s1);
1496 	  break;
1497 
1498 	case GIMPLE_OMP_TASKGROUP:
1499 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1500 	  copy = gimple_build_omp_taskgroup (s1);
1501 	  break;
1502 
1503 	case GIMPLE_OMP_ORDERED:
1504 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1505 	  copy = gimple_build_omp_ordered
1506 		   (s1,
1507 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1508 	  break;
1509 
1510 	case GIMPLE_OMP_SECTION:
1511 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1512 	  copy = gimple_build_omp_section (s1);
1513 	  break;
1514 
1515 	case GIMPLE_OMP_SECTIONS:
1516 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1517 	  copy = gimple_build_omp_sections
1518 	           (s1, gimple_omp_sections_clauses (stmt));
1519 	  break;
1520 
1521 	case GIMPLE_OMP_SINGLE:
1522 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1523 	  copy = gimple_build_omp_single
1524 	           (s1, gimple_omp_single_clauses (stmt));
1525 	  break;
1526 
1527 	case GIMPLE_OMP_TARGET:
1528 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1529 	  copy = gimple_build_omp_target
1530 		   (s1, gimple_omp_target_kind (stmt),
1531 		    gimple_omp_target_clauses (stmt));
1532 	  break;
1533 
1534 	case GIMPLE_OMP_TEAMS:
1535 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1536 	  copy = gimple_build_omp_teams
1537 		   (s1, gimple_omp_teams_clauses (stmt));
1538 	  break;
1539 
1540 	case GIMPLE_OMP_CRITICAL:
1541 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1542 	  copy = gimple_build_omp_critical (s1,
1543 					    gimple_omp_critical_name
1544 					      (as_a <gomp_critical *> (stmt)),
1545 					    gimple_omp_critical_clauses
1546 					      (as_a <gomp_critical *> (stmt)));
1547 	  break;
1548 
1549 	case GIMPLE_TRANSACTION:
1550 	  {
1551 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1552 	    gtransaction *new_trans_stmt;
1553 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1554 				   id);
1555 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1556 	    gimple_transaction_set_subcode (new_trans_stmt,
1557 	      gimple_transaction_subcode (old_trans_stmt));
1558 	    gimple_transaction_set_label_norm (new_trans_stmt,
1559 	      gimple_transaction_label_norm (old_trans_stmt));
1560 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1561 	      gimple_transaction_label_uninst (old_trans_stmt));
1562 	    gimple_transaction_set_label_over (new_trans_stmt,
1563 	      gimple_transaction_label_over (old_trans_stmt));
1564 	  }
1565 	  break;
1566 
1567 	default:
1568 	  gcc_unreachable ();
1569 	}
1570     }
1571   else
1572     {
1573       if (gimple_assign_copy_p (stmt)
1574 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1575 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1576 	{
1577 	  /* Here we handle statements that are not completely rewritten.
1578 	     First we detect some inlining-induced bogosities for
1579 	     discarding.  */
1580 
1581 	  /* Some assignments VAR = VAR; don't generate any rtl code
1582 	     and thus don't count as variable modification.  Avoid
1583 	     keeping bogosities like 0 = 0.  */
1584 	  tree decl = gimple_assign_lhs (stmt), value;
1585 	  tree *n;
1586 
1587 	  n = id->decl_map->get (decl);
1588 	  if (n)
1589 	    {
1590 	      value = *n;
1591 	      STRIP_TYPE_NOPS (value);
1592 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1593 		return NULL;
1594 	    }
1595 	}
1596 
1597       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1598 	 in a block that we aren't copying during tree_function_versioning,
1599 	 just drop the clobber stmt.  */
1600       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1601 	{
1602 	  tree lhs = gimple_assign_lhs (stmt);
1603 	  if (TREE_CODE (lhs) == MEM_REF
1604 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1605 	    {
1606 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1607 	      if (gimple_bb (def_stmt)
1608 		  && !bitmap_bit_p (id->blocks_to_copy,
1609 				    gimple_bb (def_stmt)->index))
1610 		return NULL;
1611 	    }
1612 	}
1613 
1614       if (gimple_debug_bind_p (stmt))
1615 	{
1616 	  gdebug *copy
1617 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1618 				       gimple_debug_bind_get_value (stmt),
1619 				       stmt);
1620 	  id->debug_stmts.safe_push (copy);
1621 	  gimple_seq_add_stmt (&stmts, copy);
1622 	  return stmts;
1623 	}
1624       if (gimple_debug_source_bind_p (stmt))
1625 	{
1626 	  gdebug *copy = gimple_build_debug_source_bind
1627 	                   (gimple_debug_source_bind_get_var (stmt),
1628 			    gimple_debug_source_bind_get_value (stmt),
1629 			    stmt);
1630 	  id->debug_stmts.safe_push (copy);
1631 	  gimple_seq_add_stmt (&stmts, copy);
1632 	  return stmts;
1633 	}
1634 
1635       /* Create a new deep copy of the statement.  */
1636       copy = gimple_copy (stmt);
1637 
1638       /* Clear flags that need revisiting.  */
1639       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1640         {
1641 	  if (gimple_call_tail_p (call_stmt))
1642 	    gimple_call_set_tail (call_stmt, false);
1643 	  if (gimple_call_from_thunk_p (call_stmt))
1644 	    gimple_call_set_from_thunk (call_stmt, false);
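	  /* If we copy one of the GOMP_SIMD internal calls, the destination
	     function now also contains simduid loops, so note that.  */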
1645 	  if (gimple_call_internal_p (call_stmt))
1646 	    switch (gimple_call_internal_fn (call_stmt))
1647 	      {
1648 	      case IFN_GOMP_SIMD_LANE:
1649 	      case IFN_GOMP_SIMD_VF:
1650 	      case IFN_GOMP_SIMD_LAST_LANE:
1651 	      case IFN_GOMP_SIMD_ORDERED_START:
1652 	      case IFN_GOMP_SIMD_ORDERED_END:
1653 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1654 	        break;
1655 	      default:
1656 		break;
1657 	      }
1658 	}
1659 
1660       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1661 	 RESX and EH_DISPATCH.  */
1662       if (id->eh_map)
1663 	switch (gimple_code (copy))
1664 	  {
1665 	  case GIMPLE_CALL:
1666 	    {
1667 	      tree r, fndecl = gimple_call_fndecl (copy);
1668 	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1669 		switch (DECL_FUNCTION_CODE (fndecl))
1670 		  {
1671 		  case BUILT_IN_EH_COPY_VALUES:
1672 		    r = gimple_call_arg (copy, 1);
1673 		    r = remap_eh_region_tree_nr (r, id);
1674 		    gimple_call_set_arg (copy, 1, r);
1675 		    /* FALLTHRU */
1676 
1677 		  case BUILT_IN_EH_POINTER:
1678 		  case BUILT_IN_EH_FILTER:
1679 		    r = gimple_call_arg (copy, 0);
1680 		    r = remap_eh_region_tree_nr (r, id);
1681 		    gimple_call_set_arg (copy, 0, r);
1682 		    break;
1683 
1684 		  default:
1685 		    break;
1686 		  }
1687 
1688 	      /* Reset alias info if we didn't take measures (setting
1689 		 DECL_PT_UID) to keep it valid over inlining.  */
1690 	      if (!id->src_cfun->gimple_df
1691 		  || !id->src_cfun->gimple_df->ipa_pta)
1692 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1693 	    }
1694 	    break;
1695 
1696 	  case GIMPLE_RESX:
1697 	    {
1698 	      gresx *resx_stmt = as_a <gresx *> (copy);
1699 	      int r = gimple_resx_region (resx_stmt);
1700 	      r = remap_eh_region_nr (r, id);
1701 	      gimple_resx_set_region (resx_stmt, r);
1702 	    }
1703 	    break;
1704 
1705 	  case GIMPLE_EH_DISPATCH:
1706 	    {
1707 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1708 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1709 	      r = remap_eh_region_nr (r, id);
1710 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1711 	    }
1712 	    break;
1713 
1714 	  default:
1715 	    break;
1716 	  }
1717     }
1718 
1719   /* If STMT has a block defined, map it to the newly constructed
1720      block.  */
1721   if (gimple_block (copy))
1722     {
1723       tree *n;
1724       n = id->decl_map->get (gimple_block (copy));
1725       gcc_assert (n);
1726       gimple_set_block (copy, *n);
1727     }
1728 
1729   if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1730     {
1731       gimple_seq_add_stmt (&stmts, copy);
1732       return stmts;
1733     }
1734 
1735   /* Remap all the operands in COPY.  */
1736   memset (&wi, 0, sizeof (wi));
1737   wi.info = id;
1738   if (skip_first)
1739     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1740   else
1741     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1742 
1743   /* Clear the copied virtual operands.  We are not remapping them here
1744      but are going to recreate them from scratch.  */
1745   if (gimple_has_mem_ops (copy))
1746     {
1747       gimple_set_vdef (copy, NULL_TREE);
1748       gimple_set_vuse (copy, NULL_TREE);
1749     }
1750 
1751   gimple_seq_add_stmt (&stmts, copy);
1752   return stmts;
1753 }
1754 
1755 
1756 /* Copy basic block BB and scale its profile accordingly.  Edges will be
1757    taken care of later.  */
1758 
1759 static basic_block
1760 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1761          gcov_type count_scale)
1762 {
1763   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1764   basic_block copy_basic_block;
1765   tree decl;
1766   gcov_type freq;
1767   basic_block prev;
1768 
1769   /* Search for previous copied basic block.  */
1770   prev = bb->prev_bb;
1771   while (!prev->aux)
1772     prev = prev->prev_bb;
1773 
1774   /* create_basic_block() will append every new block to
1775      basic_block_info automatically.  */
1776   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
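  /* COUNT_SCALE is a ratio expressed relative to REG_BR_PROB_BASE, so
     apply_scale yields bb->count * count_scale / REG_BR_PROB_BASE.  */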
1777   copy_basic_block->count = apply_scale (bb->count, count_scale);
1778 
1779   /* We are going to rebuild frequencies from scratch.  These values
1780      matter only a little for driving canonicalize_loop_headers.  */
1781   freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1782 
1783   /* We recompute frequencies after inlining, so this is quite safe.  */
1784   if (freq > BB_FREQ_MAX)
1785     freq = BB_FREQ_MAX;
1786   copy_basic_block->frequency = freq;
1787 
1788   copy_gsi = gsi_start_bb (copy_basic_block);
1789 
1790   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1791     {
1792       gimple_seq stmts;
1793       gimple *stmt = gsi_stmt (gsi);
1794       gimple *orig_stmt = stmt;
1795       gimple_stmt_iterator stmts_gsi;
1796       bool stmt_added = false;
1797 
1798       id->regimplify = false;
1799       stmts = remap_gimple_stmt (stmt, id);
1800 
1801       if (gimple_seq_empty_p (stmts))
1802 	continue;
1803 
1804       seq_gsi = copy_gsi;
1805 
1806       for (stmts_gsi = gsi_start (stmts);
1807 	   !gsi_end_p (stmts_gsi); )
1808 	{
1809 	  stmt = gsi_stmt (stmts_gsi);
1810 
1811 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1812 	  gsi_next (&stmts_gsi);
1813 
1814 	  if (gimple_nop_p (stmt))
1815 	      continue;
1816 
1817 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1818 					    orig_stmt);
1819 
1820 	  /* With return slot optimization we can end up with
1821 	     non-gimple (foo *)&this->m, fix that here.  */
1822 	  if (is_gimple_assign (stmt)
1823 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1824 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1825 	    {
1826 	      tree new_rhs;
1827 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1828 						  gimple_assign_rhs1 (stmt),
1829 						  true, NULL, false,
1830 						  GSI_CONTINUE_LINKING);
1831 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1832 	      id->regimplify = false;
1833 	    }
1834 
1835 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1836 
1837 	  if (id->regimplify)
1838 	    gimple_regimplify_operands (stmt, &seq_gsi);
1839 
1840 	  stmt_added = true;
1841 	}
1842 
1843       if (!stmt_added)
1844 	continue;
1845 
1846       /* If copy_basic_block has been empty at the start of this iteration,
1847 	 call gsi_start_bb again to get at the newly added statements.  */
1848       if (gsi_end_p (copy_gsi))
1849 	copy_gsi = gsi_start_bb (copy_basic_block);
1850       else
1851 	gsi_next (&copy_gsi);
1852 
1853       /* Process the new statement.  The call to gimple_regimplify_operands
1854 	 may have turned the statement into multiple statements; we
1855 	 need to process all of them.  */
1856       do
1857 	{
1858 	  tree fn;
1859 	  gcall *call_stmt;
1860 
1861 	  stmt = gsi_stmt (copy_gsi);
1862 	  call_stmt = dyn_cast <gcall *> (stmt);
1863 	  if (call_stmt
1864 	      && gimple_call_va_arg_pack_p (call_stmt)
1865 	      && id->call_stmt
1866 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
1867 	    {
1868 	      /* __builtin_va_arg_pack () should be replaced by
1869 		 all arguments corresponding to ... in the caller.  */
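	      /* For example (hypothetical caller), if a variadic
		 always_inline function f (int a, ...) was called as
		 f (x, 1, 2), a call foo (a, __builtin_va_arg_pack ())
		 in its body is copied as foo (a, 1, 2).  */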
1870 	      tree p;
1871 	      gcall *new_call;
1872 	      vec<tree> argarray;
1873 	      size_t nargs = gimple_call_num_args (id->call_stmt);
1874 	      size_t n, i, nargs_to_copy;
1875 	      bool remove_bounds = false;
1876 
1877 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1878 		nargs--;
1879 
1880 	      /* Bounds should be removed from the arg pack when we
1881 		 handle a non-instrumented call in an instrumented
1882 		 function.  */
1883 	      nargs_to_copy = nargs;
1884 	      if (gimple_call_with_bounds_p (id->call_stmt)
1885 		  && !gimple_call_with_bounds_p (stmt))
1886 		{
1887 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1888 		       i < gimple_call_num_args (id->call_stmt);
1889 		       i++)
1890 		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1891 		      nargs_to_copy--;
1892 		  remove_bounds = true;
1893 		}
1894 
1895 	      /* Create the new array of arguments.  */
1896 	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
1897 	      argarray.create (n);
1898 	      argarray.safe_grow_cleared (n);
1899 
1900 	      /* Copy all the arguments before '...'  */
1901 	      memcpy (argarray.address (),
1902 		      gimple_call_arg_ptr (call_stmt, 0),
1903 		      gimple_call_num_args (call_stmt) * sizeof (tree));
1904 
1905 	      if (remove_bounds)
1906 		{
1907 		  /* Append the rest of arguments removing bounds.  */
1908 		  unsigned cur = gimple_call_num_args (call_stmt);
1909 		  i = gimple_call_num_args (id->call_stmt) - nargs;
1910 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1911 		       i < gimple_call_num_args (id->call_stmt);
1912 		       i++)
1913 		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1914 		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1915 		  gcc_assert (cur == n);
1916 		}
1917 	      else
1918 		{
1919 		  /* Append the arguments passed in '...'  */
1920 		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1921 			  gimple_call_arg_ptr (id->call_stmt, 0)
1922 			  + (gimple_call_num_args (id->call_stmt) - nargs),
1923 			  nargs * sizeof (tree));
1924 		}
1925 
1926 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1927 						argarray);
1928 
1929 	      argarray.release ();
1930 
1931 	      /* Copy all GIMPLE_CALL flags, location and block, except
1932 		 GF_CALL_VA_ARG_PACK.  */
1933 	      gimple_call_copy_flags (new_call, call_stmt);
1934 	      gimple_call_set_va_arg_pack (new_call, false);
1935 	      gimple_set_location (new_call, gimple_location (stmt));
1936 	      gimple_set_block (new_call, gimple_block (stmt));
1937 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1938 
1939 	      gsi_replace (&copy_gsi, new_call, false);
1940 	      stmt = new_call;
1941 	    }
1942 	  else if (call_stmt
1943 		   && id->call_stmt
1944 		   && (decl = gimple_call_fndecl (stmt))
1945 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1946 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1947 	    {
1948 	      /* __builtin_va_arg_pack_len () should be replaced by
1949 		 the number of anonymous arguments.  */
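	      /* Continuing the hypothetical example above, for a caller
		 f (x, 1, 2) of variadic f (int a, ...), the value is 2.  */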
1950 	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
1951 	      tree count, p;
1952 	      gimple *new_stmt;
1953 
1954 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1955 		nargs--;
1956 
1957 	      /* For instrumented calls we should ignore bounds.  */
1958 	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
1959 		   i < gimple_call_num_args (id->call_stmt);
1960 		   i++)
1961 		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1962 		  nargs--;
1963 
1964 	      if (!gimple_call_lhs (stmt))
1965 		{
1966 		  /* Drop unused calls.  */
1967 		  gsi_remove (&copy_gsi, false);
1968 		  continue;
1969 		}
1970 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
1971 		{
1972 		  count = build_int_cst (integer_type_node, nargs);
1973 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1974 		  gsi_replace (&copy_gsi, new_stmt, false);
1975 		  stmt = new_stmt;
1976 		}
1977 	      else if (nargs != 0)
1978 		{
1979 		  tree newlhs;
1980 		  if (gimple_in_ssa_p (cfun))
1981 		    newlhs = make_ssa_name (integer_type_node, NULL);
1982 		  else
1983 		    newlhs = create_tmp_reg (integer_type_node);
1984 		  count = build_int_cst (integer_type_node, nargs);
1985 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1986 						  PLUS_EXPR, newlhs, count);
1987 		  gimple_call_set_lhs (stmt, newlhs);
1988 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
1989 		}
1990 	    }
1991 	  else if (call_stmt
1992 		   && id->call_stmt
1993 		   && gimple_call_internal_p (stmt)
1994 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1995 	    {
1996 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
1997 	      gsi_remove (&copy_gsi, false);
1998 	      continue;
1999 	    }
2000 
2001 	  /* Statements produced by inlining can be unfolded, especially
2002 	     when we constant propagated some operands.  We can't fold
2003 	     them right now for two reasons:
2004 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
2005 	     2) we can't change function calls to builtins.
2006 	     So we just mark the statement for later folding.  We mark
2007 	     all new statements, instead of just the statements that were
2008 	     changed by some nontrivial substitution, so even statements
2009 	     made foldable indirectly are updated.  If this turns out to
2010 	     be expensive, copy_body can be told to watch for nontrivial
2011 	     changes.  */
2012 	  if (id->statements_to_fold)
2013 	    id->statements_to_fold->add (stmt);
2014 
2015 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2016 	     callgraph edges and update or duplicate them.  */
2017 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2018 	    {
2019 	      struct cgraph_edge *edge;
2020 
2021 	      switch (id->transform_call_graph_edges)
2022 		{
2023 		case CB_CGE_DUPLICATE:
2024 		  edge = id->src_node->get_edge (orig_stmt);
2025 		  if (edge)
2026 		    {
2027 		      int edge_freq = edge->frequency;
2028 		      int new_freq;
2029 		      struct cgraph_edge *old_edge = edge;
2030 		      edge = edge->clone (id->dst_node, call_stmt,
2031 					  gimple_uid (stmt),
2032 					  REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2033 					  true);
2034 		      /* We could also just rescale the frequency, but
2035 		         doing so would introduce roundoff errors and make
2036 			 verifier unhappy.  */
2037 		      new_freq  = compute_call_stmt_bb_frequency (id->dst_node->decl,
2038 								  copy_basic_block);
2039 
2040 		      /* Speculative calls consist of two edges - direct and indirect.
2041 			 Duplicate the whole thing and distribute frequencies accordingly.  */
2042 		      if (edge->speculative)
2043 			{
2044 			  struct cgraph_edge *direct, *indirect;
2045 			  struct ipa_ref *ref;
2046 
2047 			  gcc_assert (!edge->indirect_unknown_callee);
2048 			  old_edge->speculative_call_info (direct, indirect, ref);
2049 			  indirect = indirect->clone (id->dst_node, call_stmt,
2050 						      gimple_uid (stmt),
2051 						      REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2052 						      true);
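			  /* Split NEW_FREQ between the direct and indirect
			     clones in proportion to the original edges'
			     frequencies, capping each at CGRAPH_FREQ_MAX.  */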
2053 			  if (old_edge->frequency + indirect->frequency)
2054 			    {
2055 			      edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2056 						           (old_edge->frequency + indirect->frequency)),
2057 						     CGRAPH_FREQ_MAX);
2058 			      indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2059 							       (old_edge->frequency + indirect->frequency)),
2060 							 CGRAPH_FREQ_MAX);
2061 			    }
2062 			  id->dst_node->clone_reference (ref, stmt);
2063 			}
2064 		      else
2065 			{
2066 			  edge->frequency = new_freq;
2067 			  if (dump_file
2068 			      && profile_status_for_fn (cfun) != PROFILE_ABSENT
2069 			      && (edge_freq > edge->frequency + 10
2070 				  || edge_freq < edge->frequency - 10))
2071 			    {
2072 			      fprintf (dump_file, "Edge frequency estimated by "
2073 				       "cgraph %i diverge from inliner's estimate %i\n",
2074 				       edge_freq,
2075 				       edge->frequency);
2076 			      fprintf (dump_file,
2077 				       "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2078 				       bb->index,
2079 				       bb->frequency,
2080 				       copy_basic_block->frequency);
2081 			    }
2082 			}
2083 		    }
2084 		  break;
2085 
2086 		case CB_CGE_MOVE_CLONES:
2087 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2088 								call_stmt);
2089 		  edge = id->dst_node->get_edge (stmt);
2090 		  break;
2091 
2092 		case CB_CGE_MOVE:
2093 		  edge = id->dst_node->get_edge (orig_stmt);
2094 		  if (edge)
2095 		    edge->set_call_stmt (call_stmt);
2096 		  break;
2097 
2098 		default:
2099 		  gcc_unreachable ();
2100 		}
2101 
2102 	      /* Constant propagation on arguments done during inlining
2103 		 may create a new direct call.  Produce an edge for it.  */
2104 	      if ((!edge
2105 		   || (edge->indirect_inlining_edge
2106 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2107 		  && id->dst_node->definition
2108 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2109 		{
2110 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2111 
2112 		  /* We have a missing edge in the callgraph.  This can happen
2113 		     when previous inlining turned an indirect call into a
2114 		     direct call by constant propagating arguments or when we
2115 		     are producing a dead clone (for further cloning).  In all
2116 		     other cases we hit a bug (incorrect node sharing is the
2117 		     most common reason for missing edges).  */
2118 		  gcc_assert (!dest->definition
2119 			      || dest->address_taken
2120 		  	      || !id->src_node->definition
2121 			      || !id->dst_node->definition);
2122 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2123 		    id->dst_node->create_edge_including_clones
2124 		      (dest, orig_stmt, call_stmt, bb->count,
2125 		       compute_call_stmt_bb_frequency (id->dst_node->decl,
2126 		       				       copy_basic_block),
2127 		       CIF_ORIGINALLY_INDIRECT_CALL);
2128 		  else
2129 		    id->dst_node->create_edge (dest, call_stmt,
2130 					bb->count,
2131 					compute_call_stmt_bb_frequency
2132 					  (id->dst_node->decl,
2133 					   copy_basic_block))->inline_failed
2134 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2135 		  if (dump_file)
2136 		    {
2137 		      fprintf (dump_file, "Created new direct edge to %s\n",
2138 			       dest->name ());
2139 		    }
2140 		}
2141 
2142 	      notice_special_calls (as_a <gcall *> (stmt));
2143 	    }
2144 
2145 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2146 				      id->eh_map, id->eh_lp_nr);
2147 
2148 	  gsi_next (&copy_gsi);
2149 	}
2150       while (!gsi_end_p (copy_gsi));
2151 
2152       copy_gsi = gsi_last_bb (copy_basic_block);
2153     }
2154 
2155   return copy_basic_block;
2156 }
2157 
2158 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2159    form is quite easy, since the dominator relationship for the old basic
2160    blocks does not change.
2161 
2162    There is however an exception where inlining might change the dominator
2163    relation across EH edges from basic blocks within the inlined function
2164    to landing pads in the function we inline into.
2165 
2166    The function fills in PHI_RESULTs of such PHI nodes if they refer
2167    to gimple regs.  Otherwise, the function marks PHI_RESULT of such
2168    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2169    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2170    set, and this means that there will be no overlapping live ranges
2171    for the underlying symbol.
2172 
2173    This might change in the future if we allow redirecting of EH edges;
2174    we might then want to change the way we build the CFG pre-inlining to
2175    include all the possible edges.  */
2176 static void
2177 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2178 				  bool can_throw, bool nonlocal_goto)
2179 {
2180   edge e;
2181   edge_iterator ei;
2182 
2183   FOR_EACH_EDGE (e, ei, bb->succs)
2184     if (!e->dest->aux
2185 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2186       {
2187 	gphi *phi;
2188 	gphi_iterator si;
2189 
2190 	if (!nonlocal_goto)
2191 	  gcc_assert (e->flags & EDGE_EH);
2192 
2193 	if (!can_throw)
2194 	  gcc_assert (!(e->flags & EDGE_EH));
2195 
2196 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2197 	  {
2198 	    edge re;
2199 
2200 	    phi = si.phi ();
2201 
2202 	    /* For abnormal goto/call edges the receiver can be the
2203 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2204 
2205 	    gcc_assert ((e->flags & EDGE_EH)
2206 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2207 
2208 	    re = find_edge (ret_bb, e->dest);
2209 	    gcc_checking_assert (re);
2210 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2211 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2212 
2213 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2214 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2215 	  }
2216       }
2217 }
2218 
2219 
2220 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2221    accordingly.  Assume the aux pointers point to the copies of each BB.
2222    Return true if any debug stmts are left after a statement that must
2223    end the basic block.  */
2224 
2225 static bool
2226 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2227 		   basic_block abnormal_goto_dest)
2228 {
2229   basic_block new_bb = (basic_block) bb->aux;
2230   edge_iterator ei;
2231   edge old_edge;
2232   gimple_stmt_iterator si;
2233   int flags;
2234   bool need_debug_cleanup = false;
2235 
2236   /* Use the indices from the original blocks to create edges for the
2237      new ones.  */
2238   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2239     if (!(old_edge->flags & EDGE_EH))
2240       {
2241 	edge new_edge;
2242 
2243 	flags = old_edge->flags;
2244 
2245 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2246 	if (old_edge->dest->index == EXIT_BLOCK
2247 	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2248 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2249 	  flags |= EDGE_FALLTHRU;
2250 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2251 	new_edge->count = apply_scale (old_edge->count, count_scale);
2252 	new_edge->probability = old_edge->probability;
2253       }
2254 
2255   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2256     return false;
2257 
2258   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2259     {
2260       gimple *copy_stmt;
2261       bool can_throw, nonlocal_goto;
2262 
2263       copy_stmt = gsi_stmt (si);
2264       if (!is_gimple_debug (copy_stmt))
2265 	update_stmt (copy_stmt);
2266 
2267       /* Do this before the possible split_block.  */
2268       gsi_next (&si);
2269 
2270       /* If this tree could throw an exception, there are two
2271          cases where we need to add abnormal edge(s): the
2272          tree wasn't in a region and there is a "current
2273          region" in the caller; or the original tree had
2274          EH edges.  In both cases split the block after the tree,
2275          and add abnormal edge(s) as needed; we need both
2276          those from the callee and the caller.
2277          We check whether the copy can throw, because the const
2278          propagation can change an INDIRECT_REF which throws
2279          into a COMPONENT_REF which doesn't.  If the copy
2280          can throw, the original could also throw.  */
2281       can_throw = stmt_can_throw_internal (copy_stmt);
2282       nonlocal_goto
2283 	= (stmt_can_make_abnormal_goto (copy_stmt)
2284 	   && !computed_goto_p (copy_stmt));
2285 
2286       if (can_throw || nonlocal_goto)
2287 	{
2288 	  if (!gsi_end_p (si))
2289 	    {
2290 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2291 		gsi_next (&si);
2292 	      if (gsi_end_p (si))
2293 		need_debug_cleanup = true;
2294 	    }
2295 	  if (!gsi_end_p (si))
2296 	    /* Note that bb's predecessor edges aren't necessarily
2297 	       right at this point; split_block doesn't care.  */
2298 	    {
2299 	      edge e = split_block (new_bb, copy_stmt);
2300 
2301 	      new_bb = e->dest;
2302 	      new_bb->aux = e->src->aux;
2303 	      si = gsi_start_bb (new_bb);
2304 	    }
2305 	}
2306 
2307       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2308 	make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2309       else if (can_throw)
2310 	make_eh_edges (copy_stmt);
2311 
2312       /* If the call we inline cannot make an abnormal goto, do not add
2313          additional abnormal edges but only retain those already present
2314 	 in the original function body.  */
2315       if (abnormal_goto_dest == NULL)
2316 	nonlocal_goto = false;
2317       if (nonlocal_goto)
2318 	{
2319 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2320 
2321 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2322 	    nonlocal_goto = false;
2323 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2324 	     in OpenMP regions which aren't allowed to be left abnormally.
2325 	     So, no need to add abnormal edge in that case.  */
2326 	  else if (is_gimple_call (copy_stmt)
2327 		   && gimple_call_internal_p (copy_stmt)
2328 		   && (gimple_call_internal_fn (copy_stmt)
2329 		       == IFN_ABNORMAL_DISPATCHER)
2330 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2331 	    nonlocal_goto = false;
2332 	  else
2333 	    make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2334 	}
2335 
2336       if ((can_throw || nonlocal_goto)
2337 	  && gimple_in_ssa_p (cfun))
2338 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2339 					  can_throw, nonlocal_goto);
2340     }
2341   return need_debug_cleanup;
2342 }
2343 
2344 /* Copy the PHIs.  All blocks and edges are copied, some blocks
2345    were possibly split and new outgoing EH edges inserted.
2346    BB points to the block of the original function and AUX pointers
2347    link the original and newly copied blocks.  */
2348 
2349 static void
2350 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2351 {
2352   basic_block const new_bb = (basic_block) bb->aux;
2353   edge_iterator ei;
2354   gphi *phi;
2355   gphi_iterator si;
2356   edge new_edge;
2357   bool inserted = false;
2358 
2359   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2360     {
2361       tree res, new_res;
2362       gphi *new_phi;
2363 
2364       phi = si.phi ();
2365       res = PHI_RESULT (phi);
2366       new_res = res;
2367       if (!virtual_operand_p (res))
2368 	{
2369 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2370 	  if (EDGE_COUNT (new_bb->preds) == 0)
2371 	    {
2372 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2373 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2374 	    }
2375 	  else
2376 	    {
2377 	      new_phi = create_phi_node (new_res, new_bb);
2378 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2379 		{
2380 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2381 					     bb);
2382 		  tree arg;
2383 		  tree new_arg;
2384 		  edge_iterator ei2;
2385 		  location_t locus;
2386 
2387 		  /* When doing partial cloning, we allow PHIs on the entry
2388 		     block as long as all the arguments are the same.
2389 		     Find any input edge to get the argument to copy.  */
2390 		  if (!old_edge)
2391 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2392 		      if (!old_edge->src->aux)
2393 			break;
2394 
2395 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2396 		  new_arg = arg;
2397 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2398 		  gcc_assert (new_arg);
2399 		  /* With return slot optimization we can end up with
2400 		     non-gimple (foo *)&this->m, fix that here.  */
2401 		  if (TREE_CODE (new_arg) != SSA_NAME
2402 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2403 		      && !is_gimple_val (new_arg))
2404 		    {
2405 		      gimple_seq stmts = NULL;
2406 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2407 						      NULL);
2408 		      gsi_insert_seq_on_edge (new_edge, stmts);
2409 		      inserted = true;
2410 		    }
2411 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2412 		  if (LOCATION_BLOCK (locus))
2413 		    {
2414 		      tree *n;
2415 		      n = id->decl_map->get (LOCATION_BLOCK (locus));
2416 		      gcc_assert (n);
2417 		      locus = set_block (locus, *n);
2418 		    }
2419 		  else
2420 		    locus = LOCATION_LOCUS (locus);
2421 
2422 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2423 		}
2424 	    }
2425 	}
2426     }
2427 
2428   /* Commit the delayed edge insertions.  */
2429   if (inserted)
2430     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2431       gsi_commit_one_edge_insert (new_edge, NULL);
2432 }
2433 
2434 
2435 /* Wrapper for remap_decl so it can be used as a callback.  */
2436 
2437 static tree
2438 remap_decl_1 (tree decl, void *data)
2439 {
2440   return remap_decl (decl, (copy_body_data *) data);
2441 }
2442 
2443 /* Build struct function and associated data structures for the new clone
2444    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This function
2445    changes cfun to NEW_FNDECL's function (and current_function_decl too).  */
2446 
2447 static void
2448 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2449 {
2450   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2451   gcov_type count_scale;
2452 
2453   if (!DECL_ARGUMENTS (new_fndecl))
2454     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2455   if (!DECL_RESULT (new_fndecl))
2456     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2457 
2458   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2459     count_scale
2460         = GCOV_COMPUTE_SCALE (count,
2461                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2462   else
2463     count_scale = REG_BR_PROB_BASE;
2464 
2465   /* Register specific tree functions.  */
2466   gimple_register_cfg_hooks ();
2467 
2468   /* Get clean struct function.  */
2469   push_struct_function (new_fndecl);
2470 
2471   /* We will rebuild these, so just sanity check that they are empty.  */
2472   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2473   gcc_assert (cfun->local_decls == NULL);
2474   gcc_assert (cfun->cfg == NULL);
2475   gcc_assert (cfun->decl == new_fndecl);
2476 
2477   /* Copy items we preserve during cloning.  */
2478   cfun->static_chain_decl = src_cfun->static_chain_decl;
2479   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2480   cfun->function_end_locus = src_cfun->function_end_locus;
2481   cfun->curr_properties = src_cfun->curr_properties;
2482   cfun->last_verified = src_cfun->last_verified;
2483   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2484   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2485   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2486   cfun->stdarg = src_cfun->stdarg;
2487   cfun->after_inlining = src_cfun->after_inlining;
2488   cfun->can_throw_non_call_exceptions
2489     = src_cfun->can_throw_non_call_exceptions;
2490   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2491   cfun->returns_struct = src_cfun->returns_struct;
2492   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2493 
2494   init_empty_tree_cfg ();
2495 
2496   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2497   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2498     (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2499      REG_BR_PROB_BASE);
2500   ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2501     = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2502   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2503     (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2504      REG_BR_PROB_BASE);
2505   EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2506     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2507   if (src_cfun->eh)
2508     init_eh_for_function ();
2509 
2510   if (src_cfun->gimple_df)
2511     {
2512       init_tree_ssa (cfun);
2513       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2514       if (cfun->gimple_df->in_ssa_p)
2515 	init_ssa_operands (cfun);
2516     }
2517 }
2518 
2519 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2520    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2521    successor has multiple predecessors, reset the debug stmt values;
2522    otherwise keep them.  */
2523 
2524 static void
2525 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2526 {
2527   edge e;
2528   edge_iterator ei;
2529   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2530 
2531   if (gsi_end_p (si)
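  /* Nothing to do unless NEW_BB ends with a stmt that must end the block
     (it can throw internally or make an abnormal goto) and debug stmts
     follow it.  */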
2532       || gsi_one_before_end_p (si)
2533       || !(stmt_can_throw_internal (gsi_stmt (si))
2534 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2535     return;
2536 
2537   FOR_EACH_EDGE (e, ei, new_bb->succs)
2538     {
2539       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2540       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2541       while (is_gimple_debug (gsi_stmt (ssi)))
2542 	{
2543 	  gimple *stmt = gsi_stmt (ssi);
2544 	  gdebug *new_stmt;
2545 	  tree var;
2546 	  tree value;
2547 
2548 	  /* For the last edge move the debug stmts instead of copying
2549 	     them.  */
2550 	  if (ei_one_before_end_p (ei))
2551 	    {
2552 	      si = ssi;
2553 	      gsi_prev (&ssi);
2554 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2555 		gimple_debug_bind_reset_value (stmt);
2556 	      gsi_remove (&si, false);
2557 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2558 	      continue;
2559 	    }
2560 
2561 	  if (gimple_debug_bind_p (stmt))
2562 	    {
2563 	      var = gimple_debug_bind_get_var (stmt);
2564 	      if (single_pred_p (e->dest))
2565 		{
2566 		  value = gimple_debug_bind_get_value (stmt);
2567 		  value = unshare_expr (value);
2568 		}
2569 	      else
2570 		value = NULL_TREE;
2571 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2572 	    }
2573 	  else if (gimple_debug_source_bind_p (stmt))
2574 	    {
2575 	      var = gimple_debug_source_bind_get_var (stmt);
2576 	      value = gimple_debug_source_bind_get_value (stmt);
2577 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2578 	    }
2579 	  else
2580 	    gcc_unreachable ();
2581 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2582 	  id->debug_stmts.safe_push (new_stmt);
2583 	  gsi_prev (&ssi);
2584 	}
2585     }
2586 }
2587 
2588 /* Make a copy of the sub-loops of SRC_PARENT and place them
2589    as children of DEST_PARENT.  */
2590 
2591 static void
2592 copy_loops (copy_body_data *id,
2593 	    struct loop *dest_parent, struct loop *src_parent)
2594 {
2595   struct loop *src_loop = src_parent->inner;
2596   while (src_loop)
2597     {
2598       if (!id->blocks_to_copy
2599 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2600 	{
2601 	  struct loop *dest_loop = alloc_loop ();
2602 
2603 	  /* Assign the new loop its header and latch and associate
2604 	     those with the new loop.  */
2605 	  dest_loop->header = (basic_block)src_loop->header->aux;
2606 	  dest_loop->header->loop_father = dest_loop;
2607 	  if (src_loop->latch != NULL)
2608 	    {
2609 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2610 	      dest_loop->latch->loop_father = dest_loop;
2611 	    }
2612 
2613 	  /* Copy loop meta-data.  */
2614 	  copy_loop_info (src_loop, dest_loop);
2615 
2616 	  /* Finally place it into the loop array and the loop tree.  */
2617 	  place_new_loop (cfun, dest_loop);
2618 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2619 
2620 	  dest_loop->safelen = src_loop->safelen;
2621 	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
2622 	  if (src_loop->force_vectorize)
2623 	    {
2624 	      dest_loop->force_vectorize = true;
2625 	      cfun->has_force_vectorize_loops = true;
2626 	    }
2627 	  if (src_loop->simduid)
2628 	    {
2629 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2630 	      cfun->has_simduid_loops = true;
2631 	    }
2632 
2633 	  /* Recurse.  */
2634 	  copy_loops (id, dest_loop, src_loop);
2635 	}
2636       src_loop = src_loop->next;
2637     }
2638 }
2639 
2640 /* Call cgraph_edge::redirect_call_stmt_to_callee on all calls in BB.  */
2641 
2642 void
2643 redirect_all_calls (copy_body_data * id, basic_block bb)
2644 {
2645   gimple_stmt_iterator si;
2646   gimple *last = last_stmt (bb);
2647   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2648     {
2649       gimple *stmt = gsi_stmt (si);
2650       if (is_gimple_call (stmt))
2651 	{
2652 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2653 	  if (edge)
2654 	    {
2655 	      edge->redirect_call_stmt_to_callee ();
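	      /* When inlining, if the redirected call was the last stmt
		 of BB and no longer throws, purge its now-dead EH edges.  */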
2656 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2657 		gimple_purge_dead_eh_edges (bb);
2658 	    }
2659 	}
2660     }
2661 }
2662 
2663 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2664    with each bb's frequency. Used when NODE has a 0-weight entry
2665    but we are about to inline it into a non-zero count call bb.
2666    See the comments for handle_missing_profiles() in predict.c for
2667    when this can happen for COMDATs.  */
2668 
2669 void
2670 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2671 {
2672   basic_block bb;
2673   edge_iterator ei;
2674   edge e;
2675   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2676 
2677   FOR_ALL_BB_FN(bb, fn)
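  /* Scale each block count as bb->frequency / BB_FREQ_MAX of COUNT, then
     derive edge counts from the scaled source block counts and edge
     probabilities.  */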
2678     {
2679       bb->count = apply_scale (count,
2680                                GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2681       FOR_EACH_EDGE (e, ei, bb->succs)
2682         e->count = apply_probability (e->src->count, e->probability);
2683     }
2684 }
2685 
2686 /* Make a copy of the body of FN so that it can be inserted inline in
2687    another function.  Walks FN via CFG, returns new fndecl.  */
2688 
2689 static tree
2690 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2691 	       basic_block entry_block_map, basic_block exit_block_map,
2692 	       basic_block new_entry)
2693 {
2694   tree callee_fndecl = id->src_fn;
2695   /* Original cfun for the callee, doesn't change.  */
2696   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2697   struct function *cfun_to_copy;
2698   basic_block bb;
2699   tree new_fndecl = NULL;
2700   bool need_debug_cleanup = false;
2701   gcov_type count_scale;
2702   int last;
2703   int incoming_frequency = 0;
2704   gcov_type incoming_count = 0;
2705 
2706   /* This can happen for COMDAT routines that end up with 0 counts
2707      despite being called (see the comments for handle_missing_profiles()
2708      in predict.c as to why). Apply counts to the blocks in the callee
2709      before inlining, using the guessed edge frequencies, so that we don't
2710      end up with a 0-count inline body which can confuse downstream
2711      optimizations such as function splitting.  */
2712   if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2713     {
2714       /* Apply the larger of the call bb count and the total incoming
2715          call edge count to the callee.  */
2716       gcov_type in_count = 0;
2717       struct cgraph_edge *in_edge;
2718       for (in_edge = id->src_node->callers; in_edge;
2719            in_edge = in_edge->next_caller)
2720         in_count += in_edge->count;
2721       freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2722     }
2723 
2724   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2725     count_scale
2726         = GCOV_COMPUTE_SCALE (count,
2727                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2728   else
2729     count_scale = REG_BR_PROB_BASE;
2730 
2731   /* Register specific tree functions.  */
2732   gimple_register_cfg_hooks ();
2733 
2734   /* If we are inlining just a region of the function, make sure to connect
2735      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2736      be part of a loop, we must compute the frequency and probability of
2737      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2738      probabilities of edges incoming from the nonduplicated region.  */
2739   if (new_entry)
2740     {
2741       edge e;
2742       edge_iterator ei;
2743 
2744       FOR_EACH_EDGE (e, ei, new_entry->preds)
2745 	if (!e->src->aux)
2746 	  {
2747 	    incoming_frequency += EDGE_FREQUENCY (e);
2748 	    incoming_count += e->count;
2749 	  }
2750       incoming_count = apply_scale (incoming_count, count_scale);
2751       incoming_frequency
2752 	= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2753       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2754       ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2755     }
2756 
2757   /* Must have a CFG here at this point.  */
2758   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2759 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2760 
2761   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2762 
2763   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2764   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2765   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2766   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2767 
2768   /* Duplicate any exception-handling regions.  */
2769   if (cfun->eh)
2770     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2771 				       remap_decl_1, id);
2772 
2773   /* Use aux pointers to link the original blocks and their copies.  */
2774   FOR_EACH_BB_FN (bb, cfun_to_copy)
2775     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2776       {
2777 	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2778 	bb->aux = new_bb;
2779 	new_bb->aux = bb;
2780 	new_bb->loop_father = entry_block_map->loop_father;
2781       }
2782 
2783   last = last_basic_block_for_fn (cfun);
2784 
2785   /* Now that we've duplicated the blocks, duplicate their edges.  */
2786   basic_block abnormal_goto_dest = NULL;
2787   if (id->call_stmt
2788       && stmt_can_make_abnormal_goto (id->call_stmt))
2789     {
2790       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2791 
2792       bb = gimple_bb (id->call_stmt);
2793       gsi_next (&gsi);
2794       if (gsi_end_p (gsi))
2795 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2796     }
2797   FOR_ALL_BB_FN (bb, cfun_to_copy)
2798     if (!id->blocks_to_copy
2799 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2800       need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2801 					       abnormal_goto_dest);
2802 
2803   if (new_entry)
2804     {
2805       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2806       e->probability = REG_BR_PROB_BASE;
2807       e->count = incoming_count;
2808     }
2809 
2810   /* Duplicate the loop tree, if available and wanted.  */
2811   if (loops_for_fn (src_cfun) != NULL
2812       && current_loops != NULL)
2813     {
2814       copy_loops (id, entry_block_map->loop_father,
2815 		  get_loop (src_cfun, 0));
2816       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2817       loops_state_set (LOOPS_NEED_FIXUP);
2818     }
2819 
2820   /* If the loop tree in the source function needed fixup, mark the
2821      destination loop tree for fixup, too.  */
2822   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2823     loops_state_set (LOOPS_NEED_FIXUP);
2824 
2825   if (gimple_in_ssa_p (cfun))
2826     FOR_ALL_BB_FN (bb, cfun_to_copy)
2827       if (!id->blocks_to_copy
2828 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2829 	copy_phis_for_bb (bb, id);
2830 
2831   FOR_ALL_BB_FN (bb, cfun_to_copy)
2832     if (bb->aux)
2833       {
2834 	if (need_debug_cleanup
2835 	    && bb->index != ENTRY_BLOCK
2836 	    && bb->index != EXIT_BLOCK)
2837 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2838 	/* Update call edge destinations.  This can not be done before loop
2839 	   info is updated, because we may split basic blocks.  */
2840 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2841 	    && bb->index != ENTRY_BLOCK
2842 	    && bb->index != EXIT_BLOCK)
2843 	  redirect_all_calls (id, (basic_block)bb->aux);
2844 	((basic_block)bb->aux)->aux = NULL;
2845 	bb->aux = NULL;
2846       }
2847 
2848   /* Zero out AUX fields of the blocks newly created during EH edge
2849      insertion.  */
2850   for (; last < last_basic_block_for_fn (cfun); last++)
2851     {
2852       if (need_debug_cleanup)
2853 	maybe_move_debug_stmts_to_successors (id,
2854 					      BASIC_BLOCK_FOR_FN (cfun, last));
2855       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2856       /* Update call edge destinations.  This can not be done before loop
2857 	 info is updated, because we may split basic blocks.  */
2858       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2859 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2860     }
2861   entry_block_map->aux = NULL;
2862   exit_block_map->aux = NULL;
2863 
2864   if (id->eh_map)
2865     {
2866       delete id->eh_map;
2867       id->eh_map = NULL;
2868     }
2869   if (id->dependence_map)
2870     {
2871       delete id->dependence_map;
2872       id->dependence_map = NULL;
2873     }
2874 
2875   return new_fndecl;
2876 }
2877 
2878 /* Copy the debug STMT using ID.  We deal with these statements in a
2879    special way: if any variable in their VALUE expression wasn't
2880    remapped yet, we won't remap it, because that would get decl uids
2881    out of sync, causing codegen differences between -g and -g0.  If
2882    this arises, we drop the VALUE expression altogether.  */
2883 
2884 static void
2885 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2886 {
2887   tree t, *n;
2888   struct walk_stmt_info wi;
2889 
2890   if (gimple_block (stmt))
2891     {
2892       n = id->decl_map->get (gimple_block (stmt));
2893       gimple_set_block (stmt, n ? *n : id->block);
2894     }
2895 
2896   /* Remap all the operands in COPY.  */
2897   memset (&wi, 0, sizeof (wi));
2898   wi.info = id;
2899 
2900   processing_debug_stmt = 1;
2901 
2902   if (gimple_debug_source_bind_p (stmt))
2903     t = gimple_debug_source_bind_get_var (stmt);
2904   else
2905     t = gimple_debug_bind_get_var (stmt);
2906 
2907   if (TREE_CODE (t) == PARM_DECL && id->debug_map
2908       && (n = id->debug_map->get (t)))
2909     {
2910       gcc_assert (VAR_P (*n));
2911       t = *n;
2912     }
2913   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2914     /* T is a non-localized variable.  */;
2915   else
2916     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2917 
2918   if (gimple_debug_bind_p (stmt))
2919     {
2920       gimple_debug_bind_set_var (stmt, t);
2921 
2922       if (gimple_debug_bind_has_value_p (stmt))
2923 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2924 		   remap_gimple_op_r, &wi, NULL);
2925 
2926       /* Punt if any decl couldn't be remapped.  */
2927       if (processing_debug_stmt < 0)
2928 	gimple_debug_bind_reset_value (stmt);
2929     }
2930   else if (gimple_debug_source_bind_p (stmt))
2931     {
2932       gimple_debug_source_bind_set_var (stmt, t);
2933       /* When inlining and source bind refers to one of the optimized
2934 	 away parameters, change the source bind into normal debug bind
2935 	 referring to the corresponding DEBUG_EXPR_DECL that should have
2936 	 been bound before the call stmt.  */
2937       t = gimple_debug_source_bind_get_value (stmt);
2938       if (t != NULL_TREE
2939 	  && TREE_CODE (t) == PARM_DECL
2940 	  && id->call_stmt)
2941 	{
2942 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2943 	  unsigned int i;
2944 	  if (debug_args != NULL)
2945 	    {
2946 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2947 		if ((**debug_args)[i] == DECL_ORIGIN (t)
2948 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2949 		  {
2950 		    t = (**debug_args)[i + 1];
2951 		    stmt->subcode = GIMPLE_DEBUG_BIND;
2952 		    gimple_debug_bind_set_value (stmt, t);
2953 		    break;
2954 		  }
2955 	    }
2956 	}
2957       if (gimple_debug_source_bind_p (stmt))
2958 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2959 		   remap_gimple_op_r, &wi, NULL);
2960     }
2961 
2962   processing_debug_stmt = 0;
2963 
2964   update_stmt (stmt);
2965 }
2966 
2967 /* Process deferred debug stmts.  In order to give values better odds
2968    of being successfully remapped, we delay the processing of debug
2969    stmts until all other stmts that might require remapping are
2970    processed.  */
2971 
2972 static void
2973 copy_debug_stmts (copy_body_data *id)
2974 {
2975   size_t i;
2976   gdebug *stmt;
2977 
2978   if (!id->debug_stmts.exists ())
2979     return;
2980 
2981   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2982     copy_debug_stmt (stmt, id);
2983 
2984   id->debug_stmts.release ();
2985 }
2986 
2987 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2988    another function.  */
2989 
2990 static tree
2991 copy_tree_body (copy_body_data *id)
2992 {
2993   tree fndecl = id->src_fn;
2994   tree body = DECL_SAVED_TREE (fndecl);
2995 
2996   walk_tree (&body, copy_tree_body_r, id, NULL);
2997 
2998   return body;
2999 }
3000 
3001 /* Make a copy of the body of FN so that it can be inserted inline in
3002    another function.  */
3003 
3004 static tree
3005 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
3006 	   basic_block entry_block_map, basic_block exit_block_map,
3007 	   basic_block new_entry)
3008 {
3009   tree fndecl = id->src_fn;
3010   tree body;
3011 
3012   /* If this body has a CFG, walk CFG and copy.  */
3013   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3014   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
3015 			new_entry);
3016   copy_debug_stmts (id);
3017 
3018   return body;
3019 }
3020 
3021 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3022    defined in function FN, or of a data member thereof.  */
3023 
3024 static bool
3025 self_inlining_addr_expr (tree value, tree fn)
3026 {
3027   tree var;
3028 
3029   if (TREE_CODE (value) != ADDR_EXPR)
3030     return false;
3031 
3032   var = get_base_address (TREE_OPERAND (value, 0));
3033 
3034   return var && auto_var_in_fn_p (var, fn);
3035 }
3036 
3037 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3038    lexical block and line number information from base_stmt, if given,
3039    or from the last stmt of the block otherwise.  */
3040 
3041 static gimple *
3042 insert_init_debug_bind (copy_body_data *id,
3043 			basic_block bb, tree var, tree value,
3044 			gimple *base_stmt)
3045 {
3046   gimple *note;
3047   gimple_stmt_iterator gsi;
3048   tree tracked_var;
3049 
3050   if (!gimple_in_ssa_p (id->src_cfun))
3051     return NULL;
3052 
3053   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3054     return NULL;
3055 
3056   tracked_var = target_for_debug_bind (var);
3057   if (!tracked_var)
3058     return NULL;
3059 
3060   if (bb)
3061     {
3062       gsi = gsi_last_bb (bb);
3063       if (!base_stmt && !gsi_end_p (gsi))
3064 	base_stmt = gsi_stmt (gsi);
3065     }
3066 
3067   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3068 
3069   if (bb)
3070     {
3071       if (!gsi_end_p (gsi))
3072 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3073       else
3074 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3075     }
3076 
3077   return note;
3078 }
3079 
3080 static void
3081 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3082 {
3083   /* If VAR represents a zero-sized variable, it's possible that the
3084      assignment statement may result in no gimple statements.  */
3085   if (init_stmt)
3086     {
3087       gimple_stmt_iterator si = gsi_last_bb (bb);
3088 
3089       /* We can end up with init statements that store to a non-register
3090          from a rhs with a conversion.  Handle that here by forcing the
3091 	 rhs into a temporary.  gimple_regimplify_operands is not
3092 	 prepared to do this for us.  */
3093       if (!is_gimple_debug (init_stmt)
3094 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3095 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3096 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3097 	{
3098 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3099 			     gimple_expr_type (init_stmt),
3100 			     gimple_assign_rhs1 (init_stmt));
3101 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3102 					  GSI_NEW_STMT);
3103 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3104 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3105 	}
3106       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3107       gimple_regimplify_operands (init_stmt, &si);
3108 
3109       if (!is_gimple_debug (init_stmt))
3110 	{
3111 	  tree def = gimple_assign_lhs (init_stmt);
3112 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3113 	}
3114     }
3115 }
3116 
3117 /* Initialize parameter P with VALUE.  If needed, produce init statement
3118    at the end of BB.  When BB is NULL, we return init statement to be
3119    output later.  */
3120 static gimple *
3121 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3122 		     basic_block bb, tree *vars)
3123 {
3124   gimple *init_stmt = NULL;
3125   tree var;
3126   tree rhs = value;
3127   tree def = (gimple_in_ssa_p (cfun)
3128 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3129 
3130   if (value
3131       && value != error_mark_node
3132       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3133     {
3134       /* If we can match up types by promotion/demotion do so.  */
3135       if (fold_convertible_p (TREE_TYPE (p), value))
3136 	rhs = fold_convert (TREE_TYPE (p), value);
3137       else
3138 	{
3139 	  /* ???  For valid programs we should not end up here.
3140 	     Still if we end up with truly mismatched types here, fall back
3141 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3142 	     GIMPLE to the following passes.  */
3143 	  if (!is_gimple_reg_type (TREE_TYPE (value))
3144 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3145 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3146 	  else
3147 	    rhs = build_zero_cst (TREE_TYPE (p));
3148 	}
3149     }
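  /* A sketch of the above (types assumed only for illustration): an int
     VALUE passed for a short parameter P becomes rhs = (short) value via
     fold_convert; a size-matched but incompatible value becomes
     VIEW_CONVERT_EXPR <TREE_TYPE (p)> (value); anything else falls back
     to a zero constant of P's type.  */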
3150 
3151   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3152      here since the type of this decl must be visible to the calling
3153      function.  */
3154   var = copy_decl_to_var (p, id);
3155 
3156   /* Declare this new variable.  */
3157   DECL_CHAIN (var) = *vars;
3158   *vars = var;
3159 
3160   /* Make gimplifier happy about this variable.  */
3161   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3162 
3163   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3164      we would not need to create a new variable here at all, if it
3165      weren't for debug info.  Still, we can just use the argument
3166      value.  */
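  /* For example (a sketch): for a call foo (5) whose matching parameter is
     const, never written and never address-taken, the code below just maps
     the PARM_DECL to the constant 5 and emits only a debug bind for it.  */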
3167   if (TREE_READONLY (p)
3168       && !TREE_ADDRESSABLE (p)
3169       && value && !TREE_SIDE_EFFECTS (value)
3170       && !def)
3171     {
3172       /* We may produce non-gimple trees by adding NOPs or introduce
3173 	 invalid sharing when the operand is not really constant.
3174 	 It is not a big deal to prohibit constant propagation here, as
3175 	 we will constant propagate in the DOM1 pass anyway.  */
3176       if (is_gimple_min_invariant (value)
3177 	  && useless_type_conversion_p (TREE_TYPE (p),
3178 						 TREE_TYPE (value))
3179 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3180 	     the base variable isn't a local variable of the inlined
3181 	     function, e.g., when doing recursive inlining, direct or
3182 	     mutually-recursive or whatever, which is why we don't
3183 	     just test whether fn == current_function_decl.  */
3184 	  && ! self_inlining_addr_expr (value, fn))
3185 	{
3186 	  insert_decl_map (id, p, value);
3187 	  insert_debug_decl_map (id, p, var);
3188 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3189 	}
3190     }
3191 
3192   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3193      that way, when the PARM_DECL is encountered, it will be
3194      automatically replaced by the VAR_DECL.  */
3195   insert_decl_map (id, p, var);
3196 
3197   /* Even if P was TREE_READONLY, the new VAR should not be.
3198      In the original code, we would have constructed a
3199      temporary, and then the function body would have never
3200      changed the value of P.  However, now, we will be
3201      constructing VAR directly.  The constructor body may
3202      change its value multiple times as it is being
3203      constructed.  Therefore, it must not be TREE_READONLY;
3204      the back-end assumes that a TREE_READONLY variable is
3205      assigned to only once.  */
3206   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3207     TREE_READONLY (var) = 0;
3208 
3209   /* If there is no setup required and we are in SSA, take the easy route
3210      replacing all SSA names representing the function parameter by the
3211      SSA name passed to the function.
3212 
3213      We need to construct a map for the variable anyway, as it might be used
3214      in different SSA names when the parameter is set in the function.
3215 
3216      Do the replacement at -O0 for const arguments replaced by a constant.
3217      This is important for builtin_constant_p and other constructs requiring
3218      a constant argument to be visible in the inlined function body.  */
3219   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3220       && (optimize
3221           || (TREE_READONLY (p)
3222 	      && is_gimple_min_invariant (rhs)))
3223       && (TREE_CODE (rhs) == SSA_NAME
3224 	  || is_gimple_min_invariant (rhs))
3225       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3226     {
3227       insert_decl_map (id, def, rhs);
3228       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3229     }
3230 
3231   /* If the value of the argument is never used, don't bother initializing
3232      it.  */
3233   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3234     {
3235       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3236       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3237     }
3238 
3239   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3240      the argument to the proper type in case it was promoted.  */
3241   if (value)
3242     {
3243       if (rhs == error_mark_node)
3244 	{
3245 	  insert_decl_map (id, p, var);
3246 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3247 	}
3248 
3249       STRIP_USELESS_TYPE_CONVERSION (rhs);
3250 
3251       /* If we are in SSA form, properly remap the default definition
3252          or assign to a dummy SSA name if the parameter is unused and
3253 	 we are not optimizing.  */
3254       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3255 	{
3256 	  if (def)
3257 	    {
3258 	      def = remap_ssa_name (def, id);
3259 	      init_stmt = gimple_build_assign (def, rhs);
3260 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3261 	      set_ssa_default_def (cfun, var, NULL);
3262 	    }
3263 	  else if (!optimize)
3264 	    {
3265 	      def = make_ssa_name (var);
3266 	      init_stmt = gimple_build_assign (def, rhs);
3267 	    }
3268 	}
3269       else
3270         init_stmt = gimple_build_assign (var, rhs);
3271 
3272       if (bb && init_stmt)
3273         insert_init_stmt (id, bb, init_stmt);
3274     }
3275   return init_stmt;
3276 }
3277 
3278 /* Generate code to initialize the parameters of the function at the
3279    top of the stack in ID from the GIMPLE_CALL STMT.  */
3280 
3281 static void
3282 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3283 			       tree fn, basic_block bb)
3284 {
3285   tree parms;
3286   size_t i;
3287   tree p;
3288   tree vars = NULL_TREE;
3289   tree static_chain = gimple_call_chain (stmt);
3290 
3291   /* Figure out what the parameters are.  */
3292   parms = DECL_ARGUMENTS (fn);
3293 
3294   /* Loop through the parameter declarations, replacing each with an
3295      equivalent VAR_DECL, appropriately initialized.  */
3296   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3297     {
3298       tree val;
3299       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3300       setup_one_parameter (id, p, val, fn, bb, &vars);
3301     }
3302   /* After remapping parameters, remap their types.  This has to be done
3303      in a second loop over all parameters to appropriately remap
3304      variable sized arrays when the size is specified in a
3305      parameter following the array.  */
3306   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3307     {
3308       tree *varp = id->decl_map->get (p);
3309       if (varp && VAR_P (*varp))
3310 	{
3311 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3312 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3313 	  tree var = *varp;
3314 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3315 	  /* Also remap the default definition if it was remapped
3316 	     to the default definition of the parameter replacement
3317 	     by the parameter setup.  */
3318 	  if (def)
3319 	    {
3320 	      tree *defp = id->decl_map->get (def);
3321 	      if (defp
3322 		  && TREE_CODE (*defp) == SSA_NAME
3323 		  && SSA_NAME_VAR (*defp) == var)
3324 		TREE_TYPE (*defp) = TREE_TYPE (var);
3325 	    }
3326 	}
3327     }
3328 
3329   /* Initialize the static chain.  */
3330   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3331   gcc_assert (fn != current_function_decl);
3332   if (p)
3333     {
3334       /* No static chain?  Seems like a bug in tree-nested.c.  */
3335       gcc_assert (static_chain);
3336 
3337       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3338     }
3339 
3340   declare_inline_vars (id->block, vars);
3341 }
3342 
3343 
3344 /* Declare a return variable to replace the RESULT_DECL for the
3345    function we are calling.  An appropriate DECL_STMT is returned.
3346    The USE_STMT is filled to contain a use of the declaration to
3347    indicate the return value of the function.
3348 
3349    RETURN_SLOT, if non-null, is the place where the result is stored.  It
3350    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3351    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3352 
3353    RETURN_BOUNDS holds a destination for returned bounds.
3354 
3355    The return value is a (possibly null) value that holds the result
3356    as seen by the caller.  */
3357 
3358 static tree
3359 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3360 			 tree return_bounds, basic_block entry_bb)
3361 {
3362   tree callee = id->src_fn;
3363   tree result = DECL_RESULT (callee);
3364   tree callee_type = TREE_TYPE (result);
3365   tree caller_type;
3366   tree var, use;
3367 
3368   /* Handle type-mismatches in the function declaration return type
3369      vs. the call expression.  */
3370   if (modify_dest)
3371     caller_type = TREE_TYPE (modify_dest);
3372   else
3373     caller_type = TREE_TYPE (TREE_TYPE (callee));
3374 
3375   /* We don't need to do anything for functions that don't return anything.  */
3376   if (VOID_TYPE_P (callee_type))
3377     return NULL_TREE;
3378 
3379   /* If there was a return slot, then the return value is the
3380      dereferenced address of that object.  */
3381   if (return_slot)
3382     {
3383       /* The front end shouldn't have used both return_slot and
3384 	 a modify expression.  */
3385       gcc_assert (!modify_dest);
3386       if (DECL_BY_REFERENCE (result))
3387 	{
3388 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3389 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3390 
3391 	  /* We are going to construct *&return_slot and we can't do that
3392 	     for variables believed to be not addressable.
3393 
3394 	     FIXME: This check can possibly trigger, because values returned
3395 	     via the return slot optimization are not believed to have their
3396 	     address taken by alias analysis.  */
3397 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3398 	  var = return_slot_addr;
3399 	}
3400       else
3401 	{
3402 	  var = return_slot;
3403 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3404 	  if (TREE_ADDRESSABLE (result))
3405 	    mark_addressable (var);
3406 	}
3407       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3408            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3409 	  && !DECL_GIMPLE_REG_P (result)
3410 	  && DECL_P (var))
3411 	DECL_GIMPLE_REG_P (var) = 0;
3412       use = NULL;
3413       goto done;
3414     }
3415 
3416   /* All types requiring non-trivial constructors should have been handled.  */
3417   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3418 
3419   /* Attempt to avoid creating a new temporary variable.  */
3420   if (modify_dest
3421       && TREE_CODE (modify_dest) != SSA_NAME)
3422     {
3423       bool use_it = false;
3424 
3425       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3426       if (!useless_type_conversion_p (callee_type, caller_type))
3427 	use_it = false;
3428 
3429       /* ??? If we're assigning to a variable sized type, then we must
3430 	 reuse the destination variable, because we've no good way to
3431 	 create variable sized temporaries at this point.  */
3432       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3433 	use_it = true;
3434 
3435       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3436 	 reuse it as the result of the call directly.  Don't do this if
3437 	 it would promote MODIFY_DEST to addressable.  */
3438       else if (TREE_ADDRESSABLE (result))
3439 	use_it = false;
3440       else
3441 	{
3442 	  tree base_m = get_base_address (modify_dest);
3443 
3444 	  /* If the base isn't a decl, then it's a pointer, and we don't
3445 	     know where that's going to go.  */
3446 	  if (!DECL_P (base_m))
3447 	    use_it = false;
3448 	  else if (is_global_var (base_m))
3449 	    use_it = false;
3450 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3451 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3452 		   && !DECL_GIMPLE_REG_P (result)
3453 		   && DECL_GIMPLE_REG_P (base_m))
3454 	    use_it = false;
3455 	  else if (!TREE_ADDRESSABLE (base_m))
3456 	    use_it = true;
3457 	}
3458 
3459       if (use_it)
3460 	{
3461 	  var = modify_dest;
3462 	  use = NULL;
3463 	  goto done;
3464 	}
3465     }
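  /* A sketch of the decision above: for "x = foo ()" where X is a local,
     non-addressable variable of a compatible type, X itself is reused as
     the return variable; for a global or addressable destination we fall
     through and build a separate temporary below.  */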
3466 
3467   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3468 
3469   var = copy_result_decl_to_var (result, id);
3470   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3471 
3472   /* Do not have the rest of GCC warn about this variable as it should
3473      not be visible to the user.  */
3474   TREE_NO_WARNING (var) = 1;
3475 
3476   declare_inline_vars (id->block, var);
3477 
3478   /* Build the use expr.  If the return type of the function was
3479      promoted, convert it back to the expected type.  */
3480   use = var;
3481   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3482     {
3483       /* If we can match up types by promotion/demotion do so.  */
3484       if (fold_convertible_p (caller_type, var))
3485 	use = fold_convert (caller_type, var);
3486       else
3487 	{
3488 	  /* ???  For valid programs we should not end up here.
3489 	     Still if we end up with truly mismatched types here, fall back
3490 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3491 	     passes.  */
3492 	  /* Prevent var from being written into SSA form.  */
3493 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3494 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3495 	    DECL_GIMPLE_REG_P (var) = false;
3496 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3497 	    TREE_ADDRESSABLE (var) = true;
3498 	  use = fold_build2 (MEM_REF, caller_type,
3499 			     build_fold_addr_expr (var),
3500 			     build_int_cst (ptr_type_node, 0));
3501 	}
3502     }
3503 
3504   STRIP_USELESS_TYPE_CONVERSION (use);
3505 
3506   if (DECL_BY_REFERENCE (result))
3507     {
3508       TREE_ADDRESSABLE (var) = 1;
3509       var = build_fold_addr_expr (var);
3510     }
3511 
3512  done:
3513   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3514      way, when the RESULT_DECL is encountered, it will be
3515      automatically replaced by the VAR_DECL.
3516 
3517      When returning by reference, ensure that RESULT_DECL remaps to
3518      gimple_val.  */
3519   if (DECL_BY_REFERENCE (result)
3520       && !is_gimple_val (var))
3521     {
3522       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3523       insert_decl_map (id, result, temp);
3524       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3525 	 its default_def SSA_NAME.  */
3526       if (gimple_in_ssa_p (id->src_cfun)
3527 	  && is_gimple_reg (result))
3528 	{
3529 	  temp = make_ssa_name (temp);
3530 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3531 	}
3532       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3533     }
3534   else
3535     insert_decl_map (id, result, var);
3536 
3537   /* Remember this so we can ignore it in remap_decls.  */
3538   id->retvar = var;
3539 
3540   /* If returned bounds are used, then make var for them.  */
3541   if (return_bounds)
3542   {
3543     tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3544     DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3545     TREE_NO_WARNING (bndtemp) = 1;
3546     declare_inline_vars (id->block, bndtemp);
3547 
3548     id->retbnd = bndtemp;
3549     insert_init_stmt (id, entry_bb,
3550 		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3551   }
3552 
3553   return use;
3554 }
3555 
3556 /* Determine if the function can be copied.  If so, return NULL.  If
3557    not, return a string describing the reason for failure.  */
3558 
3559 const char *
3560 copy_forbidden (struct function *fun)
3561 {
3562   const char *reason = fun->cannot_be_copied_reason;
3563 
3564   /* Only examine the function once.  */
3565   if (fun->cannot_be_copied_set)
3566     return reason;
3567 
3568   /* We cannot copy a function that receives a non-local goto
3569      because we cannot remap the destination label used in the
3570      function that is performing the non-local goto.  */
3571   /* ??? Actually, this should be possible, if we work at it.
3572      No doubt there's just a handful of places that simply
3573      assume it doesn't happen and don't substitute properly.  */
3574   if (fun->has_nonlocal_label)
3575     {
3576       reason = G_("function %q+F can never be copied "
3577 		  "because it receives a non-local goto");
3578       goto fail;
3579     }
3580 
3581   if (fun->has_forced_label_in_static)
3582     {
3583       reason = G_("function %q+F can never be copied because it saves "
3584 		  "address of local label in a static variable");
3585       goto fail;
3586     }
3587 
3588  fail:
3589   fun->cannot_be_copied_reason = reason;
3590   fun->cannot_be_copied_set = true;
3591   return reason;
3592 }
3593 
3594 
3595 static const char *inline_forbidden_reason;
3596 
3597 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3598    iff a function cannot be inlined.  Also sets the reason why.  */
3599 
3600 static tree
3601 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3602 			 struct walk_stmt_info *wip)
3603 {
3604   tree fn = (tree) wip->info;
3605   tree t;
3606   gimple *stmt = gsi_stmt (*gsi);
3607 
3608   switch (gimple_code (stmt))
3609     {
3610     case GIMPLE_CALL:
3611       /* Refuse to inline an alloca call unless the user explicitly forced it,
3612 	 as this may change the program's memory overhead drastically when the
3613 	 function using alloca is called in a loop.  In the GCC present in
3614 	 SPEC2000, inlining into schedule_block caused it to require 2GB of
3615 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3616 	 VLA objects, as those can't cause unbounded growth (they're always
3617 	 wrapped inside stack_save/stack_restore regions).  */
3618       if (gimple_maybe_alloca_call_p (stmt)
3619 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3620 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3621 	{
3622 	  inline_forbidden_reason
3623 	    = G_("function %q+F can never be inlined because it uses "
3624 		 "alloca (override using the always_inline attribute)");
3625 	  *handled_ops_p = true;
3626 	  return fn;
3627 	}
3628 
3629       t = gimple_call_fndecl (stmt);
3630       if (t == NULL_TREE)
3631 	break;
3632 
3633       /* We cannot inline functions that call setjmp.  */
3634       if (setjmp_call_p (t))
3635 	{
3636 	  inline_forbidden_reason
3637 	    = G_("function %q+F can never be inlined because it uses setjmp");
3638 	  *handled_ops_p = true;
3639 	  return t;
3640 	}
3641 
3642       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3643 	switch (DECL_FUNCTION_CODE (t))
3644 	  {
3645 	    /* We cannot inline functions that take a variable number of
3646 	       arguments.  */
3647 	  case BUILT_IN_VA_START:
3648 	  case BUILT_IN_NEXT_ARG:
3649 	  case BUILT_IN_VA_END:
3650 	    inline_forbidden_reason
3651 	      = G_("function %q+F can never be inlined because it "
3652 		   "uses variable argument lists");
3653 	    *handled_ops_p = true;
3654 	    return t;
3655 
3656 	  case BUILT_IN_LONGJMP:
3657 	    /* We can't inline functions that call __builtin_longjmp at
3658 	       all.  The non-local goto machinery really requires the
3659 	       destination be in a different function.  If we allow the
3660 	       function calling __builtin_longjmp to be inlined into the
3661 	       function calling __builtin_setjmp, Things will Go Awry.  */
3662 	    inline_forbidden_reason
3663 	      = G_("function %q+F can never be inlined because "
3664 		   "it uses setjmp-longjmp exception handling");
3665 	    *handled_ops_p = true;
3666 	    return t;
3667 
3668 	  case BUILT_IN_NONLOCAL_GOTO:
3669 	    /* Similarly.  */
3670 	    inline_forbidden_reason
3671 	      = G_("function %q+F can never be inlined because "
3672 		   "it uses non-local goto");
3673 	    *handled_ops_p = true;
3674 	    return t;
3675 
3676 	  case BUILT_IN_RETURN:
3677 	  case BUILT_IN_APPLY_ARGS:
3678 	    /* If a __builtin_apply_args caller would be inlined,
3679 	       it would be saving arguments of the function it has
3680 	       been inlined into.  Similarly, __builtin_return would
3681 	       return from the function the callee has been inlined into.  */
3682 	    inline_forbidden_reason
3683 	      = G_("function %q+F can never be inlined because "
3684 		   "it uses __builtin_return or __builtin_apply_args");
3685 	    *handled_ops_p = true;
3686 	    return t;
3687 
3688 	  default:
3689 	    break;
3690 	  }
3691       break;
3692 
3693     case GIMPLE_GOTO:
3694       t = gimple_goto_dest (stmt);
3695 
3696       /* We will not inline a function which uses computed goto.  The
3697 	 addresses of its local labels, which may be tucked into
3698 	 global storage, are of course not constant across
3699 	 instantiations, which causes unexpected behavior.  */
3700       if (TREE_CODE (t) != LABEL_DECL)
3701 	{
3702 	  inline_forbidden_reason
3703 	    = G_("function %q+F can never be inlined "
3704 		 "because it contains a computed goto");
3705 	  *handled_ops_p = true;
3706 	  return t;
3707 	}
3708       break;
3709 
3710     default:
3711       break;
3712     }
3713 
3714   *handled_ops_p = false;
3715   return NULL_TREE;
3716 }
3717 
3718 /* Return true if FNDECL is a function that cannot be inlined into
3719    another one.  */
3720 
3721 static bool
3722 inline_forbidden_p (tree fndecl)
3723 {
3724   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3725   struct walk_stmt_info wi;
3726   basic_block bb;
3727   bool forbidden_p = false;
3728 
3729   /* First check for shared reasons not to copy the code.  */
3730   inline_forbidden_reason = copy_forbidden (fun);
3731   if (inline_forbidden_reason != NULL)
3732     return true;
3733 
3734   /* Next, walk the statements of the function looking for
3735      constructs we can't handle, or that are non-optimal for inlining.  */
3736   hash_set<tree> visited_nodes;
3737   memset (&wi, 0, sizeof (wi));
3738   wi.info = (void *) fndecl;
3739   wi.pset = &visited_nodes;
3740 
3741   FOR_EACH_BB_FN (bb, fun)
3742     {
3743       gimple *ret;
3744       gimple_seq seq = bb_seq (bb);
3745       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3746       forbidden_p = (ret != NULL);
3747       if (forbidden_p)
3748 	break;
3749     }
3750 
3751   return forbidden_p;
3752 }
3753 
3754 /* Return false if the function FNDECL cannot be inlined on account of its
3755    attributes, true otherwise.  */
3756 static bool
3757 function_attribute_inlinable_p (const_tree fndecl)
3758 {
3759   if (targetm.attribute_table)
3760     {
3761       const_tree a;
3762 
3763       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3764 	{
3765 	  const_tree name = TREE_PURPOSE (a);
3766 	  int i;
3767 
3768 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3769 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3770 	      return targetm.function_attribute_inlinable_p (fndecl);
3771 	}
3772     }
3773 
3774   return true;
3775 }
3776 
3777 /* Returns nonzero if FN is a function that does not have any
3778    fundamental inline blocking properties.  */
3779 
3780 bool
3781 tree_inlinable_function_p (tree fn)
3782 {
3783   bool inlinable = true;
3784   bool do_warning;
3785   tree always_inline;
3786 
3787   /* If we've already decided this function shouldn't be inlined,
3788      there's no need to check again.  */
3789   if (DECL_UNINLINABLE (fn))
3790     return false;
3791 
3792   /* We only warn for functions declared `inline' by the user.  */
3793   do_warning = (warn_inline
3794 		&& DECL_DECLARED_INLINE_P (fn)
3795 		&& !DECL_NO_INLINE_WARNING_P (fn)
3796 		&& !DECL_IN_SYSTEM_HEADER (fn));
3797 
3798   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3799 
3800   if (flag_no_inline
3801       && always_inline == NULL)
3802     {
3803       if (do_warning)
3804         warning (OPT_Winline, "function %q+F can never be inlined because it "
3805                  "is suppressed using -fno-inline", fn);
3806       inlinable = false;
3807     }
3808 
3809   else if (!function_attribute_inlinable_p (fn))
3810     {
3811       if (do_warning)
3812         warning (OPT_Winline, "function %q+F can never be inlined because it "
3813                  "uses attributes conflicting with inlining", fn);
3814       inlinable = false;
3815     }
3816 
3817   else if (inline_forbidden_p (fn))
3818     {
3819       /* See if we should warn about uninlinable functions.  Previously,
3820 	 some of these warnings would be issued while trying to expand
3821 	 the function inline, but that would cause multiple warnings
3822 	 about functions that would for example call alloca.  But since
3823 	 this a property of the function, just one warning is enough.
3824 	 As a bonus we can now give more details about the reason why a
3825 	 function is not inlinable.  */
3826       if (always_inline)
3827 	error (inline_forbidden_reason, fn);
3828       else if (do_warning)
3829 	warning (OPT_Winline, inline_forbidden_reason, fn);
3830 
3831       inlinable = false;
3832     }
3833 
3834   /* Squirrel away the result so that we don't have to check again.  */
3835   DECL_UNINLINABLE (fn) = !inlinable;
3836 
3837   return inlinable;
3838 }
3839 
3840 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3841    word size, take a possible memcpy call into account, and return the
3842    cost based on whether we optimize for size or speed according to SPEED_P.  */
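/* A worked example (a sketch; the machine parameters are assumptions):
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4, a 24-byte
   structure costs (24 + 8 - 1) / 8 == 3, a 64-byte structure exceeds
   8 * 4 == 32 bytes and is costed as a memcpy call (4), and a 32-byte
   vector type with a preferred 16-byte SIMD mode costs 32 / 16 == 2.  */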
3843 
3844 int
3845 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3846 {
3847   HOST_WIDE_INT size;
3848 
3849   gcc_assert (!VOID_TYPE_P (type));
3850 
3851   if (TREE_CODE (type) == VECTOR_TYPE)
3852     {
3853       machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3854       machine_mode simd
3855 	= targetm.vectorize.preferred_simd_mode (inner);
3856       int simd_mode_size = GET_MODE_SIZE (simd);
3857       return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3858 	      / simd_mode_size);
3859     }
3860 
3861   size = int_size_in_bytes (type);
3862 
3863   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3864     /* Cost of a memcpy call, 3 arguments and the call.  */
3865     return 4;
3866   else
3867     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3868 }
3869 
3870 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3871 
3872 static int
3873 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3874 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3875 {
3876   switch (code)
3877     {
3878     /* These are "free" conversions, or their presumed cost
3879        is folded into other operations.  */
3880     case RANGE_EXPR:
3881     CASE_CONVERT:
3882     case COMPLEX_EXPR:
3883     case PAREN_EXPR:
3884     case VIEW_CONVERT_EXPR:
3885       return 0;
3886 
3887     /* Assign cost of 1 to usual operations.
3888        ??? We may consider mapping RTL costs to this.  */
3889     case COND_EXPR:
3890     case VEC_COND_EXPR:
3891     case VEC_PERM_EXPR:
3892 
3893     case PLUS_EXPR:
3894     case POINTER_PLUS_EXPR:
3895     case MINUS_EXPR:
3896     case MULT_EXPR:
3897     case MULT_HIGHPART_EXPR:
3898     case FMA_EXPR:
3899 
3900     case ADDR_SPACE_CONVERT_EXPR:
3901     case FIXED_CONVERT_EXPR:
3902     case FIX_TRUNC_EXPR:
3903 
3904     case NEGATE_EXPR:
3905     case FLOAT_EXPR:
3906     case MIN_EXPR:
3907     case MAX_EXPR:
3908     case ABS_EXPR:
3909 
3910     case LSHIFT_EXPR:
3911     case RSHIFT_EXPR:
3912     case LROTATE_EXPR:
3913     case RROTATE_EXPR:
3914 
3915     case BIT_IOR_EXPR:
3916     case BIT_XOR_EXPR:
3917     case BIT_AND_EXPR:
3918     case BIT_NOT_EXPR:
3919 
3920     case TRUTH_ANDIF_EXPR:
3921     case TRUTH_ORIF_EXPR:
3922     case TRUTH_AND_EXPR:
3923     case TRUTH_OR_EXPR:
3924     case TRUTH_XOR_EXPR:
3925     case TRUTH_NOT_EXPR:
3926 
3927     case LT_EXPR:
3928     case LE_EXPR:
3929     case GT_EXPR:
3930     case GE_EXPR:
3931     case EQ_EXPR:
3932     case NE_EXPR:
3933     case ORDERED_EXPR:
3934     case UNORDERED_EXPR:
3935 
3936     case UNLT_EXPR:
3937     case UNLE_EXPR:
3938     case UNGT_EXPR:
3939     case UNGE_EXPR:
3940     case UNEQ_EXPR:
3941     case LTGT_EXPR:
3942 
3943     case CONJ_EXPR:
3944 
3945     case PREDECREMENT_EXPR:
3946     case PREINCREMENT_EXPR:
3947     case POSTDECREMENT_EXPR:
3948     case POSTINCREMENT_EXPR:
3949 
3950     case REALIGN_LOAD_EXPR:
3951 
3952     case REDUC_MAX_EXPR:
3953     case REDUC_MIN_EXPR:
3954     case REDUC_PLUS_EXPR:
3955     case WIDEN_SUM_EXPR:
3956     case WIDEN_MULT_EXPR:
3957     case DOT_PROD_EXPR:
3958     case SAD_EXPR:
3959     case WIDEN_MULT_PLUS_EXPR:
3960     case WIDEN_MULT_MINUS_EXPR:
3961     case WIDEN_LSHIFT_EXPR:
3962 
3963     case VEC_WIDEN_MULT_HI_EXPR:
3964     case VEC_WIDEN_MULT_LO_EXPR:
3965     case VEC_WIDEN_MULT_EVEN_EXPR:
3966     case VEC_WIDEN_MULT_ODD_EXPR:
3967     case VEC_UNPACK_HI_EXPR:
3968     case VEC_UNPACK_LO_EXPR:
3969     case VEC_UNPACK_FLOAT_HI_EXPR:
3970     case VEC_UNPACK_FLOAT_LO_EXPR:
3971     case VEC_PACK_TRUNC_EXPR:
3972     case VEC_PACK_SAT_EXPR:
3973     case VEC_PACK_FIX_TRUNC_EXPR:
3974     case VEC_WIDEN_LSHIFT_HI_EXPR:
3975     case VEC_WIDEN_LSHIFT_LO_EXPR:
3976 
3977       return 1;
3978 
3979     /* A few special cases of expensive operations.  This is useful
3980        to avoid inlining functions having too many of these.  */
3981     case TRUNC_DIV_EXPR:
3982     case CEIL_DIV_EXPR:
3983     case FLOOR_DIV_EXPR:
3984     case ROUND_DIV_EXPR:
3985     case EXACT_DIV_EXPR:
3986     case TRUNC_MOD_EXPR:
3987     case CEIL_MOD_EXPR:
3988     case FLOOR_MOD_EXPR:
3989     case ROUND_MOD_EXPR:
3990     case RDIV_EXPR:
3991       if (TREE_CODE (op2) != INTEGER_CST)
3992         return weights->div_mod_cost;
3993       return 1;
3994 
3995     /* Bit-field insertion needs several shift and mask operations.  */
3996     case BIT_INSERT_EXPR:
3997       return 3;
3998 
3999     default:
4000       /* We expect a copy assignment with no operator.  */
4001       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4002       return 0;
4003     }
4004 }
4005 
4006 
4007 /* Estimate number of instructions that will be created by expanding
4008    the statements in the statement sequence STMTS.
4009    WEIGHTS contains weights attributed to various constructs.  */
4010 
4011 int
4012 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4013 {
4014   int cost;
4015   gimple_stmt_iterator gsi;
4016 
4017   cost = 0;
4018   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4019     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4020 
4021   return cost;
4022 }
4023 
4024 
4025 /* Estimate number of instructions that will be created by expanding STMT.
4026    WEIGHTS contains weights attributed to various constructs.  */
4027 
4028 int
4029 estimate_num_insns (gimple *stmt, eni_weights *weights)
4030 {
4031   unsigned cost, i;
4032   enum gimple_code code = gimple_code (stmt);
4033   tree lhs;
4034   tree rhs;
4035 
4036   switch (code)
4037     {
4038     case GIMPLE_ASSIGN:
4039       /* Try to estimate the cost of assignments.  We have two cases to
4040 	 deal with:
4041 	 1) Simple assignments to registers;
4042 	 2) Stores to things that must live in memory.  This includes
4043 	    "normal" stores to scalars, but also assignments of large
4044 	    structures, or constructors of big arrays.
4045 
4046 	 Let us look at these two cases, assuming we have "a = b + C":
4047 	 <GIMPLE_ASSIGN <var_decl "a">
4048 	        <plus_expr <var_decl "b"> <constant C>>
4049 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4050 	 any target, because "a" usually ends up in a real register.  Hence
4051 	 the only cost of this expression comes from the PLUS_EXPR, and we
4052 	 can ignore the GIMPLE_ASSIGN.
4053 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4054 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4055 	 of moving something into "a", which we compute using the function
4056 	 estimate_move_cost.  */
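      /* A sketch: for a store "*p_1 = b_2 + c_3" the cost below is
	 estimate_move_cost (TREE_TYPE (lhs), speed) for the store plus 1
	 for the PLUS_EXPR; for "a_1 = b_2 + c_3" with a_1 in a register
	 only the 1 remains.  */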
4057       if (gimple_clobber_p (stmt))
4058 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4059 
4060       lhs = gimple_assign_lhs (stmt);
4061       rhs = gimple_assign_rhs1 (stmt);
4062 
4063       cost = 0;
4064 
4065       /* Account for the cost of moving to / from memory.  */
4066       if (gimple_store_p (stmt))
4067 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4068       if (gimple_assign_load_p (stmt))
4069 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4070 
4071       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4072       				      gimple_assign_rhs1 (stmt),
4073 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4074 				      == GIMPLE_BINARY_RHS
4075 				      ? gimple_assign_rhs2 (stmt) : NULL);
4076       break;
4077 
4078     case GIMPLE_COND:
4079       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4080       				         gimple_op (stmt, 0),
4081 				         gimple_op (stmt, 1));
4082       break;
4083 
4084     case GIMPLE_SWITCH:
4085       {
4086 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4087 	/* Take into account the cost of the switch + guess 2 conditional jumps for
4088 	   each case label.
4089 
4090 	   TODO: once the switch expansion logic is sufficiently separated, we can
4091 	   do a better job of estimating the cost of the switch.  */
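	/* E.g. (illustrative numbers only): with 8 labels this yields
	   floor_log2 (8) * 2 == 6 when estimating time and 8 * 2 == 16
	   when estimating size.  */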
4092 	if (weights->time_based)
4093 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4094 	else
4095 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4096       }
4097       break;
4098 
4099     case GIMPLE_CALL:
4100       {
4101 	tree decl;
4102 
4103 	if (gimple_call_internal_p (stmt))
4104 	  return 0;
4105 	else if ((decl = gimple_call_fndecl (stmt))
4106 		 && DECL_BUILT_IN (decl))
4107 	  {
4108 	    /* Do not special-case builtins where we see the body.
4109 	       This just confuses the inliner.  */
4110 	    struct cgraph_node *node;
4111 	    if (!(node = cgraph_node::get (decl))
4112 		|| node->definition)
4113 	      ;
4114 	    /* For builtins that are likely expanded to nothing or
4115 	       inlined, do not account for operand costs.  */
4116 	    else if (is_simple_builtin (decl))
4117 	      return 0;
4118 	    else if (is_inexpensive_builtin (decl))
4119 	      return weights->target_builtin_call_cost;
4120 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4121 	      {
4122 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4123 		   specialize the cheap expansion we do here.
4124 		   ???  This asks for a more general solution.  */
4125 		switch (DECL_FUNCTION_CODE (decl))
4126 		  {
4127 		    case BUILT_IN_POW:
4128 		    case BUILT_IN_POWF:
4129 		    case BUILT_IN_POWL:
4130 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4131 			  && (real_equal
4132 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4133 			       &dconst2)))
4134 			return estimate_operator_cost
4135 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4136 			     gimple_call_arg (stmt, 0));
4137 		      break;
4138 
4139 		    default:
4140 		      break;
4141 		  }
4142 	      }
4143 	  }
4144 
4145 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4146 	if (gimple_call_lhs (stmt))
4147 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4148 				      weights->time_based);
4149 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4150 	  {
4151 	    tree arg = gimple_call_arg (stmt, i);
4152 	    cost += estimate_move_cost (TREE_TYPE (arg),
4153 					weights->time_based);
4154 	  }
4155 	break;
4156       }
4157 
4158     case GIMPLE_RETURN:
4159       return weights->return_cost;
4160 
4161     case GIMPLE_GOTO:
4162     case GIMPLE_LABEL:
4163     case GIMPLE_NOP:
4164     case GIMPLE_PHI:
4165     case GIMPLE_PREDICT:
4166     case GIMPLE_DEBUG:
4167       return 0;
4168 
4169     case GIMPLE_ASM:
4170       {
4171 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4172 	/* 1000 means infinity. This avoids overflows later
4173 	   with very long asm statements.  */
4174 	if (count > 1000)
4175 	  count = 1000;
4176 	return count;
4177       }
4178 
4179     case GIMPLE_RESX:
4180       /* This is either going to be an external function call with one
4181 	 argument, or two register copy statements plus a goto.  */
4182       return 2;
4183 
4184     case GIMPLE_EH_DISPATCH:
4185       /* ??? This is going to turn into a switch statement.  Ideally
4186 	 we'd have a look at the eh region and estimate the number of
4187 	 edges involved.  */
4188       return 10;
4189 
4190     case GIMPLE_BIND:
4191       return estimate_num_insns_seq (
4192 	       gimple_bind_body (as_a <gbind *> (stmt)),
4193 	       weights);
4194 
4195     case GIMPLE_EH_FILTER:
4196       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4197 
4198     case GIMPLE_CATCH:
4199       return estimate_num_insns_seq (gimple_catch_handler (
4200 				       as_a <gcatch *> (stmt)),
4201 				     weights);
4202 
4203     case GIMPLE_TRY:
4204       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4205               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4206 
4207     /* OMP directives are generally very expensive.  */
4208 
4209     case GIMPLE_OMP_RETURN:
4210     case GIMPLE_OMP_SECTIONS_SWITCH:
4211     case GIMPLE_OMP_ATOMIC_STORE:
4212     case GIMPLE_OMP_CONTINUE:
4213       /* ...except these, which are cheap.  */
4214       return 0;
4215 
4216     case GIMPLE_OMP_ATOMIC_LOAD:
4217       return weights->omp_cost;
4218 
4219     case GIMPLE_OMP_FOR:
4220       return (weights->omp_cost
4221               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4222               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4223 
4224     case GIMPLE_OMP_PARALLEL:
4225     case GIMPLE_OMP_TASK:
4226     case GIMPLE_OMP_CRITICAL:
4227     case GIMPLE_OMP_MASTER:
4228     case GIMPLE_OMP_TASKGROUP:
4229     case GIMPLE_OMP_ORDERED:
4230     case GIMPLE_OMP_SECTION:
4231     case GIMPLE_OMP_SECTIONS:
4232     case GIMPLE_OMP_SINGLE:
4233     case GIMPLE_OMP_TARGET:
4234     case GIMPLE_OMP_TEAMS:
4235       return (weights->omp_cost
4236               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4237 
4238     case GIMPLE_TRANSACTION:
4239       return (weights->tm_cost
4240 	      + estimate_num_insns_seq (gimple_transaction_body (
4241 					  as_a <gtransaction *> (stmt)),
4242 					weights));
4243 
4244     default:
4245       gcc_unreachable ();
4246     }
4247 
4248   return cost;
4249 }
4250 
4251 /* Estimate number of instructions that will be created by expanding
4252    function FNDECL.  WEIGHTS contains weights attributed to various
4253    constructs.  */
4254 
4255 int
4256 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4257 {
4258   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4259   gimple_stmt_iterator bsi;
4260   basic_block bb;
4261   int n = 0;
4262 
4263   gcc_assert (my_function && my_function->cfg);
4264   FOR_EACH_BB_FN (bb, my_function)
4265     {
4266       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4267 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4268     }
4269 
4270   return n;
4271 }
4272 
4273 
4274 /* Initializes weights used by estimate_num_insns.  */
4275 
4276 void
4277 init_inline_once (void)
4278 {
4279   eni_size_weights.call_cost = 1;
4280   eni_size_weights.indirect_call_cost = 3;
4281   eni_size_weights.target_builtin_call_cost = 1;
4282   eni_size_weights.div_mod_cost = 1;
4283   eni_size_weights.omp_cost = 40;
4284   eni_size_weights.tm_cost = 10;
4285   eni_size_weights.time_based = false;
4286   eni_size_weights.return_cost = 1;
4287 
4288   /* Estimating the time for a call is difficult, since we have no idea what the
4289      called function does.  In the current uses of eni_time_weights,
4290      underestimating the cost does less harm than overestimating it, so
4291      we choose a rather small value here.  */
4292   eni_time_weights.call_cost = 10;
4293   eni_time_weights.indirect_call_cost = 15;
4294   eni_time_weights.target_builtin_call_cost = 1;
4295   eni_time_weights.div_mod_cost = 10;
4296   eni_time_weights.omp_cost = 40;
4297   eni_time_weights.tm_cost = 40;
4298   eni_time_weights.time_based = true;
4299   eni_time_weights.return_cost = 2;
4300 }
4301 
4302 
4303 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4304 
4305 static void
4306 prepend_lexical_block (tree current_block, tree new_block)
4307 {
4308   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4309   BLOCK_SUBBLOCKS (current_block) = new_block;
4310   BLOCK_SUPERCONTEXT (new_block) = current_block;
4311 }
4312 
4313 /* Add local variables from CALLEE to CALLER.  */
4314 
4315 static inline void
4316 add_local_variables (struct function *callee, struct function *caller,
4317 		     copy_body_data *id)
4318 {
4319   tree var;
4320   unsigned ix;
4321 
4322   FOR_EACH_LOCAL_DECL (callee, ix, var)
4323     if (!can_be_nonlocal (var, id))
4324       {
4325         tree new_var = remap_decl (var, id);
4326 
4327         /* Remap debug-expressions.  */
4328 	if (VAR_P (new_var)
4329 	    && DECL_HAS_DEBUG_EXPR_P (var)
4330 	    && new_var != var)
4331 	  {
4332 	    tree tem = DECL_DEBUG_EXPR (var);
4333 	    bool old_regimplify = id->regimplify;
4334 	    id->remapping_type_depth++;
4335 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4336 	    id->remapping_type_depth--;
4337 	    id->regimplify = old_regimplify;
4338 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4339 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4340 	  }
4341 	add_local_decl (caller, new_var);
4342       }
4343 }
4344 
4345 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4346    have brought in or introduced any debug stmts for SRCVAR.  */
4347 
4348 static inline void
4349 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4350 {
4351   tree *remappedvarp = id->decl_map->get (srcvar);
4352 
4353   if (!remappedvarp)
4354     return;
4355 
4356   if (!VAR_P (*remappedvarp))
4357     return;
4358 
4359   if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4360     return;
4361 
4362   tree tvar = target_for_debug_bind (*remappedvarp);
4363   if (!tvar)
4364     return;
4365 
4366   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4367 					  id->call_stmt);
4368   gimple_seq_add_stmt (bindings, stmt);
4369 }
4370 
4371 /* For each inlined variable for which we may have debug bind stmts,
4372    add before GSI a final debug stmt resetting it, marking the end of
4373    its life, so that var-tracking knows it doesn't have to compute
4374    further locations for it.  */
4375 
4376 static inline void
4377 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4378 {
4379   tree var;
4380   unsigned ix;
4381   gimple_seq bindings = NULL;
4382 
4383   if (!gimple_in_ssa_p (id->src_cfun))
4384     return;
4385 
4386   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4387     return;
4388 
4389   for (var = DECL_ARGUMENTS (id->src_fn);
4390        var; var = DECL_CHAIN (var))
4391     reset_debug_binding (id, var, &bindings);
4392 
4393   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4394     reset_debug_binding (id, var, &bindings);
4395 
4396   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4397 }
4398 
4399 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4400 
4401 static bool
4402 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4403 {
4404   tree use_retvar;
4405   tree fn;
4406   hash_map<tree, tree> *dst;
4407   hash_map<tree, tree> *st = NULL;
4408   tree return_slot;
4409   tree modify_dest;
4410   tree return_bounds = NULL;
4411   struct cgraph_edge *cg_edge;
4412   cgraph_inline_failed_t reason;
4413   basic_block return_block;
4414   edge e;
4415   gimple_stmt_iterator gsi, stmt_gsi;
4416   bool successfully_inlined = false;
4417   bool purge_dead_abnormal_edges;
4418   gcall *call_stmt;
4419   unsigned int i;
4420   unsigned int prop_mask, src_properties;
4421   struct function *dst_cfun;
4422   tree simduid;
4423   use_operand_p use;
4424   gimple *simtenter_stmt = NULL;
4425   vec<tree> *simtvars_save;
4426 
4427   /* The gimplifier uses input_location in too many places, such as
4428      internal_get_tmp_var ().  */
4429   location_t saved_location = input_location;
4430   input_location = gimple_location (stmt);
4431 
4432   /* From here on, we're only interested in CALL_EXPRs.  */
4433   call_stmt = dyn_cast <gcall *> (stmt);
4434   if (!call_stmt)
4435     goto egress;
4436 
4437   cg_edge = id->dst_node->get_edge (stmt);
4438   gcc_checking_assert (cg_edge);
4439   /* First, see if we can figure out what function is being called.
4440      If we cannot, then there is no hope of inlining the function.  */
4441   if (cg_edge->indirect_unknown_callee)
4442     goto egress;
4443   fn = cg_edge->callee->decl;
4444   gcc_checking_assert (fn);
4445 
4446   /* If FN is a declaration of a function in a nested scope that was
4447      globally declared inline, we don't set its DECL_INITIAL.
4448      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4449      C++ front-end uses it for cdtors to refer to their internal
4450      declarations, which are not real functions.  Fortunately those
4451      don't have trees to be saved, so we can tell by checking their
4452      gimple_body.  */
4453   if (!DECL_INITIAL (fn)
4454       && DECL_ABSTRACT_ORIGIN (fn)
4455       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4456     fn = DECL_ABSTRACT_ORIGIN (fn);
4457 
4458   /* Don't try to inline functions that are not well-suited to inlining.  */
4459   if (cg_edge->inline_failed)
4460     {
4461       reason = cg_edge->inline_failed;
4462       /* If this call was originally indirect, we do not want to emit any
4463 	 inlining related warnings or sorry messages because there are no
4464 	 guarantees regarding those.  */
4465       if (cg_edge->indirect_inlining_edge)
4466 	goto egress;
4467 
4468       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4469           /* For extern inline functions that get redefined we have always
4470 	     silently ignored the always_inline flag.  Better behavior would
4471 	     be to keep both bodies and use the extern inline body
4472 	     for inlining, but we can't do that because frontends overwrite
4473 	     the body.  */
4474 	  && !cg_edge->callee->local.redefined_extern_inline
4475 	  /* During early inline pass, report only when optimization is
4476 	     not turned on.  */
4477 	  && (symtab->global_info_ready
4478 	      || !optimize
4479 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4480 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4481 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4482 	{
4483 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4484 		 cgraph_inline_failed_string (reason));
4485 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4486 	    inform (gimple_location (stmt), "called from here");
4487 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4488 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4489                    "called from this function");
4490 	}
4491       else if (warn_inline
4492 	       && DECL_DECLARED_INLINE_P (fn)
4493 	       && !DECL_NO_INLINE_WARNING_P (fn)
4494 	       && !DECL_IN_SYSTEM_HEADER (fn)
4495 	       && reason != CIF_UNSPECIFIED
4496 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4497 	       /* Do not warn about not inlined recursive calls.  */
4498 	       && !cg_edge->recursive_p ()
4499 	       /* Avoid warnings during early inline pass. */
4500 	       && symtab->global_info_ready)
4501 	{
4502 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4503 		       fn, _(cgraph_inline_failed_string (reason))))
4504 	    {
4505 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4506 		inform (gimple_location (stmt), "called from here");
4507 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4508 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4509                        "called from this function");
4510 	    }
4511 	}
4512       goto egress;
4513     }
4514   id->src_node = cg_edge->callee;
4515 
4516   /* If the callee is a thunk, all we need is to adjust the THIS pointer
4517      and redirect to the function being thunked.  */
4518   if (id->src_node->thunk.thunk_p)
4519     {
4520       cgraph_edge *edge;
4521       tree virtual_offset = NULL;
4522       int freq = cg_edge->frequency;
4523       gcov_type count = cg_edge->count;
4524       tree op;
4525       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4526 
4527       cg_edge->remove ();
4528       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4529 		   		           gimple_uid (stmt),
4530 				   	   REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
4531 				           true);
4532       edge->frequency = freq;
4533       edge->count = count;
4534       if (id->src_node->thunk.virtual_offset_p)
4535         virtual_offset = size_int (id->src_node->thunk.virtual_value);
4536       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4537 			      NULL);
4538       gsi_insert_before (&iter, gimple_build_assign (op,
4539 						    gimple_call_arg (stmt, 0)),
4540 			 GSI_NEW_STMT);
4541       gcc_assert (id->src_node->thunk.this_adjusting);
4542       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4543 			 virtual_offset);
4544 
4545       gimple_call_set_arg (stmt, 0, op);
4546       gimple_call_set_fndecl (stmt, edge->callee->decl);
4547       update_stmt (stmt);
4548       id->src_node->remove ();
4549       expand_call_inline (bb, stmt, id);
4550       maybe_remove_unused_call_args (cfun, stmt);
4551       return true;
4552     }
4553   fn = cg_edge->callee->decl;
4554   cg_edge->callee->get_untransformed_body ();
4555 
4556   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4557     cg_edge->callee->verify ();
4558 
4559   /* We will be inlining this callee.  */
4560   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4561   id->assign_stmts.create (0);
4562 
4563   /* Update the caller's EH personality.  */
4564   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4565     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4566       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4567 
4568   /* Split the block holding the GIMPLE_CALL.  */
4569   e = split_block (bb, stmt);
4570   bb = e->src;
4571   return_block = e->dest;
4572   remove_edge (e);
4573 
4574   /* split_block splits after the statement; work around this by
4575      moving the call into the second block manually.  Not pretty,
4576      but seems easier than doing the CFG manipulation by hand
4577      when the GIMPLE_CALL is in the last statement of BB.  */
4578   stmt_gsi = gsi_last_bb (bb);
4579   gsi_remove (&stmt_gsi, false);
4580 
4581   /* If the GIMPLE_CALL was in the last statement of BB, it may have
4582      been the source of abnormal edges.  In this case, schedule
4583      the removal of dead abnormal edges.  */
4584   gsi = gsi_start_bb (return_block);
4585   if (gsi_end_p (gsi))
4586     {
4587       gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4588       purge_dead_abnormal_edges = true;
4589     }
4590   else
4591     {
4592       gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4593       purge_dead_abnormal_edges = false;
4594     }
4595 
4596   stmt_gsi = gsi_start_bb (return_block);
4597 
4598   /* Build a block containing code to initialize the arguments, the
4599      actual inline expansion of the body, and a label for the return
4600      statements within the function to jump to.  The type of the
4601      statement expression is the return type of the function call.
4602      ???  If the call does not have an associated block then we will
4603      remap all callee blocks to NULL, effectively dropping most of
4604      its debug information.  This should only happen for calls to
4605      artificial decls inserted by the compiler itself.  We need to
4606      either link the inlined blocks into the caller block tree or
4607      not refer to them in any way to not break GC for locations.  */
4608   if (gimple_block (stmt))
4609     {
4610       id->block = make_node (BLOCK);
4611       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4612       BLOCK_SOURCE_LOCATION (id->block)
4613 	= LOCATION_LOCUS (gimple_location (stmt));
4614       prepend_lexical_block (gimple_block (stmt), id->block);
4615     }
4616 
4617   /* Local declarations will be replaced by their equivalents in this
4618      map.  */
4619   st = id->decl_map;
4620   id->decl_map = new hash_map<tree, tree>;
4621   dst = id->debug_map;
4622   id->debug_map = NULL;
4623 
4624   /* Record the function we are about to inline.  */
4625   id->src_fn = fn;
4626   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4627   id->call_stmt = call_stmt;
4628 
4629   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4630      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4631   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4632   simtvars_save = id->dst_simt_vars;
4633   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4634       && (simduid = bb->loop_father->simduid) != NULL_TREE
4635       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4636       && single_imm_use (simduid, &use, &simtenter_stmt)
4637       && is_gimple_call (simtenter_stmt)
4638       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4639     vec_alloc (id->dst_simt_vars, 0);
4640   else
4641     id->dst_simt_vars = NULL;
4642 
4643   /* If the src function contains an IFN_VA_ARG, then so will the dst
4644      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4645   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4646   src_properties = id->src_cfun->curr_properties & prop_mask;
4647   if (src_properties != prop_mask)
4648     dst_cfun->curr_properties &= src_properties | ~prop_mask;
4649 
4650   gcc_assert (!id->src_cfun->after_inlining);
4651 
4652   id->entry_bb = bb;
4653   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4654     {
4655       gimple_stmt_iterator si = gsi_last_bb (bb);
4656       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4657       						   NOT_TAKEN),
4658 			GSI_NEW_STMT);
4659     }
4660   initialize_inlined_parameters (id, stmt, fn, bb);
4661 
4662   if (DECL_INITIAL (fn))
4663     {
4664       if (gimple_block (stmt))
4665 	{
4666 	  tree *var;
4667 
4668 	  prepend_lexical_block (id->block,
4669 				 remap_blocks (DECL_INITIAL (fn), id));
4670 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4671 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4672 				   == NULL_TREE));
4673 	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4674 	     otherwise, for DWARF, DW_TAG_formal_parameter entries will not be
4675 	     children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4676 	     under it.  The parameters can then be evaluated in the debugger,
4677 	     but don't show up in backtraces.  */
4678 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4679 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4680 	      {
4681 		tree v = *var;
4682 		*var = TREE_CHAIN (v);
4683 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4684 		BLOCK_VARS (id->block) = v;
4685 	      }
4686 	    else
4687 	      var = &TREE_CHAIN (*var);
4688 	}
4689       else
4690 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4691     }
4692 
4693   /* Return statements in the function body will be replaced by jumps
4694      to the RET_LABEL.  */
4695   gcc_assert (DECL_INITIAL (fn));
4696   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4697 
4698   /* Find the LHS to which the result of this call is assigned.  */
4699   return_slot = NULL;
4700   if (gimple_call_lhs (stmt))
4701     {
4702       modify_dest = gimple_call_lhs (stmt);
4703 
4704       /* Remember where to copy returned bounds.  */
4705       if (gimple_call_with_bounds_p (stmt)
4706 	  && TREE_CODE (modify_dest) == SSA_NAME)
4707 	{
4708 	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4709 	  if (retbnd)
4710 	    {
4711 	      return_bounds = gimple_call_lhs (retbnd);
4712 	      /* If the returned bounds are not used, just
4713 		 remove the unused call.  */
4714 	      if (!return_bounds)
4715 		{
4716 		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4717 		  gsi_remove (&iter, true);
4718 		}
4719 	    }
4720 	}
4721 
4722       /* The function which we are inlining might not return a value,
4723 	 in which case we should issue a warning that the function
4724 	 does not return a value.  In that case the optimizers will
4725 	 see that the variable to which the value is assigned was not
4726 	 initialized.  We do not want to issue a warning about that
4727 	 uninitialized variable.  */
4728       if (DECL_P (modify_dest))
4729 	TREE_NO_WARNING (modify_dest) = 1;
4730 
4731       if (gimple_call_return_slot_opt_p (call_stmt))
4732 	{
4733 	  return_slot = modify_dest;
4734 	  modify_dest = NULL;
4735 	}
4736     }
4737   else
4738     modify_dest = NULL;
4739 
4740   /* If we are inlining a call to the C++ operator new, we don't want
4741      to use type based alias analysis on the return value.  Otherwise
4742      we may get confused if the compiler sees that the inlined new
4743      function returns a pointer which was just deleted.  See bug
4744      33407.  */
4745   if (DECL_IS_OPERATOR_NEW (fn))
4746     {
4747       return_slot = NULL;
4748       modify_dest = NULL;
4749     }
4750 
4751   /* Declare the return variable for the function.  */
4752   use_retvar = declare_return_variable (id, return_slot, modify_dest,
4753 					return_bounds, bb);
4754 
4755   /* Add local vars in this inlined callee to caller.  */
4756   add_local_variables (id->src_cfun, cfun, id);
4757 
4758   if (dump_file && (dump_flags & TDF_DETAILS))
4759     {
4760       fprintf (dump_file, "Inlining ");
4761       print_generic_expr (dump_file, id->src_fn, 0);
4762       fprintf (dump_file, " to ");
4763       print_generic_expr (dump_file, id->dst_fn, 0);
4764       fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4765     }
4766 
4767   /* This is it.  Duplicate the callee body.  Assume callee is
4768      pre-gimplified.  Note that we must not alter the caller
4769      function in any way before this point, as this CALL_EXPR may be
4770      a self-referential call; if we're calling ourselves, we need to
4771      duplicate our body before altering anything.  */
4772   copy_body (id, cg_edge->callee->count,
4773   	     GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4774 	     bb, return_block, NULL);
4775 
4776   reset_debug_bindings (id, stmt_gsi);
4777 
4778   if (flag_stack_reuse != SR_NONE
4779       && (flag_sanitize & SANITIZE_KERNEL_ADDRESS) != 0)
4780     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4781       if (!TREE_THIS_VOLATILE (p))
4782 	{
4783 	  tree *varp = id->decl_map->get (p);
4784 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4785 	    {
4786 	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4787 	      gimple *clobber_stmt;
4788 	      TREE_THIS_VOLATILE (clobber) = 1;
4789 	      clobber_stmt = gimple_build_assign (*varp, clobber);
4790 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
4791 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4792 	    }
4793 	}
4794 
4795   /* Reset the escaped solution.  */
4796   if (cfun->gimple_df)
4797     pt_solution_reset (&cfun->gimple_df->escaped);
4798 
4799   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
4800   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4801     {
4802       size_t nargs = gimple_call_num_args (simtenter_stmt);
4803       vec<tree> *vars = id->dst_simt_vars;
4804       auto_vec<tree> newargs (nargs + vars->length ());
4805       for (size_t i = 0; i < nargs; i++)
4806 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4807       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4808 	{
4809 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4810 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4811 	}
4812       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4813       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4814       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4815       gsi_replace (&gsi, g, false);
4816     }
4817   vec_free (id->dst_simt_vars);
4818   id->dst_simt_vars = simtvars_save;
4819 
4820   /* Clean up.  */
4821   if (id->debug_map)
4822     {
4823       delete id->debug_map;
4824       id->debug_map = dst;
4825     }
4826   delete id->decl_map;
4827   id->decl_map = st;
4828 
4829   /* Unlink the call's virtual operands before replacing it.  */
4830   unlink_stmt_vdef (stmt);
4831   if (gimple_vdef (stmt)
4832       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4833     release_ssa_name (gimple_vdef (stmt));
4834 
4835   /* If the inlined function returns a result that we care about,
4836      substitute the GIMPLE_CALL with an assignment of the return
4837      variable to the LHS of the call.  That is, if STMT was
4838      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4839   if (use_retvar && gimple_call_lhs (stmt))
4840     {
4841       gimple *old_stmt = stmt;
4842       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4843       gsi_replace (&stmt_gsi, stmt, false);
4844       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4845       /* Append a clobber for id->retvar if easily possible.  */
4846       if (flag_stack_reuse != SR_NONE
4847 	  && (flag_sanitize & SANITIZE_KERNEL_ADDRESS) != 0
4848 	  && id->retvar
4849 	  && VAR_P (id->retvar)
4850 	  && id->retvar != return_slot
4851 	  && id->retvar != modify_dest
4852 	  && !TREE_THIS_VOLATILE (id->retvar)
4853 	  && !is_gimple_reg (id->retvar)
4854 	  && !stmt_ends_bb_p (stmt))
4855 	{
4856 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4857 	  gimple *clobber_stmt;
4858 	  TREE_THIS_VOLATILE (clobber) = 1;
4859 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4860 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4861 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4862 	}
4863 
4864       /* Copy bounds if we copy structure with bounds.  */
4865       if (chkp_function_instrumented_p (id->dst_fn)
4866 	  && !BOUNDED_P (use_retvar)
4867 	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4868 	id->assign_stmts.safe_push (stmt);
4869     }
4870   else
4871     {
4872       /* Handle the case of inlining a function with no return
4873 	 statement, which causes the return value to become undefined.  */
4874       if (gimple_call_lhs (stmt)
4875 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4876 	{
4877 	  tree name = gimple_call_lhs (stmt);
4878 	  tree var = SSA_NAME_VAR (name);
4879 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4880 
4881 	  if (def)
4882 	    {
4883 	      /* If the variable is used undefined, make this name
4884 		 undefined via a move.  */
4885 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4886 	      gsi_replace (&stmt_gsi, stmt, true);
4887 	    }
4888 	  else
4889 	    {
4890 	      if (!var)
4891 		{
4892 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4893 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4894 		}
4895 	      /* Otherwise make this variable undefined.  */
4896 	      gsi_remove (&stmt_gsi, true);
4897 	      set_ssa_default_def (cfun, var, name);
4898 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4899 	    }
4900 	}
4901       /* Replace with a clobber for id->retvar.  */
4902       else if (flag_stack_reuse != SR_NONE
4903 	       && (flag_sanitize & SANITIZE_KERNEL_ADDRESS) != 0
4904 	       && id->retvar
4905 	       && VAR_P (id->retvar)
4906 	       && id->retvar != return_slot
4907 	       && id->retvar != modify_dest
4908 	       && !TREE_THIS_VOLATILE (id->retvar)
4909 	       && !is_gimple_reg (id->retvar))
4910 	{
4911 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4912 	  gimple *clobber_stmt;
4913 	  TREE_THIS_VOLATILE (clobber) = 1;
4914 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4915 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
4916 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
4917 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4918 	}
4919       else
4920 	gsi_remove (&stmt_gsi, true);
4921     }
4922 
4923   /* Put returned bounds into the correct place if required.  */
4924   if (return_bounds)
4925     {
4926       gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4927       gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4928       gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4929       unlink_stmt_vdef (old_stmt);
4930       gsi_replace (&bnd_gsi, new_stmt, false);
4931       maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4932       cgraph_update_edges_for_call_stmt (old_stmt,
4933 					 gimple_call_fndecl (old_stmt),
4934 					 new_stmt);
4935     }
4936 
4937   if (purge_dead_abnormal_edges)
4938     {
4939       gimple_purge_dead_eh_edges (return_block);
4940       gimple_purge_dead_abnormal_call_edges (return_block);
4941     }
4942 
4943   /* If the value of the new expression is ignored, that's OK.  We
4944      don't warn about this for CALL_EXPRs, so we shouldn't warn about
4945      the equivalent inlined version either.  */
4946   if (is_gimple_assign (stmt))
4947     {
4948       gcc_assert (gimple_assign_single_p (stmt)
4949 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4950       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4951     }
4952 
4953   /* Copy bounds for all generated assigns that need it.  */
4954   for (i = 0; i < id->assign_stmts.length (); i++)
4955     chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4956   id->assign_stmts.release ();
4957 
4958   /* Output the inlining info for this abstract function, since it has been
4959      inlined.  If we don't do this now, we can lose the information about the
4960      variables in the function when the blocks get blown away as soon as we
4961      remove the cgraph node.  */
4962   if (gimple_block (stmt))
4963     (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4964 
4965   /* Update callgraph if needed.  */
4966   cg_edge->callee->remove ();
4967 
4968   id->block = NULL_TREE;
4969   id->retvar = NULL_TREE;
4970   id->retbnd = NULL_TREE;
4971   successfully_inlined = true;
4972 
4973  egress:
4974   input_location = saved_location;
4975   return successfully_inlined;
4976 }
4977 
4978 /* Expand call statements in basic block BB.
4979    We can only have CALL_EXPRs as the "toplevel" tree code or nested
4980    in a MODIFY_EXPR.  */
4981 
4982 static bool
4983 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4984 {
4985   gimple_stmt_iterator gsi;
4986   bool inlined = false;
4987 
4988   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4989     {
4990       gimple *stmt = gsi_stmt (gsi);
4991       gsi_prev (&gsi);
4992 
4993       if (is_gimple_call (stmt)
4994 	  && !gimple_call_internal_p (stmt))
4995 	inlined |= expand_call_inline (bb, stmt, id);
4996     }
4997 
4998   return inlined;
4999 }
5000 
5001 
5002 /* Walk all basic blocks created after FIRST and try to fold every statement
5003    in the STATEMENTS pointer set.  */
5004 
5005 static void
5006 fold_marked_statements (int first, hash_set<gimple *> *statements)
5007 {
5008   for (; first < n_basic_blocks_for_fn (cfun); first++)
5009     if (BASIC_BLOCK_FOR_FN (cfun, first))
5010       {
5011         gimple_stmt_iterator gsi;
5012 
5013 	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5014 	     !gsi_end_p (gsi);
5015 	     gsi_next (&gsi))
5016 	  if (statements->contains (gsi_stmt (gsi)))
5017 	    {
5018 	      gimple *old_stmt = gsi_stmt (gsi);
5019 	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5020 
5021 	      if (old_decl && DECL_BUILT_IN (old_decl))
5022 		{
5023 		  /* Folding builtins can create multiple statements;
5024 		     we need to look at all of them.  */
5025 		  gimple_stmt_iterator i2 = gsi;
5026 		  gsi_prev (&i2);
5027 		  if (fold_stmt (&gsi))
5028 		    {
5029 		      gimple *new_stmt;
5030 		      /* If a builtin at the end of a bb folded into nothing,
5031 			 the following loop won't work.  */
5032 		      if (gsi_end_p (gsi))
5033 			{
5034 			  cgraph_update_edges_for_call_stmt (old_stmt,
5035 							     old_decl, NULL);
5036 			  break;
5037 			}
5038 		      if (gsi_end_p (i2))
5039 			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5040 		      else
5041 			gsi_next (&i2);
5042 		      while (1)
5043 			{
5044 			  new_stmt = gsi_stmt (i2);
5045 			  update_stmt (new_stmt);
5046 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5047 							     new_stmt);
5048 
5049 			  if (new_stmt == gsi_stmt (gsi))
5050 			    {
5051 			      /* It is okay to check only the very last
5052 				 of these statements.  If it is a throwing
5053 				 statement nothing will change.  If it isn't,
5054 				 this can remove EH edges.  The only way that
5055 				 would be incorrect is if some intermediate
5056 				 stmts threw, but not the last one.  That would
5057 				 mean we'd have to split the block, which we
5058 				 can't do here and we'd lose anyway.  And as
5059 				 builtins probably never throw, this all
5060 				 is moot anyway.  */
5061 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
5062 								  new_stmt))
5063 				gimple_purge_dead_eh_edges (
5064 				  BASIC_BLOCK_FOR_FN (cfun, first));
5065 			      break;
5066 			    }
5067 			  gsi_next (&i2);
5068 			}
5069 		    }
5070 		}
5071 	      else if (fold_stmt (&gsi))
5072 		{
5073 		  /* Re-read the statement from GSI as fold_stmt() may
5074 		     have changed it.  */
5075 		  gimple *new_stmt = gsi_stmt (gsi);
5076 		  update_stmt (new_stmt);
5077 
5078 		  if (is_gimple_call (old_stmt)
5079 		      || is_gimple_call (new_stmt))
5080 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5081 						       new_stmt);
5082 
5083 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5084 		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5085 								    first));
5086 		}
5087 	    }
5088       }
5089 }
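/* Illustrative note, assuming the usual setup elsewhere in this file: the
   STATEMENTS set consulted above is filled while statements are being
   copied, roughly as

     id->statements_to_fold->add (stmt);

   and fold_marked_statements is then run once over every basic block
   created at or after FIRST, so freshly inlined statements get one
   folding pass.  */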
5090 
5091 /* Expand calls to inline functions in the body of FN.  */
5092 
5093 unsigned int
5094 optimize_inline_calls (tree fn)
5095 {
5096   copy_body_data id;
5097   basic_block bb;
5098   int last = n_basic_blocks_for_fn (cfun);
5099   bool inlined_p = false;
5100 
5101   /* Clear out ID.  */
5102   memset (&id, 0, sizeof (id));
5103 
5104   id.src_node = id.dst_node = cgraph_node::get (fn);
5105   gcc_assert (id.dst_node->definition);
5106   id.dst_fn = fn;
5107   /* Or any functions that aren't finished yet.  */
5108   if (current_function_decl)
5109     id.dst_fn = current_function_decl;
5110 
5111   id.copy_decl = copy_decl_maybe_to_var;
5112   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5113   id.transform_new_cfg = false;
5114   id.transform_return_to_modify = true;
5115   id.transform_parameter = true;
5116   id.transform_lang_insert_block = NULL;
5117   id.statements_to_fold = new hash_set<gimple *>;
5118 
5119   push_gimplify_context ();
5120 
5121   /* We make no attempts to keep dominance info up-to-date.  */
5122   free_dominance_info (CDI_DOMINATORS);
5123   free_dominance_info (CDI_POST_DOMINATORS);
5124 
5125   /* Register specific gimple functions.  */
5126   gimple_register_cfg_hooks ();
5127 
5128   /* Reach the trees by walking over the CFG, and note the
5129      enclosing basic-blocks in the call edges.  */
5130   /* We walk the blocks going forward, because inlined function bodies
5131      will split id->current_basic_block, and the new blocks will
5132      follow it; we'll trudge through them, processing their CALL_EXPRs
5133      along the way.  */
5134   FOR_EACH_BB_FN (bb, cfun)
5135     inlined_p |= gimple_expand_calls_inline (bb, &id);
5136 
5137   pop_gimplify_context (NULL);
5138 
5139   if (flag_checking)
5140     {
5141       struct cgraph_edge *e;
5142 
5143       id.dst_node->verify ();
5144 
5145       /* Double check that we inlined everything we are supposed to inline.  */
5146       for (e = id.dst_node->callees; e; e = e->next_callee)
5147 	gcc_assert (e->inline_failed);
5148     }
5149 
5150   /* Fold queued statements.  */
5151   fold_marked_statements (last, id.statements_to_fold);
5152   delete id.statements_to_fold;
5153 
5154   gcc_assert (!id.debug_stmts.exists ());
5155 
5156   /* If we didn't inline into the function there is nothing to do.  */
5157   if (!inlined_p)
5158     return 0;
5159 
5160   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5161   number_blocks (fn);
5162 
5163   delete_unreachable_blocks_update_callgraph (&id);
5164   if (flag_checking)
5165     id.dst_node->verify ();
5166 
5167   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5168      not possible yet - the IPA passes might make various functions nonthrowing
5169      without bothering to proactively update local EH info.  This is
5170      done later in the fixup_cfg pass, which also executes the verification.  */
5171   return (TODO_update_ssa
5172 	  | TODO_cleanup_cfg
5173 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5174 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5175 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5176 	     ? TODO_rebuild_frequencies : 0));
5177 }
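/* Illustrative sketch, assuming a hypothetical transform (the pass name
   below is invented): optimize_inline_calls is meant to be driven by the
   pass manager with CFUN/CURRENT_FUNCTION_DECL already set up, and the
   returned TODO_* mask must be forwarded so the required cleanups rerun.  */
#if 0
static unsigned int
execute_example_inline_transform (void)
{
  /* May return e.g. TODO_update_ssa | TODO_cleanup_cfg.  */
  return optimize_inline_calls (current_function_decl);
}
#endif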
5178 
5179 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5180 
5181 tree
5182 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5183 {
5184   enum tree_code code = TREE_CODE (*tp);
5185   enum tree_code_class cl = TREE_CODE_CLASS (code);
5186 
5187   /* We make copies of most nodes.  */
5188   if (IS_EXPR_CODE_CLASS (cl)
5189       || code == TREE_LIST
5190       || code == TREE_VEC
5191       || code == TYPE_DECL
5192       || code == OMP_CLAUSE)
5193     {
5194       /* Because the chain gets clobbered when we make a copy, we save it
5195 	 here.  */
5196       tree chain = NULL_TREE, new_tree;
5197 
5198       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5199 	chain = TREE_CHAIN (*tp);
5200 
5201       /* Copy the node.  */
5202       new_tree = copy_node (*tp);
5203 
5204       *tp = new_tree;
5205 
5206       /* Now, restore the chain, if appropriate.  That will cause
5207 	 walk_tree to walk into the chain as well.  */
5208       if (code == PARM_DECL
5209 	  || code == TREE_LIST
5210 	  || code == OMP_CLAUSE)
5211 	TREE_CHAIN (*tp) = chain;
5212 
5213       /* For now, we don't update BLOCKs when we make copies.  So, we
5214 	 have to nullify all BIND_EXPRs.  */
5215       if (TREE_CODE (*tp) == BIND_EXPR)
5216 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5217     }
5218   else if (code == CONSTRUCTOR)
5219     {
5220       /* CONSTRUCTOR nodes need special handling because
5221          we need to duplicate the vector of elements.  */
5222       tree new_tree;
5223 
5224       new_tree = copy_node (*tp);
5225       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5226       *tp = new_tree;
5227     }
5228   else if (code == STATEMENT_LIST)
5229     /* We used to just abort on STATEMENT_LIST, but we can run into them
5230        with statement-expressions (c++/40975).  */
5231     copy_statement_list (tp);
5232   else if (TREE_CODE_CLASS (code) == tcc_type)
5233     *walk_subtrees = 0;
5234   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5235     *walk_subtrees = 0;
5236   else if (TREE_CODE_CLASS (code) == tcc_constant)
5237     *walk_subtrees = 0;
5238   return NULL_TREE;
5239 }
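/* Illustrative sketch, assuming a hypothetical helper: copy_tree_r is
   designed as a walk_tree callback, so a deep copy of a GENERIC
   expression EXPR could be made roughly as below.  Decls, types and
   constants stay shared because copy_tree_r clears *WALK_SUBTREES for
   them.  */
#if 0
static tree
example_deep_copy_expr (tree expr)
{
  tree copy = expr;
  walk_tree (&copy, copy_tree_r, NULL, NULL);
  return copy;
}
#endif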
5240 
5241 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5242    information indicating to what new SAVE_EXPR this one should be mapped,
5243    use that one.  Otherwise, create a new node and enter it in ST.
5244    WALK_SUBTREES is cleared if the SAVE_EXPR was already remapped.  */
5245 
5246 static void
5247 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5248 {
5249   tree *n;
5250   tree t;
5251 
5252   /* See if we already encountered this SAVE_EXPR.  */
5253   n = st->get (*tp);
5254 
5255   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5256   if (!n)
5257     {
5258       t = copy_node (*tp);
5259 
5260       /* Remember this SAVE_EXPR.  */
5261       st->put (*tp, t);
5262       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5263       st->put (t, t);
5264     }
5265   else
5266     {
5267       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5268       *walk_subtrees = 0;
5269       t = *n;
5270     }
5271 
5272   /* Replace this SAVE_EXPR with the copy.  */
5273   *tp = t;
5274 }
5275 
5276 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5277    label, copies the declaration and enters it in the decl map of the
5278    copy_body_data carried in WI->info.  */
5279 
5280 static tree
5281 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5282 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5283 		        struct walk_stmt_info *wi)
5284 {
5285   copy_body_data *id = (copy_body_data *) wi->info;
5286   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5287 
5288   if (stmt)
5289     {
5290       tree decl = gimple_label_label (stmt);
5291 
5292       /* Copy the decl and remember the copy.  */
5293       insert_decl_map (id, decl, id->copy_decl (decl, id));
5294     }
5295 
5296   return NULL_TREE;
5297 }
5298 
5299 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5300 						  struct walk_stmt_info *wi);
5301 
5302 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5303    Using the decl map pointed to by ST (really ID->decl_map, a hash_map),
5304    remaps all local declarations to appropriate replacements in gimple
5305    operands.  */
5306 
5307 static tree
5308 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5309 {
5310   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5311   copy_body_data *id = (copy_body_data *) wi->info;
5312   hash_map<tree, tree> *st = id->decl_map;
5313   tree *n;
5314   tree expr = *tp;
5315 
5316   /* For recursive invocations this is no longer the LHS itself.  */
5317   bool is_lhs = wi->is_lhs;
5318   wi->is_lhs = false;
5319 
5320   if (TREE_CODE (expr) == SSA_NAME)
5321     {
5322       *tp = remap_ssa_name (*tp, id);
5323       *walk_subtrees = 0;
5324       if (is_lhs)
5325 	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5326     }
5327   /* Only a local declaration (variable or label).  */
5328   else if ((VAR_P (expr) && !TREE_STATIC (expr))
5329 	   || TREE_CODE (expr) == LABEL_DECL)
5330     {
5331       /* Lookup the declaration.  */
5332       n = st->get (expr);
5333 
5334       /* If it's there, remap it.  */
5335       if (n)
5336 	*tp = *n;
5337       *walk_subtrees = 0;
5338     }
5339   else if (TREE_CODE (expr) == STATEMENT_LIST
5340 	   || TREE_CODE (expr) == BIND_EXPR
5341 	   || TREE_CODE (expr) == SAVE_EXPR)
5342     gcc_unreachable ();
5343   else if (TREE_CODE (expr) == TARGET_EXPR)
5344     {
5345       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5346          It's OK for this to happen if it was part of a subtree that
5347          isn't immediately expanded, such as operand 2 of another
5348          TARGET_EXPR.  */
5349       if (!TREE_OPERAND (expr, 1))
5350 	{
5351 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5352 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5353 	}
5354     }
5355   else if (TREE_CODE (expr) == OMP_CLAUSE)
5356     {
5357       /* Before the omplower pass completes, some OMP clauses can contain
5358 	 sequences that are neither copied by gimple_seq_copy nor walked by
5359 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5360 	 in those situations, we have to copy and process them explicitly.  */
5361 
5362       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5363 	{
5364 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5365 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5366 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5367 	}
5368       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5369 	{
5370 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5371 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5372 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5373 	}
5374       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5375 	{
5376 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5377 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5378 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5379 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5380 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5381 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5382 	}
5383     }
5384 
5385   /* Keep iterating.  */
5386   return NULL_TREE;
5387 }
5388 
5389 
5390 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5391    Using the decl map in ID->decl_map (a hash_map<tree, tree>),
5392    remaps all local declarations to appropriate replacements in gimple
5393    statements.  */
5394 
5395 static tree
5396 replace_locals_stmt (gimple_stmt_iterator *gsip,
5397 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5398 		     struct walk_stmt_info *wi)
5399 {
5400   copy_body_data *id = (copy_body_data *) wi->info;
5401   gimple *gs = gsi_stmt (*gsip);
5402 
5403   if (gbind *stmt = dyn_cast <gbind *> (gs))
5404     {
5405       tree block = gimple_bind_block (stmt);
5406 
5407       if (block)
5408 	{
5409 	  remap_block (&block, id);
5410 	  gimple_bind_set_block (stmt, block);
5411 	}
5412 
5413       /* This will remap a lot of the same decls again, but this should be
5414 	 harmless.  */
5415       if (gimple_bind_vars (stmt))
5416 	{
5417 	  tree old_var, decls = gimple_bind_vars (stmt);
5418 
5419 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5420 	    if (!can_be_nonlocal (old_var, id)
5421 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5422 	      remap_decl (old_var, id);
5423 
5424 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5425 	  id->prevent_decl_creation_for_types = true;
5426 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5427 	  id->prevent_decl_creation_for_types = false;
5428 	}
5429     }
5430 
5431   /* Keep iterating.  */
5432   return NULL_TREE;
5433 }
5434 
5435 /* Create a copy of SEQ and remap all decls in it.  */
5436 
5437 static gimple_seq
5438 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5439 {
5440   if (!seq)
5441     return NULL;
5442 
5443   /* If there are any labels in OMP sequences, they can only be referred to
5444      within the sequence itself, so we can do both steps here.  */
5445   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5446   gimple_seq copy = gimple_seq_copy (seq);
5447   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5448   return copy;
5449 }
5450 
5451 /* Copies everything in SEQ and replaces variables and labels local to
5452    current_function_decl.  */
5453 
5454 gimple_seq
5455 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5456 {
5457   copy_body_data id;
5458   struct walk_stmt_info wi;
5459   gimple_seq copy;
5460 
5461   /* There's nothing to do for an empty (NULL) sequence.  */
5462   if (seq == NULL)
5463     return seq;
5464 
5465   /* Set up ID.  */
5466   memset (&id, 0, sizeof (id));
5467   id.src_fn = current_function_decl;
5468   id.dst_fn = current_function_decl;
5469   id.src_cfun = cfun;
5470   id.decl_map = new hash_map<tree, tree>;
5471   id.debug_map = NULL;
5472 
5473   id.copy_decl = copy_decl_no_change;
5474   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5475   id.transform_new_cfg = false;
5476   id.transform_return_to_modify = false;
5477   id.transform_parameter = false;
5478   id.transform_lang_insert_block = NULL;
5479 
5480   /* Walk the tree once to find local labels.  */
5481   memset (&wi, 0, sizeof (wi));
5482   hash_set<tree> visited;
5483   wi.info = &id;
5484   wi.pset = &visited;
5485   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5486 
5487   copy = gimple_seq_copy (seq);
5488 
5489   /* Walk the copy, remapping decls.  */
5490   memset (&wi, 0, sizeof (wi));
5491   wi.info = &id;
5492   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5493 
5494   /* Clean up.  */
5495   delete id.decl_map;
5496   if (id.debug_map)
5497     delete id.debug_map;
5498   if (id.dependence_map)
5499     {
5500       delete id.dependence_map;
5501       id.dependence_map = NULL;
5502     }
5503 
5504   return copy;
5505 }
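/* Illustrative sketch, assuming a hypothetical caller: a gimple_seq SEQ
   belonging to current_function_decl (e.g. a sequence attached to an OMP
   clause) can be duplicated so the copy uses fresh local decls and labels
   before being inserted elsewhere in the same function.  */
#if 0
  gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);
  /* COPY and SEQ now share no local VAR_DECLs or LABEL_DECLs.  */
#endif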
5506 
5507 
5508 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5509 
5510 static tree
5511 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5512 {
5513   if (*tp == data)
5514     return (tree) data;
5515   else
5516     return NULL;
5517 }
5518 
5519 DEBUG_FUNCTION bool
5520 debug_find_tree (tree top, tree search)
5521 {
5522   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5523 }
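/* Illustrative note: debug_find_tree is intended for manual use from the
   debugger, e.g.

     (gdb) call debug_find_tree (top, search)

   which returns true iff SEARCH occurs somewhere within TOP.  */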
5524 
5525 
5526 /* Declare the variables created by the inliner.  Add all the variables in
5527    VARS to BLOCK.  */
5528 
5529 static void
5530 declare_inline_vars (tree block, tree vars)
5531 {
5532   tree t;
5533   for (t = vars; t; t = DECL_CHAIN (t))
5534     {
5535       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5536       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5537       add_local_decl (cfun, t);
5538     }
5539 
5540   if (block)
5541     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5542 }
5543 
5544 /* Finish the copy COPY of DECL.  DECL originally lived in ID->src_fn, but
5545    the copy will live in ID->dst_fn; fix up its debug information, context
5546    and related flags accordingly.  */
5547 
5548 tree
5549 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5550 {
5551   /* Don't generate debug information for the copy if we wouldn't have
5552      generated it for the original either.  */
5553   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5554   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5555 
5556   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5557      declaration inspired this copy.  */
5558   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5559 
5560   /* The new variable/label has no RTL, yet.  */
5561   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5562       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5563     SET_DECL_RTL (copy, 0);
5564 
5565   /* These args would always appear unused, if not for this.  */
5566   TREE_USED (copy) = 1;
5567 
5568   /* Set the context for the new declaration.  */
5569   if (!DECL_CONTEXT (decl))
5570     /* Globals stay global.  */
5571     ;
5572   else if (DECL_CONTEXT (decl) != id->src_fn)
5573     /* Things that weren't in the scope of the function we're inlining
5574        from aren't in the scope we're inlining to, either.  */
5575     ;
5576   else if (TREE_STATIC (decl))
5577     /* Function-scoped static variables should stay in the original
5578        function.  */
5579     ;
5580   else
5581     {
5582       /* Ordinary automatic local variables are now in the scope of the
5583 	 new function.  */
5584       DECL_CONTEXT (copy) = id->dst_fn;
5585       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5586 	{
5587 	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5588 	    DECL_ATTRIBUTES (copy)
5589 	      = tree_cons (get_identifier ("omp simt private"), NULL,
5590 			   DECL_ATTRIBUTES (copy));
5591 	  id->dst_simt_vars->safe_push (copy);
5592 	}
5593     }
5594 
5595   return copy;
5596 }
5597 
5598 static tree
5599 copy_decl_to_var (tree decl, copy_body_data *id)
5600 {
5601   tree copy, type;
5602 
5603   gcc_assert (TREE_CODE (decl) == PARM_DECL
5604 	      || TREE_CODE (decl) == RESULT_DECL);
5605 
5606   type = TREE_TYPE (decl);
5607 
5608   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5609 		     VAR_DECL, DECL_NAME (decl), type);
5610   if (DECL_PT_UID_SET_P (decl))
5611     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5612   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5613   TREE_READONLY (copy) = TREE_READONLY (decl);
5614   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5615   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5616 
5617   return copy_decl_for_dup_finish (id, decl, copy);
5618 }
5619 
5620 /* Like copy_decl_to_var, but create a return slot object instead of a
5621    pointer variable for return by invisible reference.  */
5622 
5623 static tree
5624 copy_result_decl_to_var (tree decl, copy_body_data *id)
5625 {
5626   tree copy, type;
5627 
5628   gcc_assert (TREE_CODE (decl) == PARM_DECL
5629 	      || TREE_CODE (decl) == RESULT_DECL);
5630 
5631   type = TREE_TYPE (decl);
5632   if (DECL_BY_REFERENCE (decl))
5633     type = TREE_TYPE (type);
5634 
5635   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5636 		     VAR_DECL, DECL_NAME (decl), type);
5637   if (DECL_PT_UID_SET_P (decl))
5638     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5639   TREE_READONLY (copy) = TREE_READONLY (decl);
5640   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5641   if (!DECL_BY_REFERENCE (decl))
5642     {
5643       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5644       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5645     }
5646 
5647   return copy_decl_for_dup_finish (id, decl, copy);
5648 }
5649 
5650 tree
5651 copy_decl_no_change (tree decl, copy_body_data *id)
5652 {
5653   tree copy;
5654 
5655   copy = copy_node (decl);
5656 
5657   /* The COPY is not abstract; it will be generated in DST_FN.  */
5658   DECL_ABSTRACT_P (copy) = false;
5659   lang_hooks.dup_lang_specific_decl (copy);
5660 
5661   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5662      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5663   if (TREE_CODE (copy) == LABEL_DECL)
5664     {
5665       TREE_ADDRESSABLE (copy) = 0;
5666       LABEL_DECL_UID (copy) = -1;
5667     }
5668 
5669   return copy_decl_for_dup_finish (id, decl, copy);
5670 }
5671 
5672 static tree
5673 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5674 {
5675   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5676     return copy_decl_to_var (decl, id);
5677   else
5678     return copy_decl_no_change (decl, id);
5679 }
5680 
5681 /* Return a copy of the function's argument tree.  */
5682 static tree
5683 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5684 			       bitmap args_to_skip, tree *vars)
5685 {
5686   tree arg, *parg;
5687   tree new_parm = NULL;
5688   int i = 0;
5689 
5690   parg = &new_parm;
5691 
5692   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5693     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5694       {
5695         tree new_tree = remap_decl (arg, id);
5696 	if (TREE_CODE (new_tree) != PARM_DECL)
5697 	  new_tree = id->copy_decl (arg, id);
5698         lang_hooks.dup_lang_specific_decl (new_tree);
5699         *parg = new_tree;
5700 	parg = &DECL_CHAIN (new_tree);
5701       }
5702     else if (!id->decl_map->get (arg))
5703       {
5704 	/* Make an equivalent VAR_DECL.  If the argument is used
5705 	   as a temporary variable later in the function, the uses will be
5706 	   replaced by the local variable.  */
5707 	tree var = copy_decl_to_var (arg, id);
5708 	insert_decl_map (id, arg, var);
5709         /* Declare this new variable.  */
5710         DECL_CHAIN (var) = *vars;
5711         *vars = var;
5712       }
5713   return new_parm;
5714 }
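/* Illustrative sketch, assuming a hypothetical caller: ARGS_TO_SKIP is
   indexed by 0-based parameter position, as the loop above shows.  A
   caller that wants the new version to drop the second parameter would
   build the bitmap roughly like this before handing it to
   tree_function_versioning.  */
#if 0
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  bitmap_set_bit (args_to_skip, 1);	/* Drop parameter number 1.  */
  /* ... pass ARGS_TO_SKIP to tree_function_versioning, then ...  */
  BITMAP_FREE (args_to_skip);
#endif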
5715 
5716 /* Return a copy of the function's static chain.  */
5717 static tree
5718 copy_static_chain (tree static_chain, copy_body_data * id)
5719 {
5720   tree *chain_copy, *pvar;
5721 
5722   chain_copy = &static_chain;
5723   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5724     {
5725       tree new_tree = remap_decl (*pvar, id);
5726       lang_hooks.dup_lang_specific_decl (new_tree);
5727       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5728       *pvar = new_tree;
5729     }
5730   return static_chain;
5731 }
5732 
5733 /* Return true if the function is allowed to be versioned.
5734    This is a guard for the versioning functionality.  */
5735 
5736 bool
5737 tree_versionable_function_p (tree fndecl)
5738 {
5739   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5740 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5741 }
5742 
5743 /* Delete all unreachable basic blocks and update the callgraph.
5744    Doing so is somewhat nontrivial because we need to update all clones and
5745    remove inline functions that become unreachable.  */
5746 
5747 static bool
5748 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5749 {
5750   bool changed = false;
5751   basic_block b, next_bb;
5752 
5753   find_unreachable_blocks ();
5754 
5755   /* Delete all unreachable basic blocks.  */
5756 
5757   for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5758        != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5759     {
5760       next_bb = b->next_bb;
5761 
5762       if (!(b->flags & BB_REACHABLE))
5763 	{
5764           gimple_stmt_iterator bsi;
5765 
5766           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5767 	    {
5768 	      struct cgraph_edge *e;
5769 	      struct cgraph_node *node;
5770 
5771 	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5772 
5773 	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5774 		  &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5775 		{
5776 		  if (!e->inline_failed)
5777 		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
5778 		  else
5779 		    e->remove ();
5780 		}
5781 	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5782 		  && id->dst_node->clones)
5783 		for (node = id->dst_node->clones; node != id->dst_node;)
5784 		  {
5785 		    node->remove_stmt_references (gsi_stmt (bsi));
5786 		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5787 			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5788 		      {
5789 			if (!e->inline_failed)
5790 			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
5791 			else
5792 			  e->remove ();
5793 		      }
5794 
5795 		    if (node->clones)
5796 		      node = node->clones;
5797 		    else if (node->next_sibling_clone)
5798 		      node = node->next_sibling_clone;
5799 		    else
5800 		      {
5801 			while (node != id->dst_node && !node->next_sibling_clone)
5802 			  node = node->clone_of;
5803 			if (node != id->dst_node)
5804 			  node = node->next_sibling_clone;
5805 		      }
5806 		  }
5807 	    }
5808 	  delete_basic_block (b);
5809 	  changed = true;
5810 	}
5811     }
5812 
5813   return changed;
5814 }
5815 
5816 /* Update clone info after duplication.  */
5817 
5818 static void
5819 update_clone_info (copy_body_data * id)
5820 {
5821   struct cgraph_node *node;
5822   if (!id->dst_node->clones)
5823     return;
5824   for (node = id->dst_node->clones; node != id->dst_node;)
5825     {
5826       /* First update replace maps to match the new body.  */
5827       if (node->clone.tree_map)
5828         {
5829 	  unsigned int i;
5830           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5831 	    {
5832 	      struct ipa_replace_map *replace_info;
5833 	      replace_info = (*node->clone.tree_map)[i];
5834 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5835 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5836 	    }
5837 	}
5838       if (node->clones)
5839 	node = node->clones;
5840       else if (node->next_sibling_clone)
5841 	node = node->next_sibling_clone;
5842       else
5843 	{
5844 	  while (node != id->dst_node && !node->next_sibling_clone)
5845 	    node = node->clone_of;
5846 	  if (node != id->dst_node)
5847 	    node = node->next_sibling_clone;
5848 	}
5849     }
5850 }
5851 
5852 /* Create a copy of a function's tree.
5853    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5854    of the original function and the new copied function
5855    respectively.  In case we want to replace a DECL
5856    tree with another tree while duplicating the function's
5857    body, TREE_MAP represents the mapping between these
5858    trees. If UPDATE_CLONES is set, the call_stmt fields
5859    of edges of clones of the function will be updated.
5860 
5861    If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5862    from the new version.
5863    If SKIP_RETURN is true, the new version will return void.
5864    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5865    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5866 */
5867 void
5868 tree_function_versioning (tree old_decl, tree new_decl,
5869 			  vec<ipa_replace_map *, va_gc> *tree_map,
5870 			  bool update_clones, bitmap args_to_skip,
5871 			  bool skip_return, bitmap blocks_to_copy,
5872 			  basic_block new_entry)
5873 {
5874   struct cgraph_node *old_version_node;
5875   struct cgraph_node *new_version_node;
5876   copy_body_data id;
5877   tree p;
5878   unsigned i;
5879   struct ipa_replace_map *replace_info;
5880   basic_block old_entry_block, bb;
5881   auto_vec<gimple *, 10> init_stmts;
5882   tree vars = NULL_TREE;
5883   bitmap debug_args_to_skip = args_to_skip;
5884 
5885   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5886 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5887   DECL_POSSIBLY_INLINED (old_decl) = 1;
5888 
5889   old_version_node = cgraph_node::get (old_decl);
5890   gcc_checking_assert (old_version_node);
5891   new_version_node = cgraph_node::get (new_decl);
5892   gcc_checking_assert (new_version_node);
5893 
5894   /* Copy over debug args.  */
5895   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5896     {
5897       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5898       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5899       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5900       old_debug_args = decl_debug_args_lookup (old_decl);
5901       if (old_debug_args)
5902 	{
5903 	  new_debug_args = decl_debug_args_insert (new_decl);
5904 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5905 	}
5906     }
5907 
5908   /* Output the inlining info for this abstract function, since it has been
5909      inlined.  If we don't do this now, we can lose the information about the
5910      variables in the function when the blocks get blown away as soon as we
5911      remove the cgraph node.  */
5912   (*debug_hooks->outlining_inline_function) (old_decl);
5913 
5914   DECL_ARTIFICIAL (new_decl) = 1;
5915   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5916   if (DECL_ORIGIN (old_decl) == old_decl)
5917     old_version_node->used_as_abstract_origin = true;
5918   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5919 
5920   /* Prepare the data structures for the tree copy.  */
5921   memset (&id, 0, sizeof (id));
5922 
5923   /* Generate a new name for the new version. */
5924   id.statements_to_fold = new hash_set<gimple *>;
5925 
5926   id.decl_map = new hash_map<tree, tree>;
5927   id.debug_map = NULL;
5928   id.src_fn = old_decl;
5929   id.dst_fn = new_decl;
5930   id.src_node = old_version_node;
5931   id.dst_node = new_version_node;
5932   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5933   id.blocks_to_copy = blocks_to_copy;
5934 
5935   id.copy_decl = copy_decl_no_change;
5936   id.transform_call_graph_edges
5937     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5938   id.transform_new_cfg = true;
5939   id.transform_return_to_modify = false;
5940   id.transform_parameter = false;
5941   id.transform_lang_insert_block = NULL;
5942 
5943   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5944     (DECL_STRUCT_FUNCTION (old_decl));
5945   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5946   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5947   initialize_cfun (new_decl, old_decl,
5948 		   old_entry_block->count);
5949   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5950     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5951       = id.src_cfun->gimple_df->ipa_pta;
5952 
5953   /* Copy the function's static chain.  */
5954   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5955   if (p)
5956     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5957       = copy_static_chain (p, &id);
5958 
5959   /* If there's a tree_map, prepare for substitution.  */
5960   if (tree_map)
5961     for (i = 0; i < tree_map->length (); i++)
5962       {
5963 	gimple *init;
5964 	replace_info = (*tree_map)[i];
5965 	if (replace_info->replace_p)
5966 	  {
5967 	    int parm_num = -1;
5968 	    if (!replace_info->old_tree)
5969 	      {
5970 		int p = replace_info->parm_num;
5971 		tree parm;
5972 		tree req_type, new_type;
5973 
5974 		for (parm = DECL_ARGUMENTS (old_decl); p;
5975 		     parm = DECL_CHAIN (parm))
5976 		  p--;
5977 		replace_info->old_tree = parm;
5978 		parm_num = replace_info->parm_num;
5979 		req_type = TREE_TYPE (parm);
5980 		new_type = TREE_TYPE (replace_info->new_tree);
5981 		if (!useless_type_conversion_p (req_type, new_type))
5982 		  {
5983 		    if (fold_convertible_p (req_type, replace_info->new_tree))
5984 		      replace_info->new_tree
5985 			= fold_build1 (NOP_EXPR, req_type,
5986 				       replace_info->new_tree);
5987 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5988 		      replace_info->new_tree
5989 			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
5990 				       replace_info->new_tree);
5991 		    else
5992 		      {
5993 			if (dump_file)
5994 			  {
5995 			    fprintf (dump_file, "    const ");
5996 			    print_generic_expr (dump_file,
5997 						replace_info->new_tree, 0);
5998 			    fprintf (dump_file,
5999 				     "  can't be converted to param ");
6000 			    print_generic_expr (dump_file, parm, 0);
6001 			    fprintf (dump_file, "\n");
6002 			  }
6003 			replace_info->old_tree = NULL;
6004 		      }
6005 		  }
6006 	      }
6007 	    else
6008 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6009 	    if (replace_info->old_tree)
6010 	      {
6011 		init = setup_one_parameter (&id, replace_info->old_tree,
6012 					    replace_info->new_tree, id.src_fn,
6013 					    NULL,
6014 					    &vars);
6015 		if (init)
6016 		  init_stmts.safe_push (init);
6017 		if (MAY_HAVE_DEBUG_STMTS && args_to_skip)
6018 		  {
6019 		    if (parm_num == -1)
6020 		      {
6021 			tree parm;
6022 			int p;
6023 			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6024 			     parm = DECL_CHAIN (parm), p++)
6025 			  if (parm == replace_info->old_tree)
6026 			    {
6027 			      parm_num = p;
6028 			      break;
6029 			    }
6030 		      }
6031 		    if (parm_num != -1)
6032 		      {
6033 			if (debug_args_to_skip == args_to_skip)
6034 			  {
6035 			    debug_args_to_skip = BITMAP_ALLOC (NULL);
6036 			    bitmap_copy (debug_args_to_skip, args_to_skip);
6037 			  }
6038 			bitmap_clear_bit (debug_args_to_skip, parm_num);
6039 		      }
6040 		  }
6041 	      }
6042 	  }
6043       }
6044   /* Copy the function's arguments.  */
6045   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6046     DECL_ARGUMENTS (new_decl)
6047       = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6048 				       args_to_skip, &vars);
6049 
6050   /* Remove omp declare simd attribute from the new attributes.  */
6051   if (tree a = lookup_attribute ("omp declare simd",
6052 				 DECL_ATTRIBUTES (new_decl)))
6053     {
6054       while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
6055 	a = a2;
6056       a = TREE_CHAIN (a);
6057       for (tree *p = &DECL_ATTRIBUTES (new_decl); *p != a;)
6058 	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
6059 	  *p = TREE_CHAIN (*p);
6060 	else
6061 	  {
6062 	    tree chain = TREE_CHAIN (*p);
6063 	    *p = copy_node (*p);
6064 	    p = &TREE_CHAIN (*p);
6065 	    *p = chain;
6066 	  }
6067     }
6068 
6069   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6070   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6071 
6072   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6073 
6074   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6075     /* Add local vars.  */
6076     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6077 
6078   if (DECL_RESULT (old_decl) == NULL_TREE)
6079     ;
6080   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6081     {
6082       DECL_RESULT (new_decl)
6083 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6084 		      RESULT_DECL, NULL_TREE, void_type_node);
6085       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6086       cfun->returns_struct = 0;
6087       cfun->returns_pcc_struct = 0;
6088     }
6089   else
6090     {
6091       tree old_name;
6092       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6093       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6094       if (gimple_in_ssa_p (id.src_cfun)
6095 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6096 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6097 	{
6098 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6099 	  insert_decl_map (&id, old_name, new_name);
6100 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6101 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6102 	}
6103     }
6104 
6105   /* Set up the destination function's loop tree.  */
6106   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6107     {
6108       cfun->curr_properties &= ~PROP_loops;
6109       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6110       cfun->curr_properties |= PROP_loops;
6111     }
6112 
6113   /* Copy the function's body.  */
6114   copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
6115 	     ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6116 	     new_entry);
6117 
6118   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6119   number_blocks (new_decl);
6120 
6121   /* We want to create the BB unconditionally, so that the addition of
6122      debug stmts doesn't affect BB count, which may in the end cause
6123      codegen differences.  */
6124   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6125   while (init_stmts.length ())
6126     insert_init_stmt (&id, bb, init_stmts.pop ());
6127   update_clone_info (&id);
6128 
6129   /* Remap the nonlocal_goto_save_area, if any.  */
6130   if (cfun->nonlocal_goto_save_area)
6131     {
6132       struct walk_stmt_info wi;
6133 
6134       memset (&wi, 0, sizeof (wi));
6135       wi.info = &id;
6136       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6137     }
6138 
6139   /* Clean up.  */
6140   delete id.decl_map;
6141   if (id.debug_map)
6142     delete id.debug_map;
6143   free_dominance_info (CDI_DOMINATORS);
6144   free_dominance_info (CDI_POST_DOMINATORS);
6145 
6146   fold_marked_statements (0, id.statements_to_fold);
6147   delete id.statements_to_fold;
6148   delete_unreachable_blocks_update_callgraph (&id);
6149   if (id.dst_node->definition)
6150     cgraph_edge::rebuild_references ();
6151   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6152     {
6153       calculate_dominance_info (CDI_DOMINATORS);
6154       fix_loop_structure (NULL);
6155     }
6156   update_ssa (TODO_update_ssa);
6157 
6158   /* After partial cloning we need to rescale frequencies, so they are
6159      within proper range in the cloned function.  */
6160   if (new_entry)
6161     {
6162       struct cgraph_edge *e;
6163       rebuild_frequencies ();
6164 
6165       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6166       for (e = new_version_node->callees; e; e = e->next_callee)
6167 	{
6168 	  basic_block bb = gimple_bb (e->call_stmt);
6169 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
6170 							 bb);
6171 	  e->count = bb->count;
6172 	}
6173       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6174 	{
6175 	  basic_block bb = gimple_bb (e->call_stmt);
6176 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
6177 							 bb);
6178 	  e->count = bb->count;
6179 	}
6180     }
6181 
6182   if (debug_args_to_skip && MAY_HAVE_DEBUG_STMTS)
6183     {
6184       tree parm;
6185       vec<tree, va_gc> **debug_args = NULL;
6186       unsigned int len = 0;
6187       for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6188 	   parm; parm = DECL_CHAIN (parm), i++)
6189 	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6190 	  {
6191 	    tree ddecl;
6192 
6193 	    if (debug_args == NULL)
6194 	      {
6195 		debug_args = decl_debug_args_insert (new_decl);
6196 		len = vec_safe_length (*debug_args);
6197 	      }
6198 	    ddecl = make_node (DEBUG_EXPR_DECL);
6199 	    DECL_ARTIFICIAL (ddecl) = 1;
6200 	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
6201 	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
6202 	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6203 	    vec_safe_push (*debug_args, ddecl);
6204 	  }
6205       if (debug_args != NULL)
6206 	{
6207 	  /* On the callee side, add
6208 	     DEBUG D#Y s=> parm
6209 	     DEBUG var => D#Y
6210 	     stmts to the first bb where var is a VAR_DECL created for the
6211 	     optimized away parameter in DECL_INITIAL block.  This hints
6212 	     in the debug info that var (whose DECL_ORIGIN is the parm
6213 	     PARM_DECL) is optimized away, but could be looked up at the
6214 	     call site as value of D#X there.  */
6215 	  tree var = vars, vexpr;
6216 	  gimple_stmt_iterator cgsi
6217 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6218 	  gimple *def_temp;
6219 	  var = vars;
6220 	  i = vec_safe_length (*debug_args);
6221 	  do
6222 	    {
6223 	      i -= 2;
6224 	      while (var != NULL_TREE
6225 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6226 		var = TREE_CHAIN (var);
6227 	      if (var == NULL_TREE)
6228 		break;
6229 	      vexpr = make_node (DEBUG_EXPR_DECL);
6230 	      parm = (**debug_args)[i];
6231 	      DECL_ARTIFICIAL (vexpr) = 1;
6232 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6233 	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
6234 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6235 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6236 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6237 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6238 	    }
6239 	  while (i > len);
6240 	}
6241     }
6242 
6243   if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6244     BITMAP_FREE (debug_args_to_skip);
6245   free_dominance_info (CDI_DOMINATORS);
6246   free_dominance_info (CDI_POST_DOMINATORS);
6247 
6248   gcc_assert (!id.debug_stmts.exists ());
6249   pop_cfun ();
6250   return;
6251 }
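/* Illustrative sketch, assuming a hypothetical caller (the allocation
   style is an assumption; real callers live in the cgraph cloning code):
   one way to build a single TREE_MAP entry asking the versioning
   machinery to substitute the constant 0 for the first parameter,
   matching the replace_info handling above.  */
#if 0
  vec<ipa_replace_map *, va_gc> *tree_map = NULL;
  struct ipa_replace_map *repl = ggc_cleared_alloc<ipa_replace_map> ();
  repl->old_tree = NULL_TREE;	/* Identify the parameter by number.  */
  repl->parm_num = 0;
  repl->new_tree = build_int_cst (integer_type_node, 0);
  repl->replace_p = true;
  vec_safe_push (tree_map, repl);
#endif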
6252 
6253 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6254    the callee and return the inlined body on success.  */
6255 
6256 tree
6257 maybe_inline_call_in_expr (tree exp)
6258 {
6259   tree fn = get_callee_fndecl (exp);
6260 
6261   /* We can only try to inline "const" functions.  */
6262   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6263     {
6264       call_expr_arg_iterator iter;
6265       copy_body_data id;
6266       tree param, arg, t;
6267       hash_map<tree, tree> decl_map;
6268 
6269       /* Remap the parameters.  */
6270       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6271 	   param;
6272 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6273 	decl_map.put (param, arg);
6274 
6275       memset (&id, 0, sizeof (id));
6276       id.src_fn = fn;
6277       id.dst_fn = current_function_decl;
6278       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6279       id.decl_map = &decl_map;
6280 
6281       id.copy_decl = copy_decl_no_change;
6282       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6283       id.transform_new_cfg = false;
6284       id.transform_return_to_modify = true;
6285       id.transform_parameter = true;
6286       id.transform_lang_insert_block = NULL;
6287 
6288       /* Make sure not to unshare trees behind the front-end's back
6289 	 since front-end specific mechanisms may rely on sharing.  */
6290       id.regimplify = false;
6291       id.do_not_unshare = true;
6292 
6293       /* We're not inside any EH region.  */
6294       id.eh_lp_nr = 0;
6295 
6296       t = copy_tree_body (&id);
6297 
6298       /* We can only return something suitable for use in a GENERIC
6299 	 expression tree.  */
6300       if (TREE_CODE (t) == MODIFY_EXPR)
6301 	return TREE_OPERAND (t, 1);
6302     }
6303 
6304    return NULL_TREE;
6305 }
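/* Illustrative sketch, assuming a hypothetical front-end caller holding a
   GENERIC CALL_EXPR to a "const" function whose body is still available:
   the call can be folded away when possible, and on failure the original
   CALL_EXPR is simply kept.  */
#if 0
  tree folded = maybe_inline_call_in_expr (call);
  if (folded)
    call = folded;
#endif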
6306 
6307 /* Duplicate a type, fields and all.  */
6308 
6309 tree
6310 build_duplicate_type (tree type)
6311 {
6312   struct copy_body_data id;
6313 
6314   memset (&id, 0, sizeof (id));
6315   id.src_fn = current_function_decl;
6316   id.dst_fn = current_function_decl;
6317   id.src_cfun = cfun;
6318   id.decl_map = new hash_map<tree, tree>;
6319   id.debug_map = NULL;
6320   id.copy_decl = copy_decl_no_change;
6321 
6322   type = remap_type_1 (type, &id);
6323 
6324   delete id.decl_map;
6325   if (id.debug_map)
6326     delete id.debug_map;
6327 
6328   TYPE_CANONICAL (type) = type;
6329 
6330   return type;
6331 }
6332 
6333 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6334    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6335    evaluation.  */
6336 
6337 tree
6338 copy_fn (tree fn, tree& parms, tree& result)
6339 {
6340   copy_body_data id;
6341   tree param;
6342   hash_map<tree, tree> decl_map;
6343 
6344   tree *p = &parms;
6345   *p = NULL_TREE;
6346 
6347   memset (&id, 0, sizeof (id));
6348   id.src_fn = fn;
6349   id.dst_fn = current_function_decl;
6350   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6351   id.decl_map = &decl_map;
6352 
6353   id.copy_decl = copy_decl_no_change;
6354   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6355   id.transform_new_cfg = false;
6356   id.transform_return_to_modify = false;
6357   id.transform_parameter = true;
6358   id.transform_lang_insert_block = NULL;
6359 
6360   /* Make sure not to unshare trees behind the front-end's back
6361      since front-end specific mechanisms may rely on sharing.  */
6362   id.regimplify = false;
6363   id.do_not_unshare = true;
6364 
6365   /* We're not inside any EH region.  */
6366   id.eh_lp_nr = 0;
6367 
6368   /* Remap the parameters and result and return them to the caller.  */
6369   for (param = DECL_ARGUMENTS (fn);
6370        param;
6371        param = DECL_CHAIN (param))
6372     {
6373       *p = remap_decl (param, &id);
6374       p = &DECL_CHAIN (*p);
6375     }
6376 
6377   if (DECL_RESULT (fn))
6378     result = remap_decl (DECL_RESULT (fn), &id);
6379   else
6380     result = NULL_TREE;
6381 
6382   return copy_tree_body (&id);
6383 }
6384
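/* Illustrative sketch, assuming the intended C++ constexpr-evaluation use
   noted above: obtaining an unshared body together with the remapped
   parameters and result looks roughly like this; binding the actual
   arguments to PARMS is the caller's job.  */
#if 0
  tree parms, result;
  tree body = copy_fn (fndecl, parms, result);
  /* PARMS chains fresh copies of FNDECL's PARM_DECLs, RESULT is the
     remapped RESULT_DECL, and BODY is an unshared copy of
     DECL_SAVED_TREE (fndecl) that refers to those copies.  */
#endif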