xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-inline.c (revision f3cfa6f6ce31685c6c4a758bc430e69eb99f50a4)
1 /* Tree inlining.
2    Copyright (C) 2001-2016 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "tree-chkp.h"
60 
61 
62 /* I'm not really happy about this, but we need to handle both gimple and
63    non-gimple trees.  */
64 
65 /* Inlining, Cloning, Versioning, Parallelization
66 
67    Inlining: a function body is duplicated, but the PARM_DECLs are
68    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
69    MODIFY_EXPRs that store to a dedicated returned-value variable.
70    The duplicated eh_region info of the copy will later be appended
71    to the info for the caller; the eh_region info in copied throwing
72    statements and RESX statements is adjusted accordingly.
73 
74    Cloning: (only in C++) We have one body for a con/de/structor, and
75    multiple function decls, each with a unique parameter list.
76    Duplicate the body, using the given splay tree; some parameters
77    will become constants (like 0 or 1).
78 
79    Versioning: a function body is duplicated, and the result is a new
80    function rather than being inserted into the blocks of an existing
81    function as with inlining.  Some parameters will become constants.
82 
83    Parallelization: a region of a function is duplicated resulting in
84    a new function.  Variables may be replaced with complex expressions
85    to enable shared variable semantics.
86 
87    All of these will simultaneously look up any callgraph edges.  If
88    we're going to inline the duplicated function body, and the given
89    function has some cloned callgraph nodes (one for each place this
90    function will be inlined) those callgraph edges will be duplicated.
91    If we're cloning the body, those callgraph edges will be
92    updated to point into the new body.  (Note that the original
93    callgraph node and edge list will not be altered.)
94 
95    See the CALL_EXPR handling case in copy_tree_body_r ().  */
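
/* A minimal illustrative sketch of the inlining transform described
   above; the names are hypothetical and not taken from a real test case.
   For a callee

       int inc (int x) { return x + 1; }

   a call "y = inc (a);" in the caller roughly becomes

       x.1 = a;              <- the PARM_DECL remapped to a local VAR_DECL
       retval.2 = x.1 + 1;   <- the RETURN_EXPR rewritten to a MODIFY_EXPR
       y = retval.2;         <- the dedicated returned-value variable  */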
96 
97 /* To Do:
98 
99    o In order to make inlining-on-trees work, we pessimized
100      function-local static constants.  In particular, they are now
101      always output, even when not addressed.  Fix this by treating
102      function-local static constants just like global static
103      constants; the back-end already knows not to output them if they
104      are not needed.
105 
106    o Provide heuristics to clamp inlining of recursive template
107      calls?  */
108 
109 
110 /* Weights that estimate_num_insns uses to estimate the size of the
111    produced code.  */
112 
113 eni_weights eni_size_weights;
114 
115 /* Weights that estimate_num_insns uses to estimate the time necessary
116    to execute the produced code.  */
117 
118 eni_weights eni_time_weights;
119 
120 /* Prototypes.  */
121 
122 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
123 				     basic_block);
124 static void remap_block (tree *, copy_body_data *);
125 static void copy_bind_expr (tree *, int *, copy_body_data *);
126 static void declare_inline_vars (tree, tree);
127 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
128 static void prepend_lexical_block (tree current_block, tree new_block);
129 static tree copy_decl_to_var (tree, copy_body_data *);
130 static tree copy_result_decl_to_var (tree, copy_body_data *);
131 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
132 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
133 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
134 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
135 
136 /* Insert a tree->tree mapping for ID.  Although the name suggests
137    that the trees should be variables, it is used for more than that.  */
138 
139 void
140 insert_decl_map (copy_body_data *id, tree key, tree value)
141 {
142   id->decl_map->put (key, value);
143 
144   /* Always insert an identity map as well.  If we see this same new
145      node again, we won't want to duplicate it a second time.  */
146   if (key != value)
147     id->decl_map->put (value, value);
148 }
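
/* For illustration of the identity mapping above (OLD_PARM and NEW_VAR are
   hypothetical names): after insert_decl_map (id, old_parm, new_var) the
   map sends both OLD_PARM and NEW_VAR to NEW_VAR, so encountering the
   already-remapped NEW_VAR again later in the walk does not produce yet
   another copy.  */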
149 
150 /* Insert a tree->tree mapping for ID.  This is only used for
151    variables.  */
152 
153 static void
154 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
155 {
156   if (!gimple_in_ssa_p (id->src_cfun))
157     return;
158 
159   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
160     return;
161 
162   if (!target_for_debug_bind (key))
163     return;
164 
165   gcc_assert (TREE_CODE (key) == PARM_DECL);
166   gcc_assert (TREE_CODE (value) == VAR_DECL);
167 
168   if (!id->debug_map)
169     id->debug_map = new hash_map<tree, tree>;
170 
171   id->debug_map->put (key, value);
172 }
173 
174 /* If nonzero, we're remapping the contents of inlined debug
175    statements.  If negative, an error has occurred, such as a
176    reference to a variable that isn't available in the inlined
177    context.  */
178 static int processing_debug_stmt = 0;
179 
180 /* Construct new SSA name for old NAME. ID is the inline context.  */
181 
182 static tree
183 remap_ssa_name (tree name, copy_body_data *id)
184 {
185   tree new_tree, var;
186   tree *n;
187 
188   gcc_assert (TREE_CODE (name) == SSA_NAME);
189 
190   n = id->decl_map->get (name);
191   if (n)
192     return unshare_expr (*n);
193 
194   if (processing_debug_stmt)
195     {
196       if (SSA_NAME_IS_DEFAULT_DEF (name)
197 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
198 	  && id->entry_bb == NULL
199 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
200 	{
201 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
202 	  gimple *def_temp;
203 	  gimple_stmt_iterator gsi;
204 	  tree val = SSA_NAME_VAR (name);
205 
206 	  n = id->decl_map->get (val);
207 	  if (n != NULL)
208 	    val = *n;
209 	  if (TREE_CODE (val) != PARM_DECL)
210 	    {
211 	      processing_debug_stmt = -1;
212 	      return name;
213 	    }
214 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
215 	  DECL_ARTIFICIAL (vexpr) = 1;
216 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
217 	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
218 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
219 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
220 	  return vexpr;
221 	}
222 
223       processing_debug_stmt = -1;
224       return name;
225     }
226 
227   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
228   var = SSA_NAME_VAR (name);
229   if (!var
230       || (!SSA_NAME_IS_DEFAULT_DEF (name)
231 	  && TREE_CODE (var) == VAR_DECL
232 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
233 	  && DECL_ARTIFICIAL (var)
234 	  && DECL_IGNORED_P (var)
235 	  && !DECL_NAME (var)))
236     {
237       struct ptr_info_def *pi;
238       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
239       if (!var && SSA_NAME_IDENTIFIER (name))
240 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
241       insert_decl_map (id, name, new_tree);
242       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
243 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
244       /* At least IPA points-to info can be directly transferred.  */
245       if (id->src_cfun->gimple_df
246 	  && id->src_cfun->gimple_df->ipa_pta
247 	  && (pi = SSA_NAME_PTR_INFO (name))
248 	  && !pi->pt.anything)
249 	{
250 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
251 	  new_pi->pt = pi->pt;
252 	}
253       return new_tree;
254     }
255 
256   /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
257      that in copy_bb.  */
258   new_tree = remap_decl (var, id);
259 
260   /* We might've substituted a constant or another SSA_NAME for
261      the variable.
262 
263      Replace the SSA name representing the RESULT_DECL by a variable during
264      inlining:  this saves us from the need to introduce a PHI node in case
265      the return value is only partly initialized.  */
266   if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
267       && (!SSA_NAME_VAR (name)
268 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
269 	  || !id->transform_return_to_modify))
270     {
271       struct ptr_info_def *pi;
272       new_tree = make_ssa_name (new_tree);
273       insert_decl_map (id, name, new_tree);
274       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
275 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
276       /* At least IPA points-to info can be directly transferred.  */
277       if (id->src_cfun->gimple_df
278 	  && id->src_cfun->gimple_df->ipa_pta
279 	  && (pi = SSA_NAME_PTR_INFO (name))
280 	  && !pi->pt.anything)
281 	{
282 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
283 	  new_pi->pt = pi->pt;
284 	}
285       if (SSA_NAME_IS_DEFAULT_DEF (name))
286 	{
287 	  /* By inlining a function that has an uninitialized variable, we might
288 	     extend its lifetime (the variable might get reused).  This causes an
289 	     ICE in case we end up extending the lifetime of an SSA name across an
290 	     abnormal edge, but it also increases register pressure.
291 
292 	     We simply initialize all uninitialized vars to 0, except when we
293 	     are inlining into the very first BB.  We could avoid doing this for
294 	     all BBs that are not inside strongly connected regions of the CFG,
295 	     but that is expensive to test.  */
296 	  if (id->entry_bb
297 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
298 	      && (!SSA_NAME_VAR (name)
299 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
300 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
301 					     0)->dest
302 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
303 	    {
304 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
305 	      gimple *init_stmt;
306 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
307 
308 	      init_stmt = gimple_build_assign (new_tree, zero);
309 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
310 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
311 	    }
312 	  else
313 	    {
314 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
315 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
316 	    }
317 	}
318     }
319   else
320     insert_decl_map (id, name, new_tree);
321   return new_tree;
322 }
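
/* A hedged sketch of the default-definition handling above, with
   hypothetical names: if the callee uses an uninitialized local whose SSA
   name u_1(D) occurs in an abnormal PHI, the name is remapped to a fresh
   caller-side name u.3_7, and unless we are inlining into the very first
   BB an explicit "u.3_7 = 0;" is appended to id->entry_bb so that no
   undefined name is live across an abnormal edge in the copy.  */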
323 
324 /* Remap DECL during the copying of the BLOCK tree for the function.  */
325 
326 tree
327 remap_decl (tree decl, copy_body_data *id)
328 {
329   tree *n;
330 
331   /* We only remap local variables in the current function.  */
332 
333   /* See if we have remapped this declaration.  */
334 
335   n = id->decl_map->get (decl);
336 
337   if (!n && processing_debug_stmt)
338     {
339       processing_debug_stmt = -1;
340       return decl;
341     }
342 
343   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
344      necessary DECLs have already been remapped and we do not want to duplicate
345      a decl coming from outside of the sequence we are copying.  */
346   if (!n
347       && id->prevent_decl_creation_for_types
348       && id->remapping_type_depth > 0
349       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
350     return decl;
351 
352   /* If we didn't already have an equivalent for this declaration, create one
353      now.  */
354   if (!n)
355     {
356       /* Make a copy of the variable or label.  */
357       tree t = id->copy_decl (decl, id);
358 
359       /* Remember it, so that if we encounter this local entity again
360 	 we can reuse this copy.  Do this early because remap_type may
361 	 need this decl for TYPE_STUB_DECL.  */
362       insert_decl_map (id, decl, t);
363 
364       if (!DECL_P (t))
365 	return t;
366 
367       /* Remap types, if necessary.  */
368       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
369       if (TREE_CODE (t) == TYPE_DECL)
370         DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
371 
372       /* Remap sizes as necessary.  */
373       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
374       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
375 
376       /* If fields, do likewise for offset and qualifier.  */
377       if (TREE_CODE (t) == FIELD_DECL)
378 	{
379 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
380 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
381 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
382 	}
383 
384       return t;
385     }
386 
387   if (id->do_not_unshare)
388     return *n;
389   else
390     return unshare_expr (*n);
391 }
392 
393 static tree
394 remap_type_1 (tree type, copy_body_data *id)
395 {
396   tree new_tree, t;
397 
398   /* We do need a copy.  Build and register it now.  If this is a pointer or
399      reference type, remap the designated type and make a new pointer or
400      reference type.  */
401   if (TREE_CODE (type) == POINTER_TYPE)
402     {
403       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
404 					 TYPE_MODE (type),
405 					 TYPE_REF_CAN_ALIAS_ALL (type));
406       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
407 	new_tree = build_type_attribute_qual_variant (new_tree,
408 						      TYPE_ATTRIBUTES (type),
409 						      TYPE_QUALS (type));
410       insert_decl_map (id, type, new_tree);
411       return new_tree;
412     }
413   else if (TREE_CODE (type) == REFERENCE_TYPE)
414     {
415       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
416 					    TYPE_MODE (type),
417 					    TYPE_REF_CAN_ALIAS_ALL (type));
418       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
419 	new_tree = build_type_attribute_qual_variant (new_tree,
420 						      TYPE_ATTRIBUTES (type),
421 						      TYPE_QUALS (type));
422       insert_decl_map (id, type, new_tree);
423       return new_tree;
424     }
425   else
426     new_tree = copy_node (type);
427 
428   insert_decl_map (id, type, new_tree);
429 
430   /* This is a new type, not a copy of an old type.  Need to reassociate
431      variants.  We can handle everything except the main variant lazily.  */
432   t = TYPE_MAIN_VARIANT (type);
433   if (type != t)
434     {
435       t = remap_type (t, id);
436       TYPE_MAIN_VARIANT (new_tree) = t;
437       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
438       TYPE_NEXT_VARIANT (t) = new_tree;
439     }
440   else
441     {
442       TYPE_MAIN_VARIANT (new_tree) = new_tree;
443       TYPE_NEXT_VARIANT (new_tree) = NULL;
444     }
445 
446   if (TYPE_STUB_DECL (type))
447     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
448 
449   /* Lazily create pointer and reference types.  */
450   TYPE_POINTER_TO (new_tree) = NULL;
451   TYPE_REFERENCE_TO (new_tree) = NULL;
452 
453   /* Copy all types that may contain references to local variables; be sure to
454      preserve sharing between the type and its main variant when possible.  */
455   switch (TREE_CODE (new_tree))
456     {
457     case INTEGER_TYPE:
458     case REAL_TYPE:
459     case FIXED_POINT_TYPE:
460     case ENUMERAL_TYPE:
461     case BOOLEAN_TYPE:
462       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
463 	{
464 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
465 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
466 
467 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
468 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
469 	}
470       else
471 	{
472 	  t = TYPE_MIN_VALUE (new_tree);
473 	  if (t && TREE_CODE (t) != INTEGER_CST)
474 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
475 
476 	  t = TYPE_MAX_VALUE (new_tree);
477 	  if (t && TREE_CODE (t) != INTEGER_CST)
478 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
479 	}
480       return new_tree;
481 
482     case FUNCTION_TYPE:
483       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
484 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
485 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
486       else
487         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
488       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
489 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
490 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
491       else
492         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
493       return new_tree;
494 
495     case ARRAY_TYPE:
496       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
497 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
498 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
499       else
500 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
501 
502       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
503 	{
504 	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
505 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
506 	}
507       else
508 	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
509       break;
510 
511     case RECORD_TYPE:
512     case UNION_TYPE:
513     case QUAL_UNION_TYPE:
514       if (TYPE_MAIN_VARIANT (type) != type
515 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
516 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
517       else
518 	{
519 	  tree f, nf = NULL;
520 
521 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
522 	    {
523 	      t = remap_decl (f, id);
524 	      DECL_CONTEXT (t) = new_tree;
525 	      DECL_CHAIN (t) = nf;
526 	      nf = t;
527 	    }
528 	  TYPE_FIELDS (new_tree) = nreverse (nf);
529 	}
530       break;
531 
532     case OFFSET_TYPE:
533     default:
534       /* Shouldn't have been thought variable sized.  */
535       gcc_unreachable ();
536     }
537 
538   /* All variants of the type share the same size, so use the already remapped data.  */
539   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
540     {
541       gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
542       gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
543 
544       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
545       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
546     }
547   else
548     {
549       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
550       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
551     }
552 
553   return new_tree;
554 }
555 
556 tree
557 remap_type (tree type, copy_body_data *id)
558 {
559   tree *node;
560   tree tmp;
561 
562   if (type == NULL)
563     return type;
564 
565   /* See if we have remapped this type.  */
566   node = id->decl_map->get (type);
567   if (node)
568     return *node;
569 
570   /* The type only needs remapping if it's variably modified.  */
571   if (! variably_modified_type_p (type, id->src_fn))
572     {
573       insert_decl_map (id, type, type);
574       return type;
575     }
576 
577   id->remapping_type_depth++;
578   tmp = remap_type_1 (type, id);
579   id->remapping_type_depth--;
580 
581   return tmp;
582 }
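
/* For example (an illustrative assumption, not a test case from the
   sources): in "void f (int n) { int a[n]; ... }" the type of A is
   variably modified, since its size involves the remapped copy of N, so
   remap_type_1 builds a fresh ARRAY_TYPE for the clone; a fixed-size type
   such as "int[10]" is not variably modified and is simply mapped to
   itself.  */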
583 
584 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
585 
586 static bool
587 can_be_nonlocal (tree decl, copy_body_data *id)
588 {
589   /* We cannot duplicate function decls.  */
590   if (TREE_CODE (decl) == FUNCTION_DECL)
591     return true;
592 
593   /* Local static vars must be non-local or we get multiple declaration
594      problems.  */
595   if (TREE_CODE (decl) == VAR_DECL
596       && !auto_var_in_fn_p (decl, id->src_fn))
597     return true;
598 
599   return false;
600 }
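
/* For illustration: a function-local "static int counter;" is not an auto
   variable of id->src_fn, so can_be_nonlocal returns true for it; the
   callers below then record it in BLOCK_NONLOCALIZED_VARS instead of
   remapping it, which would otherwise create a second, distinct static.  */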
601 
602 static tree
603 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
604 	     copy_body_data *id)
605 {
606   tree old_var;
607   tree new_decls = NULL_TREE;
608 
609   /* Remap its variables.  */
610   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
611     {
612       tree new_var;
613 
614       if (can_be_nonlocal (old_var, id))
615 	{
616 	  /* We need to add this variable to the local decls as otherwise
617 	     nothing else will do so.  */
618 	  if (TREE_CODE (old_var) == VAR_DECL
619 	      && ! DECL_EXTERNAL (old_var)
620 	      && cfun)
621 	    add_local_decl (cfun, old_var);
622 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
623 	      && !DECL_IGNORED_P (old_var)
624 	      && nonlocalized_list)
625 	    vec_safe_push (*nonlocalized_list, old_var);
626 	  continue;
627 	}
628 
629       /* Remap the variable.  */
630       new_var = remap_decl (old_var, id);
631 
632       /* If we didn't remap this variable, we can't mess with its
633 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
634 	 already declared somewhere else, so don't declare it here.  */
635 
636       if (new_var == id->retvar)
637 	;
638       else if (!new_var)
639         {
640 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
641 	      && !DECL_IGNORED_P (old_var)
642 	      && nonlocalized_list)
643 	    vec_safe_push (*nonlocalized_list, old_var);
644 	}
645       else
646 	{
647 	  gcc_assert (DECL_P (new_var));
648 	  DECL_CHAIN (new_var) = new_decls;
649 	  new_decls = new_var;
650 
651 	  /* Also copy value-expressions.  */
652 	  if (TREE_CODE (new_var) == VAR_DECL
653 	      && DECL_HAS_VALUE_EXPR_P (new_var))
654 	    {
655 	      tree tem = DECL_VALUE_EXPR (new_var);
656 	      bool old_regimplify = id->regimplify;
657 	      id->remapping_type_depth++;
658 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
659 	      id->remapping_type_depth--;
660 	      id->regimplify = old_regimplify;
661 	      SET_DECL_VALUE_EXPR (new_var, tem);
662 	    }
663 	}
664     }
665 
666   return nreverse (new_decls);
667 }
668 
669 /* Copy the BLOCK to contain remapped versions of the variables
670    therein, and hook the new block into the block tree.  */
671 
672 static void
673 remap_block (tree *block, copy_body_data *id)
674 {
675   tree old_block;
676   tree new_block;
677 
678   /* Make the new block.  */
679   old_block = *block;
680   new_block = make_node (BLOCK);
681   TREE_USED (new_block) = TREE_USED (old_block);
682   BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
683   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
684   BLOCK_NONLOCALIZED_VARS (new_block)
685     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
686   *block = new_block;
687 
688   /* Remap its variables.  */
689   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
690   					&BLOCK_NONLOCALIZED_VARS (new_block),
691 					id);
692 
693   if (id->transform_lang_insert_block)
694     id->transform_lang_insert_block (new_block);
695 
696   /* Remember the remapped block.  */
697   insert_decl_map (id, old_block, new_block);
698 }
699 
700 /* Copy the whole block tree and root it in id->block.  */
701 static tree
702 remap_blocks (tree block, copy_body_data *id)
703 {
704   tree t;
705   tree new_tree = block;
706 
707   if (!block)
708     return NULL;
709 
710   remap_block (&new_tree, id);
711   gcc_assert (new_tree != block);
712   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
713     prepend_lexical_block (new_tree, remap_blocks (t, id));
714   /* Blocks are in arbitrary order, but to make things slightly prettier, do
715      not swap the order when producing a copy.  */
716   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
717   return new_tree;
718 }
719 
720 /* Remap the block tree rooted at BLOCK to nothing.  */
721 static void
722 remap_blocks_to_null (tree block, copy_body_data *id)
723 {
724   tree t;
725   insert_decl_map (id, block, NULL_TREE);
726   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
727     remap_blocks_to_null (t, id);
728 }
729 
730 static void
731 copy_statement_list (tree *tp)
732 {
733   tree_stmt_iterator oi, ni;
734   tree new_tree;
735 
736   new_tree = alloc_stmt_list ();
737   ni = tsi_start (new_tree);
738   oi = tsi_start (*tp);
739   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
740   *tp = new_tree;
741 
742   for (; !tsi_end_p (oi); tsi_next (&oi))
743     {
744       tree stmt = tsi_stmt (oi);
745       if (TREE_CODE (stmt) == STATEMENT_LIST)
746 	/* This copy is not redundant; tsi_link_after will smash this
747 	   STATEMENT_LIST into the end of the one we're building, and we
748 	   don't want to do that with the original.  */
749 	copy_statement_list (&stmt);
750       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
751     }
752 }
753 
754 static void
755 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
756 {
757   tree block = BIND_EXPR_BLOCK (*tp);
758   /* Copy (and replace) the statement.  */
759   copy_tree_r (tp, walk_subtrees, NULL);
760   if (block)
761     {
762       remap_block (&block, id);
763       BIND_EXPR_BLOCK (*tp) = block;
764     }
765 
766   if (BIND_EXPR_VARS (*tp))
767     /* This will remap a lot of the same decls again, but this should be
768        harmless.  */
769     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
770 }
771 
772 
773 /* Create a new gimple_seq by remapping all the statements in BODY
774    using the inlining information in ID.  */
775 
776 static gimple_seq
777 remap_gimple_seq (gimple_seq body, copy_body_data *id)
778 {
779   gimple_stmt_iterator si;
780   gimple_seq new_body = NULL;
781 
782   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
783     {
784       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
785       gimple_seq_add_seq (&new_body, new_stmts);
786     }
787 
788   return new_body;
789 }
790 
791 
792 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
793    block using the mapping information in ID.  */
794 
795 static gimple *
796 copy_gimple_bind (gbind *stmt, copy_body_data *id)
797 {
798   gimple *new_bind;
799   tree new_block, new_vars;
800   gimple_seq body, new_body;
801 
802   /* Copy the statement.  Note that we purposely don't use copy_stmt
803      here because we need to remap statements as we copy.  */
804   body = gimple_bind_body (stmt);
805   new_body = remap_gimple_seq (body, id);
806 
807   new_block = gimple_bind_block (stmt);
808   if (new_block)
809     remap_block (&new_block, id);
810 
811   /* This will remap a lot of the same decls again, but this should be
812      harmless.  */
813   new_vars = gimple_bind_vars (stmt);
814   if (new_vars)
815     new_vars = remap_decls (new_vars, NULL, id);
816 
817   new_bind = gimple_build_bind (new_vars, new_body, new_block);
818 
819   return new_bind;
820 }
821 
822 /* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */
823 
824 static bool
825 is_parm (tree decl)
826 {
827   if (TREE_CODE (decl) == SSA_NAME)
828     {
829       decl = SSA_NAME_VAR (decl);
830       if (!decl)
831 	return false;
832     }
833 
834   return (TREE_CODE (decl) == PARM_DECL);
835 }
836 
837 /* Remap the dependence CLIQUE from the source to the destination function
838    as specified in ID.  */
839 
840 static unsigned short
841 remap_dependence_clique (copy_body_data *id, unsigned short clique)
842 {
843   if (clique == 0 || processing_debug_stmt)
844     return 0;
845   if (!id->dependence_map)
846     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
847   bool existed;
848   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
849   if (!existed)
850     newc = ++cfun->last_clique;
851   return newc;
852 }
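
/* For illustration with hypothetical numbers: if the source function used
   dependence clique 1 on several MEM_REFs, the first copied reference
   allocates a fresh clique in the destination function, say 7, and every
   later occurrence of clique 1 in this copy reuses 7 via
   id->dependence_map, so restrict-based disambiguation stays consistent
   within the copied body.  */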
853 
854 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
855    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
856    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
857    recursing into the child nodes of *TP.  */
858 
859 static tree
860 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
861 {
862   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
863   copy_body_data *id = (copy_body_data *) wi_p->info;
864   tree fn = id->src_fn;
865 
866   if (TREE_CODE (*tp) == SSA_NAME)
867     {
868       *tp = remap_ssa_name (*tp, id);
869       *walk_subtrees = 0;
870       return NULL;
871     }
872   else if (auto_var_in_fn_p (*tp, fn))
873     {
874       /* Local variables and labels need to be replaced by equivalent
875 	 variables.  We don't want to copy static variables; there's
876 	 only one of those, no matter how many times we inline the
877 	 containing function.  Similarly for globals from an outer
878 	 function.  */
879       tree new_decl;
880 
881       /* Remap the declaration.  */
882       new_decl = remap_decl (*tp, id);
883       gcc_assert (new_decl);
884       /* Replace this variable with the copy.  */
885       STRIP_TYPE_NOPS (new_decl);
886       /* ???  The C++ frontend uses void * pointer zero to initialize
887          any other type.  This confuses the middle-end type verification.
888 	 As cloned bodies do not go through gimplification again, the fixup
889 	 there doesn't trigger.  */
890       if (TREE_CODE (new_decl) == INTEGER_CST
891 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
892 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
893       *tp = new_decl;
894       *walk_subtrees = 0;
895     }
896   else if (TREE_CODE (*tp) == STATEMENT_LIST)
897     gcc_unreachable ();
898   else if (TREE_CODE (*tp) == SAVE_EXPR)
899     gcc_unreachable ();
900   else if (TREE_CODE (*tp) == LABEL_DECL
901 	   && (!DECL_CONTEXT (*tp)
902 	       || decl_function_context (*tp) == id->src_fn))
903     /* These may need to be remapped for EH handling.  */
904     *tp = remap_decl (*tp, id);
905   else if (TREE_CODE (*tp) == FIELD_DECL)
906     {
907       /* If the enclosing record type is variably_modified_type_p, the field
908 	 has already been remapped.  Otherwise, it need not be.  */
909       tree *n = id->decl_map->get (*tp);
910       if (n)
911 	*tp = *n;
912       *walk_subtrees = 0;
913     }
914   else if (TYPE_P (*tp))
915     /* Types may need remapping as well.  */
916     *tp = remap_type (*tp, id);
917   else if (CONSTANT_CLASS_P (*tp))
918     {
919       /* If this is a constant, we have to copy the node iff the type
920 	 will be remapped.  copy_tree_r will not copy a constant.  */
921       tree new_type = remap_type (TREE_TYPE (*tp), id);
922 
923       if (new_type == TREE_TYPE (*tp))
924 	*walk_subtrees = 0;
925 
926       else if (TREE_CODE (*tp) == INTEGER_CST)
927 	*tp = wide_int_to_tree (new_type, *tp);
928       else
929 	{
930 	  *tp = copy_node (*tp);
931 	  TREE_TYPE (*tp) = new_type;
932 	}
933     }
934   else
935     {
936       /* Otherwise, just copy the node.  Note that copy_tree_r already
937 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
938 
939       if (TREE_CODE (*tp) == MEM_REF)
940 	{
941 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
942 	     that can happen when a pointer argument is an ADDR_EXPR.
943 	     Recurse here manually to allow that.  */
944 	  tree ptr = TREE_OPERAND (*tp, 0);
945 	  tree type = remap_type (TREE_TYPE (*tp), id);
946 	  tree old = *tp;
947 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
948 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
949 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
950 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
951 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
952 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
953 	    {
954 	      MR_DEPENDENCE_CLIQUE (*tp)
955 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
956 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
957 	    }
958 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
959 	     remapped a parameter as the property might be valid only
960 	     for the parameter itself.  */
961 	  if (TREE_THIS_NOTRAP (old)
962 	      && (!is_parm (TREE_OPERAND (old, 0))
963 		  || (!id->transform_parameter && is_parm (ptr))))
964 	    TREE_THIS_NOTRAP (*tp) = 1;
965 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
966 	  *walk_subtrees = 0;
967 	  return NULL;
968 	}
969 
970       /* Here is the "usual case".  Copy this tree node, and then
971 	 tweak some special cases.  */
972       copy_tree_r (tp, walk_subtrees, NULL);
973 
974       if (TREE_CODE (*tp) != OMP_CLAUSE)
975 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
976 
977       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
978 	{
979 	  /* The copied TARGET_EXPR has never been expanded, even if the
980 	     original node was expanded already.  */
981 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
982 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
983 	}
984       else if (TREE_CODE (*tp) == ADDR_EXPR)
985 	{
986 	  /* Variable substitution need not be simple.  In particular,
987 	     the MEM_REF substitution above.  Make sure that
988 	     TREE_CONSTANT and friends are up-to-date.  */
989 	  int invariant = is_gimple_min_invariant (*tp);
990 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
991 	  recompute_tree_invariant_for_addr_expr (*tp);
992 
993 	  /* If this used to be invariant, but is not any longer,
994 	     then regimplification is probably needed.  */
995 	  if (invariant && !is_gimple_min_invariant (*tp))
996 	    id->regimplify = true;
997 
998 	  *walk_subtrees = 0;
999 	}
1000     }
1001 
1002   /* Update the TREE_BLOCK for the cloned expr.  */
1003   if (EXPR_P (*tp))
1004     {
1005       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1006       tree old_block = TREE_BLOCK (*tp);
1007       if (old_block)
1008 	{
1009 	  tree *n;
1010 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1011 	  if (n)
1012 	    new_block = *n;
1013 	}
1014       TREE_SET_BLOCK (*tp, new_block);
1015     }
1016 
1017   /* Keep iterating.  */
1018   return NULL_TREE;
1019 }
1020 
1021 
1022 /* Called from copy_body_id via walk_tree.  DATA is really a
1023    `copy_body_data *'.  */
1024 
1025 tree
1026 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1027 {
1028   copy_body_data *id = (copy_body_data *) data;
1029   tree fn = id->src_fn;
1030   tree new_block;
1031 
1032   /* Begin by recognizing trees that we'll completely rewrite for the
1033      inlining context.  Our output for these trees is completely
1034      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1035      into an edge).  Further down, we'll handle trees that get
1036      duplicated and/or tweaked.  */
1037 
1038   /* When requested, RETURN_EXPRs should be transformed to just the
1039      contained MODIFY_EXPR.  The branch semantics of the return will
1040      be handled elsewhere by manipulating the CFG rather than a statement.  */
1041   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1042     {
1043       tree assignment = TREE_OPERAND (*tp, 0);
1044 
1045       /* If we're returning something, just turn that into an
1046 	 assignment into the equivalent of the original RESULT_DECL.
1047 	 If the "assignment" is just the result decl, the result
1048 	 decl has already been set (e.g. a recent "foo (&result_decl,
1049 	 ...)"); just toss the entire RETURN_EXPR.  */
1050       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1051 	{
1052 	  /* Replace the RETURN_EXPR with (a copy of) the
1053 	     MODIFY_EXPR hanging underneath.  */
1054 	  *tp = copy_node (assignment);
1055 	}
1056       else /* Else the RETURN_EXPR returns no value.  */
1057 	{
1058 	  *tp = NULL;
1059 	  return (tree) (void *)1;
1060 	}
1061     }
1062   else if (TREE_CODE (*tp) == SSA_NAME)
1063     {
1064       *tp = remap_ssa_name (*tp, id);
1065       *walk_subtrees = 0;
1066       return NULL;
1067     }
1068 
1069   /* Local variables and labels need to be replaced by equivalent
1070      variables.  We don't want to copy static variables; there's only
1071      one of those, no matter how many times we inline the containing
1072      function.  Similarly for globals from an outer function.  */
1073   else if (auto_var_in_fn_p (*tp, fn))
1074     {
1075       tree new_decl;
1076 
1077       /* Remap the declaration.  */
1078       new_decl = remap_decl (*tp, id);
1079       gcc_assert (new_decl);
1080       /* Replace this variable with the copy.  */
1081       STRIP_TYPE_NOPS (new_decl);
1082       *tp = new_decl;
1083       *walk_subtrees = 0;
1084     }
1085   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1086     copy_statement_list (tp);
1087   else if (TREE_CODE (*tp) == SAVE_EXPR
1088 	   || TREE_CODE (*tp) == TARGET_EXPR)
1089     remap_save_expr (tp, id->decl_map, walk_subtrees);
1090   else if (TREE_CODE (*tp) == LABEL_DECL
1091 	   && (! DECL_CONTEXT (*tp)
1092 	       || decl_function_context (*tp) == id->src_fn))
1093     /* These may need to be remapped for EH handling.  */
1094     *tp = remap_decl (*tp, id);
1095   else if (TREE_CODE (*tp) == BIND_EXPR)
1096     copy_bind_expr (tp, walk_subtrees, id);
1097   /* Types may need remapping as well.  */
1098   else if (TYPE_P (*tp))
1099     *tp = remap_type (*tp, id);
1100 
1101   /* If this is a constant, we have to copy the node iff the type will be
1102      remapped.  copy_tree_r will not copy a constant.  */
1103   else if (CONSTANT_CLASS_P (*tp))
1104     {
1105       tree new_type = remap_type (TREE_TYPE (*tp), id);
1106 
1107       if (new_type == TREE_TYPE (*tp))
1108 	*walk_subtrees = 0;
1109 
1110       else if (TREE_CODE (*tp) == INTEGER_CST)
1111 	*tp = wide_int_to_tree (new_type, *tp);
1112       else
1113 	{
1114 	  *tp = copy_node (*tp);
1115 	  TREE_TYPE (*tp) = new_type;
1116 	}
1117     }
1118 
1119   /* Otherwise, just copy the node.  Note that copy_tree_r already
1120      knows not to copy VAR_DECLs, etc., so this is safe.  */
1121   else
1122     {
1123       /* Here we handle trees that are not completely rewritten.
1124 	 First we detect some inlining-induced bogosities for
1125 	 discarding.  */
1126       if (TREE_CODE (*tp) == MODIFY_EXPR
1127 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1128 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1129 	{
1130 	  /* Some assignments VAR = VAR; don't generate any rtl code
1131 	     and thus don't count as variable modification.  Avoid
1132 	     keeping bogosities like 0 = 0.  */
1133 	  tree decl = TREE_OPERAND (*tp, 0), value;
1134 	  tree *n;
1135 
1136 	  n = id->decl_map->get (decl);
1137 	  if (n)
1138 	    {
1139 	      value = *n;
1140 	      STRIP_TYPE_NOPS (value);
1141 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1142 		{
1143 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1144 		  return copy_tree_body_r (tp, walk_subtrees, data);
1145 		}
1146 	    }
1147 	}
1148       else if (TREE_CODE (*tp) == INDIRECT_REF)
1149 	{
1150 	  /* Get rid of *& from inline substitutions that can happen when a
1151 	     pointer argument is an ADDR_EXPR.  */
1152 	  tree decl = TREE_OPERAND (*tp, 0);
1153 	  tree *n = id->decl_map->get (decl);
1154 	  if (n)
1155 	    {
1156 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1157 	         it manually here as we'll eventually get ADDR_EXPRs
1158 		 which lie about their types pointed to.  In this case
1159 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1160 		 but we absolutely rely on that.  As fold_indirect_ref
1161 	         does other useful transformations, try that first, though.  */
1162 	      tree type = TREE_TYPE (*tp);
1163 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1164 	      tree old = *tp;
1165 	      *tp = gimple_fold_indirect_ref (ptr);
1166 	      if (! *tp)
1167 	        {
1168 		  type = remap_type (type, id);
1169 		  if (TREE_CODE (ptr) == ADDR_EXPR)
1170 		    {
1171 		      *tp
1172 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1173 		      /* ???  We should either assert here or build
1174 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1175 			 incompatible types to our IL.  */
1176 		      if (! *tp)
1177 			*tp = TREE_OPERAND (ptr, 0);
1178 		    }
1179 	          else
1180 		    {
1181 	              *tp = build1 (INDIRECT_REF, type, ptr);
1182 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1183 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1184 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1185 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1186 			 have remapped a parameter as the property might be
1187 			 valid only for the parameter itself.  */
1188 		      if (TREE_THIS_NOTRAP (old)
1189 			  && (!is_parm (TREE_OPERAND (old, 0))
1190 			      || (!id->transform_parameter && is_parm (ptr))))
1191 		        TREE_THIS_NOTRAP (*tp) = 1;
1192 		    }
1193 		}
1194 	      *walk_subtrees = 0;
1195 	      return NULL;
1196 	    }
1197 	}
1198       else if (TREE_CODE (*tp) == MEM_REF)
1199 	{
1200 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1201 	     that can happen when a pointer argument is an ADDR_EXPR.
1202 	     Recurse here manually to allow that.  */
1203 	  tree ptr = TREE_OPERAND (*tp, 0);
1204 	  tree type = remap_type (TREE_TYPE (*tp), id);
1205 	  tree old = *tp;
1206 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1207 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1208 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1209 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1210 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1211 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1212 	    {
1213 	      MR_DEPENDENCE_CLIQUE (*tp)
1214 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1215 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1216 	    }
1217 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1218 	     remapped a parameter as the property might be valid only
1219 	     for the parameter itself.  */
1220 	  if (TREE_THIS_NOTRAP (old)
1221 	      && (!is_parm (TREE_OPERAND (old, 0))
1222 		  || (!id->transform_parameter && is_parm (ptr))))
1223 	    TREE_THIS_NOTRAP (*tp) = 1;
1224 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1225 	  *walk_subtrees = 0;
1226 	  return NULL;
1227 	}
1228 
1229       /* Here is the "usual case".  Copy this tree node, and then
1230 	 tweak some special cases.  */
1231       copy_tree_r (tp, walk_subtrees, NULL);
1232 
1233       /* If EXPR has a block defined, map it to the newly constructed block.
1234          When inlining, we want EXPRs without a block to appear in the block
1235 	 of the function call if we are not remapping a type.  */
1236       if (EXPR_P (*tp))
1237 	{
1238 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1239 	  if (TREE_BLOCK (*tp))
1240 	    {
1241 	      tree *n;
1242 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1243 	      if (n)
1244 		new_block = *n;
1245 	    }
1246 	  TREE_SET_BLOCK (*tp, new_block);
1247 	}
1248 
1249       if (TREE_CODE (*tp) != OMP_CLAUSE)
1250 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1251 
1252       /* The copied TARGET_EXPR has never been expanded, even if the
1253 	 original node was expanded already.  */
1254       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1255 	{
1256 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1257 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1258 	}
1259 
1260       /* Variable substitution need not be simple.  In particular, the
1261 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1262 	 and friends are up-to-date.  */
1263       else if (TREE_CODE (*tp) == ADDR_EXPR)
1264 	{
1265 	  int invariant = is_gimple_min_invariant (*tp);
1266 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1267 
1268 	  /* Handle the case where we substituted an INDIRECT_REF
1269 	     into the operand of the ADDR_EXPR.  */
1270 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1271 	    {
1272 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1273 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1274 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1275 	      *tp = t;
1276 	    }
1277 	  else
1278 	    recompute_tree_invariant_for_addr_expr (*tp);
1279 
1280 	  /* If this used to be invariant, but is not any longer,
1281 	     then regimplification is probably needed.  */
1282 	  if (invariant && !is_gimple_min_invariant (*tp))
1283 	    id->regimplify = true;
1284 
1285 	  *walk_subtrees = 0;
1286 	}
1287     }
1288 
1289   /* Keep iterating.  */
1290   return NULL_TREE;
1291 }
1292 
1293 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1294    source function, map that to the duplicate EH region number in
1295    the destination function.  */
1296 
1297 static int
1298 remap_eh_region_nr (int old_nr, copy_body_data *id)
1299 {
1300   eh_region old_r, new_r;
1301 
1302   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1303   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1304 
1305   return new_r->index;
1306 }
1307 
1308 /* Similar, but operate on INTEGER_CSTs.  */
1309 
1310 static tree
1311 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1312 {
1313   int old_nr, new_nr;
1314 
1315   old_nr = tree_to_shwi (old_t_nr);
1316   new_nr = remap_eh_region_nr (old_nr, id);
1317 
1318   return build_int_cst (integer_type_node, new_nr);
1319 }
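
/* For illustration with hypothetical region numbers: a copied call
   "__builtin_eh_pointer (2)" has its INTEGER_CST argument rewritten to the
   index of the duplicated region in the destination function, say 5, using
   the id->eh_map that was populated when the callee's EH tree was
   duplicated.  */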
1320 
1321 /* Helper for copy_bb.  Remap statement STMT using the inlining
1322    information in ID.  Return the new statement copy.  */
1323 
1324 static gimple_seq
1325 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1326 {
1327   gimple *copy = NULL;
1328   struct walk_stmt_info wi;
1329   bool skip_first = false;
1330   gimple_seq stmts = NULL;
1331 
1332   if (is_gimple_debug (stmt)
1333       && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1334     return stmts;
1335 
1336   /* Begin by recognizing trees that we'll completely rewrite for the
1337      inlining context.  Our output for these trees is completely
1338      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1339      into an edge).  Further down, we'll handle trees that get
1340      duplicated and/or tweaked.  */
1341 
1342   /* When requested, GIMPLE_RETURNs should be transformed to just the
1343      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1344      be handled elsewhere by manipulating the CFG rather than the
1345      statement.  */
1346   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1347     {
1348       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1349       tree retbnd = gimple_return_retbnd (stmt);
1350       tree bndslot = id->retbnd;
1351 
1352       if (retbnd && bndslot)
1353 	{
1354 	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
1355 	  memset (&wi, 0, sizeof (wi));
1356 	  wi.info = id;
1357 	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1358 	  gimple_seq_add_stmt (&stmts, bndcopy);
1359 	}
1360 
1361       /* If we're returning something, just turn that into an
1362 	 assignment into the equivalent of the original RESULT_DECL.
1363 	 If RETVAL is just the result decl, the result decl has
1364 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1365 	 just toss the entire GIMPLE_RETURN.  */
1366       if (retval
1367 	  && (TREE_CODE (retval) != RESULT_DECL
1368 	      && (TREE_CODE (retval) != SSA_NAME
1369 		  || ! SSA_NAME_VAR (retval)
1370 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1371         {
1372 	  copy = gimple_build_assign (id->do_not_unshare
1373 				      ? id->retvar : unshare_expr (id->retvar),
1374 				      retval);
1375 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1376 	  skip_first = true;
1377 
1378 	  /* We need to copy bounds if we return a structure with pointers
1379 	     into an instrumented function.  */
1380 	  if (chkp_function_instrumented_p (id->dst_fn)
1381 	      && !bndslot
1382 	      && !BOUNDED_P (id->retvar)
1383 	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1384 	    id->assign_stmts.safe_push (copy);
1385 
1386 	}
1387       else
1388 	return stmts;
1389     }
1390   else if (gimple_has_substatements (stmt))
1391     {
1392       gimple_seq s1, s2;
1393 
1394       /* When cloning bodies from the C++ front end, we will be handed bodies
1395 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1396 	 have embedded statements.  */
1397       switch (gimple_code (stmt))
1398 	{
1399 	case GIMPLE_BIND:
1400 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1401 	  break;
1402 
1403 	case GIMPLE_CATCH:
1404 	  {
1405 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1406 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1407 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1408 	  }
1409 	  break;
1410 
1411 	case GIMPLE_EH_FILTER:
1412 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1413 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1414 	  break;
1415 
1416 	case GIMPLE_TRY:
1417 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1418 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1419 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1420 	  break;
1421 
1422 	case GIMPLE_WITH_CLEANUP_EXPR:
1423 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1424 	  copy = gimple_build_wce (s1);
1425 	  break;
1426 
1427 	case GIMPLE_OMP_PARALLEL:
1428 	  {
1429 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1430 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1431 	    copy = gimple_build_omp_parallel
1432 	             (s1,
1433 		      gimple_omp_parallel_clauses (omp_par_stmt),
1434 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1435 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1436 	  }
1437 	  break;
1438 
1439 	case GIMPLE_OMP_TASK:
1440 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1441 	  copy = gimple_build_omp_task
1442 	           (s1,
1443 		    gimple_omp_task_clauses (stmt),
1444 		    gimple_omp_task_child_fn (stmt),
1445 		    gimple_omp_task_data_arg (stmt),
1446 		    gimple_omp_task_copy_fn (stmt),
1447 		    gimple_omp_task_arg_size (stmt),
1448 		    gimple_omp_task_arg_align (stmt));
1449 	  break;
1450 
1451 	case GIMPLE_OMP_FOR:
1452 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1453 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1454 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1455 				       gimple_omp_for_clauses (stmt),
1456 				       gimple_omp_for_collapse (stmt), s2);
1457 	  {
1458 	    size_t i;
1459 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1460 	      {
1461 		gimple_omp_for_set_index (copy, i,
1462 					  gimple_omp_for_index (stmt, i));
1463 		gimple_omp_for_set_initial (copy, i,
1464 					    gimple_omp_for_initial (stmt, i));
1465 		gimple_omp_for_set_final (copy, i,
1466 					  gimple_omp_for_final (stmt, i));
1467 		gimple_omp_for_set_incr (copy, i,
1468 					 gimple_omp_for_incr (stmt, i));
1469 		gimple_omp_for_set_cond (copy, i,
1470 					 gimple_omp_for_cond (stmt, i));
1471 	      }
1472 	  }
1473 	  break;
1474 
1475 	case GIMPLE_OMP_MASTER:
1476 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1477 	  copy = gimple_build_omp_master (s1);
1478 	  break;
1479 
1480 	case GIMPLE_OMP_TASKGROUP:
1481 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1482 	  copy = gimple_build_omp_taskgroup (s1);
1483 	  break;
1484 
1485 	case GIMPLE_OMP_ORDERED:
1486 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1487 	  copy = gimple_build_omp_ordered
1488 		   (s1,
1489 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1490 	  break;
1491 
1492 	case GIMPLE_OMP_SECTION:
1493 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1494 	  copy = gimple_build_omp_section (s1);
1495 	  break;
1496 
1497 	case GIMPLE_OMP_SECTIONS:
1498 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1499 	  copy = gimple_build_omp_sections
1500 	           (s1, gimple_omp_sections_clauses (stmt));
1501 	  break;
1502 
1503 	case GIMPLE_OMP_SINGLE:
1504 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1505 	  copy = gimple_build_omp_single
1506 	           (s1, gimple_omp_single_clauses (stmt));
1507 	  break;
1508 
1509 	case GIMPLE_OMP_TARGET:
1510 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1511 	  copy = gimple_build_omp_target
1512 		   (s1, gimple_omp_target_kind (stmt),
1513 		    gimple_omp_target_clauses (stmt));
1514 	  break;
1515 
1516 	case GIMPLE_OMP_TEAMS:
1517 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1518 	  copy = gimple_build_omp_teams
1519 		   (s1, gimple_omp_teams_clauses (stmt));
1520 	  break;
1521 
1522 	case GIMPLE_OMP_CRITICAL:
1523 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1524 	  copy = gimple_build_omp_critical (s1,
1525 					    gimple_omp_critical_name
1526 					      (as_a <gomp_critical *> (stmt)),
1527 					    gimple_omp_critical_clauses
1528 					      (as_a <gomp_critical *> (stmt)));
1529 	  break;
1530 
1531 	case GIMPLE_TRANSACTION:
1532 	  {
1533 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1534 	    gtransaction *new_trans_stmt;
1535 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1536 				   id);
1537 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1538 	    gimple_transaction_set_subcode (new_trans_stmt,
1539 	      gimple_transaction_subcode (old_trans_stmt));
1540 	    gimple_transaction_set_label_norm (new_trans_stmt,
1541 	      gimple_transaction_label_norm (old_trans_stmt));
1542 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1543 	      gimple_transaction_label_uninst (old_trans_stmt));
1544 	    gimple_transaction_set_label_over (new_trans_stmt,
1545 	      gimple_transaction_label_over (old_trans_stmt));
1546 	  }
1547 	  break;
1548 
1549 	default:
1550 	  gcc_unreachable ();
1551 	}
1552     }
1553   else
1554     {
1555       if (gimple_assign_copy_p (stmt)
1556 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1557 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1558 	{
1559 	  /* Here we handle statements that are not completely rewritten.
1560 	     First we detect some inlining-induced bogosities for
1561 	     discarding.  */
1562 
1563 	  /* Some assignments VAR = VAR; don't generate any rtl code
1564 	     and thus don't count as variable modification.  Avoid
1565 	     keeping bogosities like 0 = 0.  */
1566 	  tree decl = gimple_assign_lhs (stmt), value;
1567 	  tree *n;
1568 
1569 	  n = id->decl_map->get (decl);
1570 	  if (n)
1571 	    {
1572 	      value = *n;
1573 	      STRIP_TYPE_NOPS (value);
1574 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1575 		return NULL;
1576 	    }
1577 	}
1578 
1579       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1580 	 in a block that we aren't copying during tree_function_versioning,
1581 	 just drop the clobber stmt.  */
1582       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1583 	{
1584 	  tree lhs = gimple_assign_lhs (stmt);
1585 	  if (TREE_CODE (lhs) == MEM_REF
1586 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1587 	    {
1588 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1589 	      if (gimple_bb (def_stmt)
1590 		  && !bitmap_bit_p (id->blocks_to_copy,
1591 				    gimple_bb (def_stmt)->index))
1592 		return NULL;
1593 	    }
1594 	}
1595 
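      /* Debug binds are copied as-is here; their operands are remapped
	 later by copy_debug_stmts, once all other statements have been
	 processed, which is why they are queued on id->debug_stmts.  */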
1596       if (gimple_debug_bind_p (stmt))
1597 	{
1598 	  gdebug *copy
1599 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1600 				       gimple_debug_bind_get_value (stmt),
1601 				       stmt);
1602 	  id->debug_stmts.safe_push (copy);
1603 	  gimple_seq_add_stmt (&stmts, copy);
1604 	  return stmts;
1605 	}
1606       if (gimple_debug_source_bind_p (stmt))
1607 	{
1608 	  gdebug *copy = gimple_build_debug_source_bind
1609 	                   (gimple_debug_source_bind_get_var (stmt),
1610 			    gimple_debug_source_bind_get_value (stmt),
1611 			    stmt);
1612 	  id->debug_stmts.safe_push (copy);
1613 	  gimple_seq_add_stmt (&stmts, copy);
1614 	  return stmts;
1615 	}
1616 
1617       /* Create a new deep copy of the statement.  */
1618       copy = gimple_copy (stmt);
1619 
1620       /* Clear flags that need revisiting.  */
1621       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1622         {
1623 	  if (gimple_call_tail_p (call_stmt))
1624 	    gimple_call_set_tail (call_stmt, false);
1625 	  if (gimple_call_from_thunk_p (call_stmt))
1626 	    gimple_call_set_from_thunk (call_stmt, false);
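	  /* Copying one of the GOMP SIMD internal calls means the
	     destination function now contains simduid-annotated loops.  */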
1627 	  if (gimple_call_internal_p (call_stmt))
1628 	    switch (gimple_call_internal_fn (call_stmt))
1629 	      {
1630 	      case IFN_GOMP_SIMD_LANE:
1631 	      case IFN_GOMP_SIMD_VF:
1632 	      case IFN_GOMP_SIMD_LAST_LANE:
1633 	      case IFN_GOMP_SIMD_ORDERED_START:
1634 	      case IFN_GOMP_SIMD_ORDERED_END:
1635 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1636 	        break;
1637 	      default:
1638 		break;
1639 	      }
1640 	}
1641 
1642       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1643 	 RESX and EH_DISPATCH.  */
1644       if (id->eh_map)
1645 	switch (gimple_code (copy))
1646 	  {
1647 	  case GIMPLE_CALL:
1648 	    {
1649 	      tree r, fndecl = gimple_call_fndecl (copy);
1650 	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1651 		switch (DECL_FUNCTION_CODE (fndecl))
1652 		  {
1653 		  case BUILT_IN_EH_COPY_VALUES:
1654 		    r = gimple_call_arg (copy, 1);
1655 		    r = remap_eh_region_tree_nr (r, id);
1656 		    gimple_call_set_arg (copy, 1, r);
1657 		    /* FALLTHRU */
1658 
1659 		  case BUILT_IN_EH_POINTER:
1660 		  case BUILT_IN_EH_FILTER:
1661 		    r = gimple_call_arg (copy, 0);
1662 		    r = remap_eh_region_tree_nr (r, id);
1663 		    gimple_call_set_arg (copy, 0, r);
1664 		    break;
1665 
1666 		  default:
1667 		    break;
1668 		  }
1669 
1670 	      /* Reset alias info if we didn't apply measures to
1671 		 keep it valid over inlining by setting DECL_PT_UID.  */
1672 	      if (!id->src_cfun->gimple_df
1673 		  || !id->src_cfun->gimple_df->ipa_pta)
1674 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1675 	    }
1676 	    break;
1677 
1678 	  case GIMPLE_RESX:
1679 	    {
1680 	      gresx *resx_stmt = as_a <gresx *> (copy);
1681 	      int r = gimple_resx_region (resx_stmt);
1682 	      r = remap_eh_region_nr (r, id);
1683 	      gimple_resx_set_region (resx_stmt, r);
1684 	    }
1685 	    break;
1686 
1687 	  case GIMPLE_EH_DISPATCH:
1688 	    {
1689 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1690 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1691 	      r = remap_eh_region_nr (r, id);
1692 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1693 	    }
1694 	    break;
1695 
1696 	  default:
1697 	    break;
1698 	  }
1699     }
1700 
1701   /* If STMT has a block defined, map it to the newly constructed
1702      block.  */
1703   if (gimple_block (copy))
1704     {
1705       tree *n;
1706       n = id->decl_map->get (gimple_block (copy));
1707       gcc_assert (n);
1708       gimple_set_block (copy, *n);
1709     }
1710 
1711   if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1712     {
1713       gimple_seq_add_stmt (&stmts, copy);
1714       return stmts;
1715     }
1716 
1717   /* Remap all the operands in COPY.  */
1718   memset (&wi, 0, sizeof (wi));
1719   wi.info = id;
1720   if (skip_first)
1721     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1722   else
1723     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1724 
1725   /* Clear the copied virtual operands.  We are not remapping them here
1726      but are going to recreate them from scratch.  */
1727   if (gimple_has_mem_ops (copy))
1728     {
1729       gimple_set_vdef (copy, NULL_TREE);
1730       gimple_set_vuse (copy, NULL_TREE);
1731     }
1732 
1733   gimple_seq_add_stmt (&stmts, copy);
1734   return stmts;
1735 }
1736 
1737 
1738 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
1739    later.  */
1740 
1741 static basic_block
1742 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1743          gcov_type count_scale)
1744 {
1745   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1746   basic_block copy_basic_block;
1747   tree decl;
1748   gcov_type freq;
1749   basic_block prev;
1750 
1751   /* Search for previous copied basic block.  */
1752   prev = bb->prev_bb;
1753   while (!prev->aux)
1754     prev = prev->prev_bb;
1755 
1756   /* create_basic_block() will append every new block to
1757      basic_block_info automatically.  */
1758   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1759   copy_basic_block->count = apply_scale (bb->count, count_scale);
1760 
1761   /* We are going to rebuild frequencies from scratch.  These values
1762      are of only minor importance; they merely drive canonicalize_loop_headers.  */
1763   freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1764 
1765   /* We recompute frequencies after inlining, so this is quite safe.  */
1766   if (freq > BB_FREQ_MAX)
1767     freq = BB_FREQ_MAX;
1768   copy_basic_block->frequency = freq;
1769 
1770   copy_gsi = gsi_start_bb (copy_basic_block);
1771 
1772   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1773     {
1774       gimple_seq stmts;
1775       gimple *stmt = gsi_stmt (gsi);
1776       gimple *orig_stmt = stmt;
1777       gimple_stmt_iterator stmts_gsi;
1778       bool stmt_added = false;
1779 
1780       id->regimplify = false;
1781       stmts = remap_gimple_stmt (stmt, id);
1782 
1783       if (gimple_seq_empty_p (stmts))
1784 	continue;
1785 
1786       seq_gsi = copy_gsi;
1787 
1788       for (stmts_gsi = gsi_start (stmts);
1789 	   !gsi_end_p (stmts_gsi); )
1790 	{
1791 	  stmt = gsi_stmt (stmts_gsi);
1792 
1793 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1794 	  gsi_next (&stmts_gsi);
1795 
1796 	  if (gimple_nop_p (stmt))
1797 	      continue;
1798 
1799 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1800 					    orig_stmt);
1801 
1802 	  /* With return slot optimization we can end up with
1803 	     non-gimple (foo *)&this->m, fix that here.  */
1804 	  if (is_gimple_assign (stmt)
1805 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1806 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1807 	    {
1808 	      tree new_rhs;
1809 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1810 						  gimple_assign_rhs1 (stmt),
1811 						  true, NULL, false,
1812 						  GSI_CONTINUE_LINKING);
1813 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1814 	      id->regimplify = false;
1815 	    }
1816 
1817 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1818 
1819 	  if (id->regimplify)
1820 	    gimple_regimplify_operands (stmt, &seq_gsi);
1821 
1822 	  stmt_added = true;
1823 	}
1824 
1825       if (!stmt_added)
1826 	continue;
1827 
1828       /* If copy_basic_block was empty at the start of this iteration,
1829 	 call gsi_start_bb again to get at the newly added statements.  */
1830       if (gsi_end_p (copy_gsi))
1831 	copy_gsi = gsi_start_bb (copy_basic_block);
1832       else
1833 	gsi_next (&copy_gsi);
1834 
1835       /* Process the new statement.  The call to gimple_regimplify_operands
1836 	 possibly turned the statement into multiple statements, we
1837 	 need to process all of them.  */
1838       do
1839 	{
1840 	  tree fn;
1841 	  gcall *call_stmt;
1842 
1843 	  stmt = gsi_stmt (copy_gsi);
1844 	  call_stmt = dyn_cast <gcall *> (stmt);
1845 	  if (call_stmt
1846 	      && gimple_call_va_arg_pack_p (call_stmt)
1847 	      && id->call_stmt
1848 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
1849 	    {
1850 	      /* __builtin_va_arg_pack () should be replaced by
1851 		 all arguments corresponding to ... in the caller.  */
1852 	      tree p;
1853 	      gcall *new_call;
1854 	      vec<tree> argarray;
1855 	      size_t nargs = gimple_call_num_args (id->call_stmt);
1856 	      size_t n, i, nargs_to_copy;
1857 	      bool remove_bounds = false;
1858 
1859 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1860 		nargs--;
1861 
1862 	      /* Bounds should be removed from the arg pack in case
1863 		 we handle a non-instrumented call in an instrumented
1864 		 function.  */
1865 	      nargs_to_copy = nargs;
1866 	      if (gimple_call_with_bounds_p (id->call_stmt)
1867 		  && !gimple_call_with_bounds_p (stmt))
1868 		{
1869 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1870 		       i < gimple_call_num_args (id->call_stmt);
1871 		       i++)
1872 		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1873 		      nargs_to_copy--;
1874 		  remove_bounds = true;
1875 		}
1876 
1877 	      /* Create the new array of arguments.  */
1878 	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
1879 	      argarray.create (n);
1880 	      argarray.safe_grow_cleared (n);
1881 
1882 	      /* Copy all the arguments before '...'  */
1883 	      memcpy (argarray.address (),
1884 		      gimple_call_arg_ptr (call_stmt, 0),
1885 		      gimple_call_num_args (call_stmt) * sizeof (tree));
1886 
1887 	      if (remove_bounds)
1888 		{
1889 		  /* Append the rest of the arguments, removing bounds.  */
1890 		  unsigned cur = gimple_call_num_args (call_stmt);
1892 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1893 		       i < gimple_call_num_args (id->call_stmt);
1894 		       i++)
1895 		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1896 		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1897 		  gcc_assert (cur == n);
1898 		}
1899 	      else
1900 		{
1901 		  /* Append the arguments passed in '...'  */
1902 		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1903 			  gimple_call_arg_ptr (id->call_stmt, 0)
1904 			  + (gimple_call_num_args (id->call_stmt) - nargs),
1905 			  nargs * sizeof (tree));
1906 		}
1907 
1908 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1909 						argarray);
1910 
1911 	      argarray.release ();
1912 
1913 	      /* Copy all GIMPLE_CALL flags, location and block, except
1914 		 GF_CALL_VA_ARG_PACK.  */
1915 	      gimple_call_copy_flags (new_call, call_stmt);
1916 	      gimple_call_set_va_arg_pack (new_call, false);
1917 	      gimple_set_location (new_call, gimple_location (stmt));
1918 	      gimple_set_block (new_call, gimple_block (stmt));
1919 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1920 
1921 	      gsi_replace (&copy_gsi, new_call, false);
1922 	      stmt = new_call;
1923 	    }
1924 	  else if (call_stmt
1925 		   && id->call_stmt
1926 		   && (decl = gimple_call_fndecl (stmt))
1927 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1928 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1929 	    {
1930 	      /* __builtin_va_arg_pack_len () should be replaced by
1931 		 the number of anonymous arguments.  */
1932 	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
1933 	      tree count, p;
1934 	      gimple *new_stmt;
1935 
1936 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1937 		nargs--;
1938 
1939 	      /* For instrumented calls we should ignore bounds.  */
1940 	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
1941 		   i < gimple_call_num_args (id->call_stmt);
1942 		   i++)
1943 		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1944 		  nargs--;
1945 
1946 	      if (!gimple_call_lhs (stmt))
1947 		{
1948 		  /* Drop unused calls.  */
1949 		  gsi_remove (&copy_gsi, false);
1950 		  continue;
1951 		}
1952 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
1953 		{
1954 		  count = build_int_cst (integer_type_node, nargs);
1955 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1956 		  gsi_replace (&copy_gsi, new_stmt, false);
1957 		  stmt = new_stmt;
1958 		}
1959 	      else if (nargs != 0)
1960 		{
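		  /* The enclosing call (id->call_stmt) itself uses
		     __builtin_va_arg_pack (), so the final argument count is
		     not known yet; keep the builtin call with a fresh lhs and
		     add the NARGS arguments we already know about.  */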
1961 		  tree newlhs;
1962 		  if (gimple_in_ssa_p (cfun))
1963 		    newlhs = make_ssa_name (integer_type_node, NULL);
1964 		  else
1965 		    newlhs = create_tmp_reg (integer_type_node);
1966 		  count = build_int_cst (integer_type_node, nargs);
1967 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1968 						  PLUS_EXPR, newlhs, count);
1969 		  gimple_call_set_lhs (stmt, newlhs);
1970 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
1971 		}
1972 	    }
1973 	  else if (call_stmt
1974 		   && id->call_stmt
1975 		   && gimple_call_internal_p (stmt)
1976 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1977 	    {
1978 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
1979 	      gsi_remove (&copy_gsi, false);
1980 	      continue;
1981 	    }
1982 
1983 	  /* Statements produced by inlining can be unfolded, especially
1984 	     when we constant propagated some operands.  We can't fold
1985 	     them right now for two reasons:
1986 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
1987 	     2) we can't change function calls to builtins.
1988 	     So we just mark the statement for later folding.  We mark
1989 	     all new statements, instead of just the statements that changed
1990 	     by some nontrivial substitution, so even statements made
1991 	     foldable indirectly are updated.  If this turns out to be
1992 	     expensive, copy_body can be told to watch for nontrivial
1993 	     changes.  */
1994 	  if (id->statements_to_fold)
1995 	    id->statements_to_fold->add (stmt);
1996 
1997 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
1998 	     callgraph edges and update or duplicate them.  */
1999 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2000 	    {
2001 	      struct cgraph_edge *edge;
2002 
2003 	      switch (id->transform_call_graph_edges)
2004 		{
2005 		case CB_CGE_DUPLICATE:
2006 		  edge = id->src_node->get_edge (orig_stmt);
2007 		  if (edge)
2008 		    {
2009 		      int edge_freq = edge->frequency;
2010 		      int new_freq;
2011 		      struct cgraph_edge *old_edge = edge;
2012 		      edge = edge->clone (id->dst_node, call_stmt,
2013 					  gimple_uid (stmt),
2014 					  REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2015 					  true);
2016 		      /* We could also just rescale the frequency, but
2017 		         doing so would introduce roundoff errors and make
2018 			 the verifier unhappy.  */
2019 		      new_freq  = compute_call_stmt_bb_frequency (id->dst_node->decl,
2020 								  copy_basic_block);
2021 
2022 		      /* Speculative calls consist of two edges - direct and indirect.
2023 			 Duplicate the whole thing and distribute frequencies accordingly.  */
2024 		      if (edge->speculative)
2025 			{
2026 			  struct cgraph_edge *direct, *indirect;
2027 			  struct ipa_ref *ref;
2028 
2029 			  gcc_assert (!edge->indirect_unknown_callee);
2030 			  old_edge->speculative_call_info (direct, indirect, ref);
2031 			  indirect = indirect->clone (id->dst_node, call_stmt,
2032 						      gimple_uid (stmt),
2033 						      REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2034 						      true);
2035 			  if (old_edge->frequency + indirect->frequency)
2036 			    {
2037 			      edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2038 						           (old_edge->frequency + indirect->frequency)),
2039 						     CGRAPH_FREQ_MAX);
2040 			      indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2041 							       (old_edge->frequency + indirect->frequency)),
2042 							 CGRAPH_FREQ_MAX);
2043 			    }
2044 			  id->dst_node->clone_reference (ref, stmt);
2045 			}
2046 		      else
2047 			{
2048 			  edge->frequency = new_freq;
2049 			  if (dump_file
2050 			      && profile_status_for_fn (cfun) != PROFILE_ABSENT
2051 			      && (edge_freq > edge->frequency + 10
2052 				  || edge_freq < edge->frequency - 10))
2053 			    {
2054 			      fprintf (dump_file, "Edge frequency estimated by "
2055 				       "cgraph %i diverges from inliner's estimate %i\n",
2056 				       edge_freq,
2057 				       edge->frequency);
2058 			      fprintf (dump_file,
2059 				       "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2060 				       bb->index,
2061 				       bb->frequency,
2062 				       copy_basic_block->frequency);
2063 			    }
2064 			}
2065 		    }
2066 		  break;
2067 
2068 		case CB_CGE_MOVE_CLONES:
2069 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2070 								call_stmt);
2071 		  edge = id->dst_node->get_edge (stmt);
2072 		  break;
2073 
2074 		case CB_CGE_MOVE:
2075 		  edge = id->dst_node->get_edge (orig_stmt);
2076 		  if (edge)
2077 		    edge->set_call_stmt (call_stmt);
2078 		  break;
2079 
2080 		default:
2081 		  gcc_unreachable ();
2082 		}
2083 
2084 	      /* Constant propagation on argument done during inlining
2085 		 may create new direct call.  Produce an edge for it.  */
2086 	      if ((!edge
2087 		   || (edge->indirect_inlining_edge
2088 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2089 		  && id->dst_node->definition
2090 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2091 		{
2092 		  struct cgraph_node *dest = cgraph_node::get (fn);
2093 
2094 		  /* We have a missing edge in the callgraph.  This can happen
2095 		     when previous inlining turned an indirect call into a
2096 		     direct call by constant propagating arguments or we are
2097 		     producing a dead clone (for further cloning).  In all
2098 		     other cases we hit a bug (incorrect node sharing is the
2099 		     most common reason for missing edges).  */
2100 		  gcc_assert (!dest->definition
2101 			      || dest->address_taken
2102 		  	      || !id->src_node->definition
2103 			      || !id->dst_node->definition);
2104 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2105 		    id->dst_node->create_edge_including_clones
2106 		      (dest, orig_stmt, call_stmt, bb->count,
2107 		       compute_call_stmt_bb_frequency (id->dst_node->decl,
2108 		       				       copy_basic_block),
2109 		       CIF_ORIGINALLY_INDIRECT_CALL);
2110 		  else
2111 		    id->dst_node->create_edge (dest, call_stmt,
2112 					bb->count,
2113 					compute_call_stmt_bb_frequency
2114 					  (id->dst_node->decl,
2115 					   copy_basic_block))->inline_failed
2116 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2117 		  if (dump_file)
2118 		    {
2119 		      fprintf (dump_file, "Created new direct edge to %s\n",
2120 			       dest->name ());
2121 		    }
2122 		}
2123 
2124 	      notice_special_calls (as_a <gcall *> (stmt));
2125 	    }
2126 
2127 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2128 				      id->eh_map, id->eh_lp_nr);
2129 
2130 	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2131 	    {
2132 	      ssa_op_iter i;
2133 	      tree def;
2134 
2135 	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2136 		if (TREE_CODE (def) == SSA_NAME)
2137 		  SSA_NAME_DEF_STMT (def) = stmt;
2138 	    }
2139 
2140 	  gsi_next (&copy_gsi);
2141 	}
2142       while (!gsi_end_p (copy_gsi));
2143 
2144       copy_gsi = gsi_last_bb (copy_basic_block);
2145     }
2146 
2147   return copy_basic_block;
2148 }
2149 
2150 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2151    form is quite easy, since the dominator relationship for the old basic
2152    blocks does not change.
2153 
2154    There is however an exception: inlining might change the dominator relation
2155    across EH edges that lead from basic blocks within the inlined function
2156    to landing pads in the function we inline into.
2157 
2158    The function fills in PHI_RESULTs of such PHI nodes if they refer
2159    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2160    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2161    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2162    set, and this means that there will be no overlapping live ranges
2163    for the underlying symbol.
2164 
2165    This might change in the future if we allow redirecting of EH edges and
2166    we might then want to change the way the CFG is built pre-inlining to
2167    include all the possible edges.  */
2168 static void
2169 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2170 				  bool can_throw, bool nonlocal_goto)
2171 {
2172   edge e;
2173   edge_iterator ei;
2174 
2175   FOR_EACH_EDGE (e, ei, bb->succs)
2176     if (!e->dest->aux
2177 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2178       {
2179 	gphi *phi;
2180 	gphi_iterator si;
2181 
2182 	if (!nonlocal_goto)
2183 	  gcc_assert (e->flags & EDGE_EH);
2184 
2185 	if (!can_throw)
2186 	  gcc_assert (!(e->flags & EDGE_EH));
2187 
2188 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2189 	  {
2190 	    edge re;
2191 
2192 	    phi = si.phi ();
2193 
2194 	    /* For abnormal goto/call edges the receiver can be the
2195 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2196 
2197 	    gcc_assert ((e->flags & EDGE_EH)
2198 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2199 
2200 	    re = find_edge (ret_bb, e->dest);
2201 	    gcc_checking_assert (re);
2202 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2203 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2204 
2205 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2206 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2207 	  }
2208       }
2209 }
2210 
2211 
2212 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2213    accordingly.  Assume the aux pointers point to the copies of each BB.
2214    Return true if any debug stmts are left after a statement that must end
2215    the basic block.  */
2216 
2217 static bool
2218 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2219 		   basic_block abnormal_goto_dest)
2220 {
2221   basic_block new_bb = (basic_block) bb->aux;
2222   edge_iterator ei;
2223   edge old_edge;
2224   gimple_stmt_iterator si;
2225   int flags;
2226   bool need_debug_cleanup = false;
2227 
2228   /* Use the indices from the original blocks to create edges for the
2229      new ones.  */
2230   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2231     if (!(old_edge->flags & EDGE_EH))
2232       {
2233 	edge new_edge;
2234 
2235 	flags = old_edge->flags;
2236 
2237 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2238 	if (old_edge->dest->index == EXIT_BLOCK
2239 	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2240 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2241 	  flags |= EDGE_FALLTHRU;
2242 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2243 	new_edge->count = apply_scale (old_edge->count, count_scale);
2244 	new_edge->probability = old_edge->probability;
2245       }
2246 
2247   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2248     return false;
2249 
2250   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2251     {
2252       gimple *copy_stmt;
2253       bool can_throw, nonlocal_goto;
2254 
2255       copy_stmt = gsi_stmt (si);
2256       if (!is_gimple_debug (copy_stmt))
2257 	update_stmt (copy_stmt);
2258 
2259       /* Do this before the possible split_block.  */
2260       gsi_next (&si);
2261 
2262       /* If this tree could throw an exception, there are two
2263          cases where we need to add abnormal edge(s): the
2264          tree wasn't in a region and there is a "current
2265          region" in the caller; or the original tree had
2266          EH edges.  In both cases split the block after the tree,
2267          and add abnormal edge(s) as needed; we need both
2268          those from the callee and the caller.
2269          We check whether the copy can throw, because the const
2270          propagation can change an INDIRECT_REF which throws
2271          into a COMPONENT_REF which doesn't.  If the copy
2272          can throw, the original could also throw.  */
2273       can_throw = stmt_can_throw_internal (copy_stmt);
2274       nonlocal_goto
2275 	= (stmt_can_make_abnormal_goto (copy_stmt)
2276 	   && !computed_goto_p (copy_stmt));
2277 
2278       if (can_throw || nonlocal_goto)
2279 	{
2280 	  if (!gsi_end_p (si))
2281 	    {
2282 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2283 		gsi_next (&si);
2284 	      if (gsi_end_p (si))
2285 		need_debug_cleanup = true;
2286 	    }
2287 	  if (!gsi_end_p (si))
2288 	    /* Note that bb's predecessor edges aren't necessarily
2289 	       right at this point; split_block doesn't care.  */
2290 	    {
2291 	      edge e = split_block (new_bb, copy_stmt);
2292 
2293 	      new_bb = e->dest;
2294 	      new_bb->aux = e->src->aux;
2295 	      si = gsi_start_bb (new_bb);
2296 	    }
2297 	}
2298 
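      /* Recreate the EH edges implied by the (already remapped) EH
	 region information of the copied statement.  */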
2299       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2300 	make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2301       else if (can_throw)
2302 	make_eh_edges (copy_stmt);
2303 
2304       /* If the call we inline cannot make abnormal goto do not add
2305          additional abnormal edges but only retain those already present
2306 	 in the original function body.  */
2307       if (abnormal_goto_dest == NULL)
2308 	nonlocal_goto = false;
2309       if (nonlocal_goto)
2310 	{
2311 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2312 
2313 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2314 	    nonlocal_goto = false;
2315 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2316 	     in OpenMP regions which aren't allowed to be left abnormally.
2317 	     So, no need to add abnormal edge in that case.  */
2318 	  else if (is_gimple_call (copy_stmt)
2319 		   && gimple_call_internal_p (copy_stmt)
2320 		   && (gimple_call_internal_fn (copy_stmt)
2321 		       == IFN_ABNORMAL_DISPATCHER)
2322 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2323 	    nonlocal_goto = false;
2324 	  else
2325 	    make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2326 	}
2327 
2328       if ((can_throw || nonlocal_goto)
2329 	  && gimple_in_ssa_p (cfun))
2330 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2331 					  can_throw, nonlocal_goto);
2332     }
2333   return need_debug_cleanup;
2334 }
2335 
2336 /* Copy the PHIs.  All blocks and edges have been copied; some blocks
2337    were possibly split and new outgoing EH edges inserted.
2338    BB points to the block of the original function and AUX pointers link
2339    the original and newly copied blocks.  */
2340 
2341 static void
2342 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2343 {
2344   basic_block const new_bb = (basic_block) bb->aux;
2345   edge_iterator ei;
2346   gphi *phi;
2347   gphi_iterator si;
2348   edge new_edge;
2349   bool inserted = false;
2350 
2351   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2352     {
2353       tree res, new_res;
2354       gphi *new_phi;
2355 
2356       phi = si.phi ();
2357       res = PHI_RESULT (phi);
2358       new_res = res;
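      /* PHIs for virtual operands are not copied; virtual SSA form is
	 recreated from scratch after the body has been copied.  */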
2359       if (!virtual_operand_p (res))
2360 	{
2361 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2362 	  new_phi = create_phi_node (new_res, new_bb);
2363 	  if (EDGE_COUNT (new_bb->preds) == 0)
2364 	    {
2365 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2366 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2367 	    }
2368 	  else
2369 	    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2370 	      {
2371 		edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2372 		tree arg;
2373 		tree new_arg;
2374 		edge_iterator ei2;
2375 		location_t locus;
2376 
2377 		/* When doing partial cloning, we allow PHIs on the entry block
2378 		   as long as all the arguments are the same.  Find any input
2379 		   edge to see which argument to copy.  */
2380 		if (!old_edge)
2381 		  FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2382 		    if (!old_edge->src->aux)
2383 		      break;
2384 
2385 		arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2386 		new_arg = arg;
2387 		walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2388 		gcc_assert (new_arg);
2389 		/* With return slot optimization we can end up with
2390 		   non-gimple (foo *)&this->m, fix that here.  */
2391 		if (TREE_CODE (new_arg) != SSA_NAME
2392 		    && TREE_CODE (new_arg) != FUNCTION_DECL
2393 		    && !is_gimple_val (new_arg))
2394 		  {
2395 		    gimple_seq stmts = NULL;
2396 		    new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2397 		    gsi_insert_seq_on_edge (new_edge, stmts);
2398 		    inserted = true;
2399 		  }
2400 		locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2401 		if (LOCATION_BLOCK (locus))
2402 		  {
2403 		    tree *n;
2404 		    n = id->decl_map->get (LOCATION_BLOCK (locus));
2405 		    gcc_assert (n);
2406 		    locus = set_block (locus, *n);
2407 		  }
2408 		else
2409 		  locus = LOCATION_LOCUS (locus);
2410 
2411 		add_phi_arg (new_phi, new_arg, new_edge, locus);
2412 	      }
2413 	}
2414     }
2415 
2416   /* Commit the delayed edge insertions.  */
2417   if (inserted)
2418     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2419       gsi_commit_one_edge_insert (new_edge, NULL);
2420 }
2421 
2422 
2423 /* Wrapper for remap_decl so it can be used as a callback.  */
2424 
2425 static tree
2426 remap_decl_1 (tree decl, void *data)
2427 {
2428   return remap_decl (decl, (copy_body_data *) data);
2429 }
2430 
2431 /* Build struct function and associated data structures for the new clone
2432    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2433    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2434 
2435 static void
2436 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2437 {
2438   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2439   gcov_type count_scale;
2440 
2441   if (!DECL_ARGUMENTS (new_fndecl))
2442     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2443   if (!DECL_RESULT (new_fndecl))
2444     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2445 
2446   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2447     count_scale
2448         = GCOV_COMPUTE_SCALE (count,
2449                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2450   else
2451     count_scale = REG_BR_PROB_BASE;
2452 
2453   /* Register specific tree functions.  */
2454   gimple_register_cfg_hooks ();
2455 
2456   /* Get clean struct function.  */
2457   push_struct_function (new_fndecl);
2458 
2459   /* We will rebuild these, so just sanity check that they are empty.  */
2460   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2461   gcc_assert (cfun->local_decls == NULL);
2462   gcc_assert (cfun->cfg == NULL);
2463   gcc_assert (cfun->decl == new_fndecl);
2464 
2465   /* Copy items we preserve during cloning.  */
2466   cfun->static_chain_decl = src_cfun->static_chain_decl;
2467   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2468   cfun->function_end_locus = src_cfun->function_end_locus;
2469   cfun->curr_properties = src_cfun->curr_properties;
2470   cfun->last_verified = src_cfun->last_verified;
2471   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2472   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2473   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2474   cfun->stdarg = src_cfun->stdarg;
2475   cfun->after_inlining = src_cfun->after_inlining;
2476   cfun->can_throw_non_call_exceptions
2477     = src_cfun->can_throw_non_call_exceptions;
2478   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2479   cfun->returns_struct = src_cfun->returns_struct;
2480   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2481 
2482   init_empty_tree_cfg ();
2483 
2484   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2485   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2486     (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2487      REG_BR_PROB_BASE);
2488   ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2489     = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2490   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2491     (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2492      REG_BR_PROB_BASE);
2493   EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2494     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2495   if (src_cfun->eh)
2496     init_eh_for_function ();
2497 
2498   if (src_cfun->gimple_df)
2499     {
2500       init_tree_ssa (cfun);
2501       cfun->gimple_df->in_ssa_p = true;
2502       init_ssa_operands (cfun);
2503     }
2504 }
2505 
2506 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2507    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2508    successor has multiple predecessors, reset their values, otherwise keep
2509    them.  */
2510 
2511 static void
2512 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2513 {
2514   edge e;
2515   edge_iterator ei;
2516   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2517 
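  /* Only act when the last real stmt can throw or make an abnormal goto
     and debug stmts follow it; such debug stmts cannot stay after a stmt
     that must end the basic block.  */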
2518   if (gsi_end_p (si)
2519       || gsi_one_before_end_p (si)
2520       || !(stmt_can_throw_internal (gsi_stmt (si))
2521 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2522     return;
2523 
2524   FOR_EACH_EDGE (e, ei, new_bb->succs)
2525     {
2526       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2527       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2528       while (is_gimple_debug (gsi_stmt (ssi)))
2529 	{
2530 	  gimple *stmt = gsi_stmt (ssi);
2531 	  gdebug *new_stmt;
2532 	  tree var;
2533 	  tree value;
2534 
2535 	  /* For the last edge move the debug stmts instead of copying
2536 	     them.  */
2537 	  if (ei_one_before_end_p (ei))
2538 	    {
2539 	      si = ssi;
2540 	      gsi_prev (&ssi);
2541 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2542 		gimple_debug_bind_reset_value (stmt);
2543 	      gsi_remove (&si, false);
2544 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2545 	      continue;
2546 	    }
2547 
2548 	  if (gimple_debug_bind_p (stmt))
2549 	    {
2550 	      var = gimple_debug_bind_get_var (stmt);
2551 	      if (single_pred_p (e->dest))
2552 		{
2553 		  value = gimple_debug_bind_get_value (stmt);
2554 		  value = unshare_expr (value);
2555 		}
2556 	      else
2557 		value = NULL_TREE;
2558 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2559 	    }
2560 	  else if (gimple_debug_source_bind_p (stmt))
2561 	    {
2562 	      var = gimple_debug_source_bind_get_var (stmt);
2563 	      value = gimple_debug_source_bind_get_value (stmt);
2564 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2565 	    }
2566 	  else
2567 	    gcc_unreachable ();
2568 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2569 	  id->debug_stmts.safe_push (new_stmt);
2570 	  gsi_prev (&ssi);
2571 	}
2572     }
2573 }
2574 
2575 /* Make a copy of the sub-loops of SRC_PARENT and place them
2576    as children of DEST_PARENT.  */
2577 
2578 static void
2579 copy_loops (copy_body_data *id,
2580 	    struct loop *dest_parent, struct loop *src_parent)
2581 {
2582   struct loop *src_loop = src_parent->inner;
2583   while (src_loop)
2584     {
2585       if (!id->blocks_to_copy
2586 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2587 	{
2588 	  struct loop *dest_loop = alloc_loop ();
2589 
2590 	  /* Assign the new loop its header and latch and associate
2591 	     those with the new loop.  */
2592 	  dest_loop->header = (basic_block)src_loop->header->aux;
2593 	  dest_loop->header->loop_father = dest_loop;
2594 	  if (src_loop->latch != NULL)
2595 	    {
2596 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2597 	      dest_loop->latch->loop_father = dest_loop;
2598 	    }
2599 
2600 	  /* Copy loop meta-data.  */
2601 	  copy_loop_info (src_loop, dest_loop);
2602 
2603 	  /* Finally place it into the loop array and the loop tree.  */
2604 	  place_new_loop (cfun, dest_loop);
2605 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2606 
2607 	  dest_loop->safelen = src_loop->safelen;
2608 	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
2609 	  if (src_loop->force_vectorize)
2610 	    {
2611 	      dest_loop->force_vectorize = true;
2612 	      cfun->has_force_vectorize_loops = true;
2613 	    }
2614 	  if (src_loop->simduid)
2615 	    {
2616 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2617 	      cfun->has_simduid_loops = true;
2618 	    }
2619 
2620 	  /* Recurse.  */
2621 	  copy_loops (id, dest_loop, src_loop);
2622 	}
2623       src_loop = src_loop->next;
2624     }
2625 }
2626 
2627 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2628 
2629 void
2630 redirect_all_calls (copy_body_data * id, basic_block bb)
2631 {
2632   gimple_stmt_iterator si;
2633   gimple *last = last_stmt (bb);
2634   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2635     {
2636       gimple *stmt = gsi_stmt (si);
2637       if (is_gimple_call (stmt))
2638 	{
2639 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2640 	  if (edge)
2641 	    {
2642 	      edge->redirect_call_stmt_to_callee ();
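	      /* If the redirected call at the end of the block can no
		 longer throw, its EH edges are dead; purge them.  */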
2643 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2644 		gimple_purge_dead_eh_edges (bb);
2645 	    }
2646 	}
2647     }
2648 }
2649 
2650 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2651    with each bb's frequency. Used when NODE has a 0-weight entry
2652    but we are about to inline it into a non-zero count call bb.
2653    See the comments for handle_missing_profiles() in predict.c for
2654    when this can happen for COMDATs.  */
2655 
2656 void
2657 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2658 {
2659   basic_block bb;
2660   edge_iterator ei;
2661   edge e;
2662   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2663 
2664   FOR_ALL_BB_FN(bb, fn)
2665     {
2666       bb->count = apply_scale (count,
2667                                GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2668       FOR_EACH_EDGE (e, ei, bb->succs)
2669         e->count = apply_probability (e->src->count, e->probability);
2670     }
2671 }
2672 
2673 /* Make a copy of the body of FN so that it can be inserted inline in
2674    another function.  Walks FN via CFG, returns new fndecl.  */
2675 
2676 static tree
2677 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2678 	       basic_block entry_block_map, basic_block exit_block_map,
2679 	       basic_block new_entry)
2680 {
2681   tree callee_fndecl = id->src_fn;
2682   /* Original cfun for the callee, doesn't change.  */
2683   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2684   struct function *cfun_to_copy;
2685   basic_block bb;
2686   tree new_fndecl = NULL;
2687   bool need_debug_cleanup = false;
2688   gcov_type count_scale;
2689   int last;
2690   int incoming_frequency = 0;
2691   gcov_type incoming_count = 0;
2692 
2693   /* This can happen for COMDAT routines that end up with 0 counts
2694      despite being called (see the comments for handle_missing_profiles()
2695      in predict.c as to why). Apply counts to the blocks in the callee
2696      before inlining, using the guessed edge frequencies, so that we don't
2697      end up with a 0-count inline body which can confuse downstream
2698      optimizations such as function splitting.  */
2699   if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2700     {
2701       /* Apply the larger of the call bb count and the total incoming
2702          call edge count to the callee.  */
2703       gcov_type in_count = 0;
2704       struct cgraph_edge *in_edge;
2705       for (in_edge = id->src_node->callers; in_edge;
2706            in_edge = in_edge->next_caller)
2707         in_count += in_edge->count;
2708       freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2709     }
2710 
2711   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2712     count_scale
2713         = GCOV_COMPUTE_SCALE (count,
2714                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2715   else
2716     count_scale = REG_BR_PROB_BASE;
2717 
2718   /* Register specific tree functions.  */
2719   gimple_register_cfg_hooks ();
2720 
2721   /* If we are inlining just a region of the function, make sure to connect
2722      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2723      be part of a loop, we must compute the frequency and probability of
2724      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2725      probabilities of edges incoming from the nonduplicated region.  */
2726   if (new_entry)
2727     {
2728       edge e;
2729       edge_iterator ei;
2730 
2731       FOR_EACH_EDGE (e, ei, new_entry->preds)
2732 	if (!e->src->aux)
2733 	  {
2734 	    incoming_frequency += EDGE_FREQUENCY (e);
2735 	    incoming_count += e->count;
2736 	  }
2737       incoming_count = apply_scale (incoming_count, count_scale);
2738       incoming_frequency
2739 	= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2740       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2741       ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2742     }
2743 
2744   /* Must have a CFG here at this point.  */
2745   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2746 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2747 
2748   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2749 
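  /* Link the callee's entry/exit blocks and the caller's replacement
     blocks to each other through the aux fields, so that edge and PHI
     copying can find the corresponding blocks.  */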
2750   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2751   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2752   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2753   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2754 
2755   /* Duplicate any exception-handling regions.  */
2756   if (cfun->eh)
2757     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2758 				       remap_decl_1, id);
2759 
2760   /* Use aux pointers to map the original blocks to copy.  */
2761   FOR_EACH_BB_FN (bb, cfun_to_copy)
2762     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2763       {
2764 	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2765 	bb->aux = new_bb;
2766 	new_bb->aux = bb;
2767 	new_bb->loop_father = entry_block_map->loop_father;
2768       }
2769 
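  /* Remember how many blocks exist now; any block created past this
     point (e.g. by EH edge insertion) has its aux field cleared below.  */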
2770   last = last_basic_block_for_fn (cfun);
2771 
2772   /* Now that we've duplicated the blocks, duplicate their edges.  */
2773   basic_block abnormal_goto_dest = NULL;
2774   if (id->call_stmt
2775       && stmt_can_make_abnormal_goto (id->call_stmt))
2776     {
2777       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2778 
2779       bb = gimple_bb (id->call_stmt);
2780       gsi_next (&gsi);
2781       if (gsi_end_p (gsi))
2782 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2783     }
2784   FOR_ALL_BB_FN (bb, cfun_to_copy)
2785     if (!id->blocks_to_copy
2786 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2787       need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2788 					       abnormal_goto_dest);
2789 
2790   if (new_entry)
2791     {
2792       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2793       e->probability = REG_BR_PROB_BASE;
2794       e->count = incoming_count;
2795     }
2796 
2797   /* Duplicate the loop tree, if available and wanted.  */
2798   if (loops_for_fn (src_cfun) != NULL
2799       && current_loops != NULL)
2800     {
2801       copy_loops (id, entry_block_map->loop_father,
2802 		  get_loop (src_cfun, 0));
2803       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2804       loops_state_set (LOOPS_NEED_FIXUP);
2805     }
2806 
2807   /* If the loop tree in the source function needed fixup, mark the
2808      destination loop tree for fixup, too.  */
2809   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2810     loops_state_set (LOOPS_NEED_FIXUP);
2811 
2812   if (gimple_in_ssa_p (cfun))
2813     FOR_ALL_BB_FN (bb, cfun_to_copy)
2814       if (!id->blocks_to_copy
2815 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2816 	copy_phis_for_bb (bb, id);
2817 
2818   FOR_ALL_BB_FN (bb, cfun_to_copy)
2819     if (bb->aux)
2820       {
2821 	if (need_debug_cleanup
2822 	    && bb->index != ENTRY_BLOCK
2823 	    && bb->index != EXIT_BLOCK)
2824 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2825 	/* Update call edge destinations.  This cannot be done before loop
2826 	   info is updated, because we may split basic blocks.  */
2827 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2828 	    && bb->index != ENTRY_BLOCK
2829 	    && bb->index != EXIT_BLOCK)
2830 	  redirect_all_calls (id, (basic_block)bb->aux);
2831 	((basic_block)bb->aux)->aux = NULL;
2832 	bb->aux = NULL;
2833       }
2834 
2835   /* Zero out AUX fields of newly created block during EH edge
2836      insertion. */
2837   for (; last < last_basic_block_for_fn (cfun); last++)
2838     {
2839       if (need_debug_cleanup)
2840 	maybe_move_debug_stmts_to_successors (id,
2841 					      BASIC_BLOCK_FOR_FN (cfun, last));
2842       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2843       /* Update call edge destinations.  This cannot be done before loop
2844 	 info is updated, because we may split basic blocks.  */
2845       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2846 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2847     }
2848   entry_block_map->aux = NULL;
2849   exit_block_map->aux = NULL;
2850 
2851   if (id->eh_map)
2852     {
2853       delete id->eh_map;
2854       id->eh_map = NULL;
2855     }
2856   if (id->dependence_map)
2857     {
2858       delete id->dependence_map;
2859       id->dependence_map = NULL;
2860     }
2861 
2862   return new_fndecl;
2863 }
2864 
2865 /* Copy the debug STMT using ID.  We deal with these statements in a
2866    special way: if any variable in their VALUE expression wasn't
2867    remapped yet, we won't remap it, because that would get decl uids
2868    out of sync, causing codegen differences between -g and -g0.  If
2869    this arises, we drop the VALUE expression altogether.  */
2870 
2871 static void
2872 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2873 {
2874   tree t, *n;
2875   struct walk_stmt_info wi;
2876 
2877   if (gimple_block (stmt))
2878     {
2879       n = id->decl_map->get (gimple_block (stmt));
2880       gimple_set_block (stmt, n ? *n : id->block);
2881     }
2882 
2883   /* Remap all the operands in COPY.  */
2884   memset (&wi, 0, sizeof (wi));
2885   wi.info = id;
2886 
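  /* The remapping walk below signals a failed remap by setting
     processing_debug_stmt to -1; that is checked afterwards so the bind
     value can be dropped rather than refer to an unmapped decl.  */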
2887   processing_debug_stmt = 1;
2888 
2889   if (gimple_debug_source_bind_p (stmt))
2890     t = gimple_debug_source_bind_get_var (stmt);
2891   else
2892     t = gimple_debug_bind_get_var (stmt);
2893 
2894   if (TREE_CODE (t) == PARM_DECL && id->debug_map
2895       && (n = id->debug_map->get (t)))
2896     {
2897       gcc_assert (TREE_CODE (*n) == VAR_DECL);
2898       t = *n;
2899     }
2900   else if (TREE_CODE (t) == VAR_DECL
2901 	   && !is_global_var (t)
2902 	   && !id->decl_map->get (t))
2903     /* T is a non-localized variable.  */;
2904   else
2905     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2906 
2907   if (gimple_debug_bind_p (stmt))
2908     {
2909       gimple_debug_bind_set_var (stmt, t);
2910 
2911       if (gimple_debug_bind_has_value_p (stmt))
2912 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2913 		   remap_gimple_op_r, &wi, NULL);
2914 
2915       /* Punt if any decl couldn't be remapped.  */
2916       if (processing_debug_stmt < 0)
2917 	gimple_debug_bind_reset_value (stmt);
2918     }
2919   else if (gimple_debug_source_bind_p (stmt))
2920     {
2921       gimple_debug_source_bind_set_var (stmt, t);
2922       /* When inlining and source bind refers to one of the optimized
2923 	 away parameters, change the source bind into normal debug bind
2924 	 referring to the corresponding DEBUG_EXPR_DECL that should have
2925 	 been bound before the call stmt.  */
2926       t = gimple_debug_source_bind_get_value (stmt);
2927       if (t != NULL_TREE
2928 	  && TREE_CODE (t) == PARM_DECL
2929 	  && id->call_stmt)
2930 	{
2931 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2932 	  unsigned int i;
2933 	  if (debug_args != NULL)
2934 	    {
2935 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2936 		if ((**debug_args)[i] == DECL_ORIGIN (t)
2937 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2938 		  {
2939 		    t = (**debug_args)[i + 1];
2940 		    stmt->subcode = GIMPLE_DEBUG_BIND;
2941 		    gimple_debug_bind_set_value (stmt, t);
2942 		    break;
2943 		  }
2944 	    }
2945 	}
2946       if (gimple_debug_source_bind_p (stmt))
2947 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2948 		   remap_gimple_op_r, &wi, NULL);
2949     }
2950 
2951   processing_debug_stmt = 0;
2952 
2953   update_stmt (stmt);
2954 }
2955 
2956 /* Process deferred debug stmts.  In order to give values better odds
2957    of being successfully remapped, we delay the processing of debug
2958    stmts until all other stmts that might require remapping are
2959    processed.  */
2960 
2961 static void
2962 copy_debug_stmts (copy_body_data *id)
2963 {
2964   size_t i;
2965   gdebug *stmt;
2966 
2967   if (!id->debug_stmts.exists ())
2968     return;
2969 
2970   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2971     copy_debug_stmt (stmt, id);
2972 
2973   id->debug_stmts.release ();
2974 }
2975 
2976 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2977    another function.  */
2978 
2979 static tree
2980 copy_tree_body (copy_body_data *id)
2981 {
2982   tree fndecl = id->src_fn;
2983   tree body = DECL_SAVED_TREE (fndecl);
2984 
2985   walk_tree (&body, copy_tree_body_r, id, NULL);
2986 
2987   return body;
2988 }
2989 
2990 /* Make a copy of the body of FN so that it can be inserted inline in
2991    another function.  */
2992 
2993 static tree
2994 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2995 	   basic_block entry_block_map, basic_block exit_block_map,
2996 	   basic_block new_entry)
2997 {
2998   tree fndecl = id->src_fn;
2999   tree body;
3000 
3001   /* If this body has a CFG, walk CFG and copy.  */
3002   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3003   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
3004 			new_entry);
3005   copy_debug_stmts (id);
3006 
3007   return body;
3008 }
3009 
3010 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3011    defined in function FN, or of a data member thereof.  */
3012 
3013 static bool
3014 self_inlining_addr_expr (tree value, tree fn)
3015 {
3016   tree var;
3017 
3018   if (TREE_CODE (value) != ADDR_EXPR)
3019     return false;
3020 
3021   var = get_base_address (TREE_OPERAND (value, 0));
3022 
3023   return var && auto_var_in_fn_p (var, fn);
3024 }
3025 
3026 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3027    lexical block and line number information from base_stmt, if given,
3028    or from the last stmt of the block otherwise.  */
3029 
3030 static gimple *
3031 insert_init_debug_bind (copy_body_data *id,
3032 			basic_block bb, tree var, tree value,
3033 			gimple *base_stmt)
3034 {
3035   gimple *note;
3036   gimple_stmt_iterator gsi;
3037   tree tracked_var;
3038 
3039   if (!gimple_in_ssa_p (id->src_cfun))
3040     return NULL;
3041 
3042   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3043     return NULL;
3044 
3045   tracked_var = target_for_debug_bind (var);
3046   if (!tracked_var)
3047     return NULL;
3048 
3049   if (bb)
3050     {
3051       gsi = gsi_last_bb (bb);
3052       if (!base_stmt && !gsi_end_p (gsi))
3053 	base_stmt = gsi_stmt (gsi);
3054     }
3055 
3056   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3057 
3058   if (bb)
3059     {
3060       if (!gsi_end_p (gsi))
3061 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3062       else
3063 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3064     }
3065 
3066   return note;
3067 }
3068 
3069 static void
3070 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3071 {
3072   /* If VAR represents a zero-sized variable, it's possible that the
3073      assignment statement may result in no gimple statements.  */
3074   if (init_stmt)
3075     {
3076       gimple_stmt_iterator si = gsi_last_bb (bb);
3077 
3078       /* We can end up with init statements that store to a non-register
3079          from a rhs with a conversion.  Handle that here by forcing the
3080 	 rhs into a temporary.  gimple_regimplify_operands is not
3081 	 prepared to do this for us.  */
3082       if (!is_gimple_debug (init_stmt)
3083 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3084 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3085 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3086 	{
3087 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3088 			     gimple_expr_type (init_stmt),
3089 			     gimple_assign_rhs1 (init_stmt));
3090 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3091 					  GSI_NEW_STMT);
3092 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3093 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3094 	}
3095       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3096       gimple_regimplify_operands (init_stmt, &si);
3097 
3098       if (!is_gimple_debug (init_stmt))
3099 	{
3100 	  tree def = gimple_assign_lhs (init_stmt);
3101 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3102 	}
3103     }
3104 }
3105 
3106 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3107    at the end of BB.  When BB is NULL, we return the init statement to be
3108    output later.  */
3109 static gimple *
3110 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3111 		     basic_block bb, tree *vars)
3112 {
3113   gimple *init_stmt = NULL;
3114   tree var;
3115   tree rhs = value;
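  /* DEF is the default definition of P in the callee, if any; when it is
     present we can wire the incoming argument directly into the SSA web
     further below.  */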
3116   tree def = (gimple_in_ssa_p (cfun)
3117 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3118 
3119   if (value
3120       && value != error_mark_node
3121       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3122     {
3123       /* If we can match up types by promotion/demotion do so.  */
3124       if (fold_convertible_p (TREE_TYPE (p), value))
3125 	rhs = fold_convert (TREE_TYPE (p), value);
3126       else
3127 	{
3128 	  /* ???  For valid programs we should not end up here.
3129 	     Still if we end up with truly mismatched types here, fall back
3130 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3131 	     GIMPLE to the following passes.  */
3132 	  if (!is_gimple_reg_type (TREE_TYPE (value))
3133 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3134 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3135 	  else
3136 	    rhs = build_zero_cst (TREE_TYPE (p));
3137 	}
3138     }
3139 
3140   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3141      here since the type of this decl must be visible to the calling
3142      function.  */
3143   var = copy_decl_to_var (p, id);
3144 
3145   /* Declare this new variable.  */
3146   DECL_CHAIN (var) = *vars;
3147   *vars = var;
3148 
3149   /* Make gimplifier happy about this variable.  */
3150   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3151 
3152   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3153      we would not need to create a new variable here at all, if it
3154      weren't for debug info.  Still, we can just use the argument
3155      value.  */
3156   if (TREE_READONLY (p)
3157       && !TREE_ADDRESSABLE (p)
3158       && value && !TREE_SIDE_EFFECTS (value)
3159       && !def)
3160     {
3161       /* We may produce non-gimple trees by adding NOPs or introduce
3162 	 invalid sharing when the operand is not really constant.
3163 	 It is not a big deal to prohibit constant propagation here as
3164 	 we will constant propagate in the DOM1 pass anyway.  */
3165       if (is_gimple_min_invariant (value)
3166 	  && useless_type_conversion_p (TREE_TYPE (p),
3167 						 TREE_TYPE (value))
3168 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3169 	     the base variable isn't a local variable of the inlined
3170 	     function, e.g., when doing recursive inlining, direct or
3171 	     mutually-recursive or whatever, which is why we don't
3172 	     just test whether fn == current_function_decl.  */
3173 	  && ! self_inlining_addr_expr (value, fn))
3174 	{
3175 	  insert_decl_map (id, p, value);
3176 	  insert_debug_decl_map (id, p, var);
3177 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3178 	}
3179     }
3180 
3181   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3182      that way, when the PARM_DECL is encountered, it will be
3183      automatically replaced by the VAR_DECL.  */
3184   insert_decl_map (id, p, var);
3185 
3186   /* Even if P was TREE_READONLY, the new VAR should not be.
3187      In the original code, we would have constructed a
3188      temporary, and then the function body would have never
3189      changed the value of P.  However, now, we will be
3190      constructing VAR directly.  The constructor body may
3191      change its value multiple times as it is being
3192      constructed.  Therefore, it must not be TREE_READONLY;
3193      the back-end assumes that a TREE_READONLY variable is
3194      assigned to only once.  */
3195   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3196     TREE_READONLY (var) = 0;
3197 
3198   /* If there is no setup required and we are in SSA, take the easy route
3199      replacing all SSA names representing the function parameter by the
3200      SSA name passed to the function.
3201 
3202      We need to construct a map for the variable anyway as it might be
3203      used in different SSA names when the parameter is set in the function.
3204 
3205      Do the replacement at -O0 for const arguments replaced by a constant.
3206      This is important for builtin_constant_p and other constructs requiring
3207      a constant argument to be visible in the inlined function body.  */
3208   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3209       && (optimize
3210           || (TREE_READONLY (p)
3211 	      && is_gimple_min_invariant (rhs)))
3212       && (TREE_CODE (rhs) == SSA_NAME
3213 	  || is_gimple_min_invariant (rhs))
3214       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3215     {
3216       insert_decl_map (id, def, rhs);
3217       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3218     }
3219 
3220   /* If the value of the argument is never used, we don't care about
3221      initializing it.  */
3222   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3223     {
3224       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3225       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3226     }
3227 
3228   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3229      the argument to the proper type in case it was promoted.  */
3230   if (value)
3231     {
3232       if (rhs == error_mark_node)
3233 	{
3234 	  insert_decl_map (id, p, var);
3235 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3236 	}
3237 
3238       STRIP_USELESS_TYPE_CONVERSION (rhs);
3239 
3240       /* If we are in SSA form, properly remap the default definition
3241          or assign to a dummy SSA name if the parameter is unused and
3242 	 we are not optimizing.  */
3243       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3244 	{
3245 	  if (def)
3246 	    {
3247 	      def = remap_ssa_name (def, id);
3248 	      init_stmt = gimple_build_assign (def, rhs);
3249 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3250 	      set_ssa_default_def (cfun, var, NULL);
3251 	    }
3252 	  else if (!optimize)
3253 	    {
3254 	      def = make_ssa_name (var);
3255 	      init_stmt = gimple_build_assign (def, rhs);
3256 	    }
3257 	}
3258       else
3259         init_stmt = gimple_build_assign (var, rhs);
3260 
3261       if (bb && init_stmt)
3262         insert_init_stmt (id, bb, init_stmt);
3263     }
3264   return init_stmt;
3265 }
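
/* An informal sketch of the above (the SSA names are illustrative only):
   when inlining a call such as foo (x_1) where the callee declares int p,
   setup_one_parameter creates a caller-local copy of P, maps the PARM_DECL
   (and, in SSA form, its default definition) either to that copy or
   directly to x_1, and, when an explicit initialization is needed, emits
   an assignment like p_2 = x_1 at the end of BB.  */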
3266 
3267 /* Generate code to initialize the parameters of the function at the
3268    top of the stack in ID from the GIMPLE_CALL STMT.  */
3269 
3270 static void
3271 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3272 			       tree fn, basic_block bb)
3273 {
3274   tree parms;
3275   size_t i;
3276   tree p;
3277   tree vars = NULL_TREE;
3278   tree static_chain = gimple_call_chain (stmt);
3279 
3280   /* Figure out what the parameters are.  */
3281   parms = DECL_ARGUMENTS (fn);
3282 
3283   /* Loop through the parameter declarations, replacing each with an
3284      equivalent VAR_DECL, appropriately initialized.  */
3285   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3286     {
3287       tree val;
3288       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3289       setup_one_parameter (id, p, val, fn, bb, &vars);
3290     }
3291   /* After remapping parameters remap their types.  This has to be done
3292      in a second loop over all parameters to appropriately remap
3293      variable sized arrays when the size is specified in a
3294      parameter following the array.  */
3295   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3296     {
3297       tree *varp = id->decl_map->get (p);
3298       if (varp
3299 	  && TREE_CODE (*varp) == VAR_DECL)
3300 	{
3301 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3302 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3303 	  tree var = *varp;
3304 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3305 	  /* Also remap the default definition if it was remapped
3306 	     to the default definition of the parameter replacement
3307 	     by the parameter setup.  */
3308 	  if (def)
3309 	    {
3310 	      tree *defp = id->decl_map->get (def);
3311 	      if (defp
3312 		  && TREE_CODE (*defp) == SSA_NAME
3313 		  && SSA_NAME_VAR (*defp) == var)
3314 		TREE_TYPE (*defp) = TREE_TYPE (var);
3315 	    }
3316 	}
3317     }
3318 
3319   /* Initialize the static chain.  */
3320   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3321   gcc_assert (fn != current_function_decl);
3322   if (p)
3323     {
3324       /* No static chain?  Seems like a bug in tree-nested.c.  */
3325       gcc_assert (static_chain);
3326 
3327       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3328     }
3329 
3330   declare_inline_vars (id->block, vars);
3331 }
3332 
3333 
3334 /* Declare a return variable to replace the RESULT_DECL for the
3335    function we are calling, and register it in ID's decl map so that
3336    the RESULT_DECL is replaced by it when the callee body is copied.
3337    ENTRY_BB is where any needed initialization statement is inserted.
3338 
3339    RETURN_SLOT, if non-null, is the place where to store the result.  It
3340    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3341    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3342 
3343    RETURN_BOUNDS holds a destination for returned bounds.
3344 
3345    The return value is a (possibly null) value that holds the result
3346    as seen by the caller.  */
3347 
3348 static tree
3349 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3350 			 tree return_bounds, basic_block entry_bb)
3351 {
3352   tree callee = id->src_fn;
3353   tree result = DECL_RESULT (callee);
3354   tree callee_type = TREE_TYPE (result);
3355   tree caller_type;
3356   tree var, use;
3357 
3358   /* Handle type-mismatches in the function declaration return type
3359      vs. the call expression.  */
3360   if (modify_dest)
3361     caller_type = TREE_TYPE (modify_dest);
3362   else
3363     caller_type = TREE_TYPE (TREE_TYPE (callee));
3364 
3365   /* We don't need to do anything for functions that don't return anything.  */
3366   if (VOID_TYPE_P (callee_type))
3367     return NULL_TREE;
3368 
3369   /* If there was a return slot, then the return value is the
3370      dereferenced address of that object.  */
3371   if (return_slot)
3372     {
3373       /* The front end shouldn't have used both return_slot and
3374 	 a modify expression.  */
3375       gcc_assert (!modify_dest);
3376       if (DECL_BY_REFERENCE (result))
3377 	{
3378 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3379 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3380 
3381 	  /* We are going to construct *&return_slot and we can't do that
3382 	     for variables believed to be not addressable.
3383 
3384 	     FIXME: This check possibly can match, because values returned
3385 	     via return slot optimization are not believed to have address
3386 	     taken by alias analysis.  */
3387 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3388 	  var = return_slot_addr;
3389 	}
3390       else
3391 	{
3392 	  var = return_slot;
3393 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3394 	  if (TREE_ADDRESSABLE (result))
3395 	    mark_addressable (var);
3396 	}
3397       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3398            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3399 	  && !DECL_GIMPLE_REG_P (result)
3400 	  && DECL_P (var))
3401 	DECL_GIMPLE_REG_P (var) = 0;
3402       use = NULL;
3403       goto done;
3404     }
3405 
3406   /* All types requiring non-trivial constructors should have been handled.  */
3407   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3408 
3409   /* Attempt to avoid creating a new temporary variable.  */
3410   if (modify_dest
3411       && TREE_CODE (modify_dest) != SSA_NAME)
3412     {
3413       bool use_it = false;
3414 
3415       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3416       if (!useless_type_conversion_p (callee_type, caller_type))
3417 	use_it = false;
3418 
3419       /* ??? If we're assigning to a variable sized type, then we must
3420 	 reuse the destination variable, because we've no good way to
3421 	 create variable sized temporaries at this point.  */
3422       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3423 	use_it = true;
3424 
3425       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3426 	 reuse it as the result of the call directly.  Don't do this if
3427 	 it would promote MODIFY_DEST to addressable.  */
3428       else if (TREE_ADDRESSABLE (result))
3429 	use_it = false;
3430       else
3431 	{
3432 	  tree base_m = get_base_address (modify_dest);
3433 
3434 	  /* If the base isn't a decl, then it's a pointer, and we don't
3435 	     know where that's going to go.  */
3436 	  if (!DECL_P (base_m))
3437 	    use_it = false;
3438 	  else if (is_global_var (base_m))
3439 	    use_it = false;
3440 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3441 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3442 		   && !DECL_GIMPLE_REG_P (result)
3443 		   && DECL_GIMPLE_REG_P (base_m))
3444 	    use_it = false;
3445 	  else if (!TREE_ADDRESSABLE (base_m))
3446 	    use_it = true;
3447 	}
3448 
3449       if (use_it)
3450 	{
3451 	  var = modify_dest;
3452 	  use = NULL;
3453 	  goto done;
3454 	}
3455     }
3456 
3457   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3458 
3459   var = copy_result_decl_to_var (result, id);
3460   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3461 
3462   /* Do not have the rest of GCC warn about this variable as it should
3463      not be visible to the user.  */
3464   TREE_NO_WARNING (var) = 1;
3465 
3466   declare_inline_vars (id->block, var);
3467 
3468   /* Build the use expr.  If the return type of the function was
3469      promoted, convert it back to the expected type.  */
3470   use = var;
3471   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3472     {
3473       /* If we can match up types by promotion/demotion do so.  */
3474       if (fold_convertible_p (caller_type, var))
3475 	use = fold_convert (caller_type, var);
3476       else
3477 	{
3478 	  /* ???  For valid programs we should not end up here.
3479 	     Still if we end up with truly mismatched types here, fall back
3480 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3481 	     passes.  */
3482 	  /* Prevent var from being written into SSA form.  */
3483 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3484 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3485 	    DECL_GIMPLE_REG_P (var) = false;
3486 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3487 	    TREE_ADDRESSABLE (var) = true;
3488 	  use = fold_build2 (MEM_REF, caller_type,
3489 			     build_fold_addr_expr (var),
3490 			     build_int_cst (ptr_type_node, 0));
3491 	}
3492     }
3493 
3494   STRIP_USELESS_TYPE_CONVERSION (use);
3495 
3496   if (DECL_BY_REFERENCE (result))
3497     {
3498       TREE_ADDRESSABLE (var) = 1;
3499       var = build_fold_addr_expr (var);
3500     }
3501 
3502  done:
3503   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3504      way, when the RESULT_DECL is encountered, it will be
3505      automatically replaced by the VAR_DECL.
3506 
3507      When returning by reference, ensure that RESULT_DECL remaps to
3508      a gimple_val.  */
3509   if (DECL_BY_REFERENCE (result)
3510       && !is_gimple_val (var))
3511     {
3512       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3513       insert_decl_map (id, result, temp);
3514       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3515 	 its default_def SSA_NAME.  */
3516       if (gimple_in_ssa_p (id->src_cfun)
3517 	  && is_gimple_reg (result))
3518 	{
3519 	  temp = make_ssa_name (temp);
3520 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3521 	}
3522       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3523     }
3524   else
3525     insert_decl_map (id, result, var);
3526 
3527   /* Remember this so we can ignore it in remap_decls.  */
3528   id->retvar = var;
3529 
3530   /* If returned bounds are used, then make var for them.  */
3531   if (return_bounds)
3532   {
3533     tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3534     DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3535     TREE_NO_WARNING (bndtemp) = 1;
3536     declare_inline_vars (id->block, bndtemp);
3537 
3538     id->retbnd = bndtemp;
3539     insert_init_stmt (id, entry_bb,
3540 		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3541   }
3542 
3543   return use;
3544 }
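
/* Informally: for a call d = bar () this function either reuses D itself
   as the return variable (when that is safe), or declares a new temporary
   in the caller; the callee's RESULT_DECL is mapped to it, so assignments
   to the result inside the copied body target that variable, and the
   returned "use" expression is what later replaces the LHS of the call.  */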
3545 
3546 /* Determine if the function can be copied.  If so return NULL.  If
3547    not return a string describing the reason for failure.  */
3548 
3549 const char *
3550 copy_forbidden (struct function *fun)
3551 {
3552   const char *reason = fun->cannot_be_copied_reason;
3553 
3554   /* Only examine the function once.  */
3555   if (fun->cannot_be_copied_set)
3556     return reason;
3557 
3558   /* We cannot copy a function that receives a non-local goto
3559      because we cannot remap the destination label used in the
3560      function that is performing the non-local goto.  */
3561   /* ??? Actually, this should be possible, if we work at it.
3562      No doubt there's just a handful of places that simply
3563      assume it doesn't happen and don't substitute properly.  */
3564   if (fun->has_nonlocal_label)
3565     {
3566       reason = G_("function %q+F can never be copied "
3567 		  "because it receives a non-local goto");
3568       goto fail;
3569     }
3570 
3571   if (fun->has_forced_label_in_static)
3572     {
3573       reason = G_("function %q+F can never be copied because it saves "
3574 		  "address of local label in a static variable");
3575       goto fail;
3576     }
3577 
3578  fail:
3579   fun->cannot_be_copied_reason = reason;
3580   fun->cannot_be_copied_set = true;
3581   return reason;
3582 }
3583 
3584 
3585 static const char *inline_forbidden_reason;
3586 
3587 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3588    iff a function cannot be inlined.  Also sets the reason why.  */
3589 
3590 static tree
3591 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3592 			 struct walk_stmt_info *wip)
3593 {
3594   tree fn = (tree) wip->info;
3595   tree t;
3596   gimple *stmt = gsi_stmt (*gsi);
3597 
3598   switch (gimple_code (stmt))
3599     {
3600     case GIMPLE_CALL:
3601       /* Refuse to inline an alloca call unless the user explicitly forced
3602 	 it, as this may change the program's memory overhead drastically when
3603 	 the function using alloca is called in a loop.  In the GCC source
3604 	 included in SPEC2000, inlining into schedule_block caused it to
3605 	 require 2GB of RAM instead of 256MB.  Don't do so for alloca calls
3606 	 emitted for VLA objects as those can't cause unbounded growth (they're
3607 	 always wrapped inside stack_save/stack_restore regions).  */
3608       if (gimple_alloca_call_p (stmt)
3609 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3610 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3611 	{
3612 	  inline_forbidden_reason
3613 	    = G_("function %q+F can never be inlined because it uses "
3614 		 "alloca (override using the always_inline attribute)");
3615 	  *handled_ops_p = true;
3616 	  return fn;
3617 	}
3618 
3619       t = gimple_call_fndecl (stmt);
3620       if (t == NULL_TREE)
3621 	break;
3622 
3623       /* We cannot inline functions that call setjmp.  */
3624       if (setjmp_call_p (t))
3625 	{
3626 	  inline_forbidden_reason
3627 	    = G_("function %q+F can never be inlined because it uses setjmp");
3628 	  *handled_ops_p = true;
3629 	  return t;
3630 	}
3631 
3632       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3633 	switch (DECL_FUNCTION_CODE (t))
3634 	  {
3635 	    /* We cannot inline functions that take a variable number of
3636 	       arguments.  */
3637 	  case BUILT_IN_VA_START:
3638 	  case BUILT_IN_NEXT_ARG:
3639 	  case BUILT_IN_VA_END:
3640 	    inline_forbidden_reason
3641 	      = G_("function %q+F can never be inlined because it "
3642 		   "uses variable argument lists");
3643 	    *handled_ops_p = true;
3644 	    return t;
3645 
3646 	  case BUILT_IN_LONGJMP:
3647 	    /* We can't inline functions that call __builtin_longjmp at
3648 	       all.  The non-local goto machinery really requires the
3649 	       destination be in a different function.  If we allow the
3650 	       function calling __builtin_longjmp to be inlined into the
3651 	       function calling __builtin_setjmp, Things will Go Awry.  */
3652 	    inline_forbidden_reason
3653 	      = G_("function %q+F can never be inlined because "
3654 		   "it uses setjmp-longjmp exception handling");
3655 	    *handled_ops_p = true;
3656 	    return t;
3657 
3658 	  case BUILT_IN_NONLOCAL_GOTO:
3659 	    /* Similarly.  */
3660 	    inline_forbidden_reason
3661 	      = G_("function %q+F can never be inlined because "
3662 		   "it uses non-local goto");
3663 	    *handled_ops_p = true;
3664 	    return t;
3665 
3666 	  case BUILT_IN_RETURN:
3667 	  case BUILT_IN_APPLY_ARGS:
3668 	    /* If a __builtin_apply_args caller would be inlined,
3669 	       it would be saving arguments of the function it has
3670 	       been inlined into.  Similarly __builtin_return would
3671 	       return from the function the inline has been inlined into.  */
3672 	    inline_forbidden_reason
3673 	      = G_("function %q+F can never be inlined because "
3674 		   "it uses __builtin_return or __builtin_apply_args");
3675 	    *handled_ops_p = true;
3676 	    return t;
3677 
3678 	  default:
3679 	    break;
3680 	  }
3681       break;
3682 
3683     case GIMPLE_GOTO:
3684       t = gimple_goto_dest (stmt);
3685 
3686       /* We will not inline a function which uses computed goto.  The
3687 	 addresses of its local labels, which may be tucked into
3688 	 global storage, are of course not constant across
3689 	 instantiations, which causes unexpected behavior.  */
3690       if (TREE_CODE (t) != LABEL_DECL)
3691 	{
3692 	  inline_forbidden_reason
3693 	    = G_("function %q+F can never be inlined "
3694 		 "because it contains a computed goto");
3695 	  *handled_ops_p = true;
3696 	  return t;
3697 	}
3698       break;
3699 
3700     default:
3701       break;
3702     }
3703 
3704   *handled_ops_p = false;
3705   return NULL_TREE;
3706 }
3707 
3708 /* Return true if FNDECL is a function that cannot be inlined into
3709    another one.  */
3710 
3711 static bool
3712 inline_forbidden_p (tree fndecl)
3713 {
3714   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3715   struct walk_stmt_info wi;
3716   basic_block bb;
3717   bool forbidden_p = false;
3718 
3719   /* First check for shared reasons not to copy the code.  */
3720   inline_forbidden_reason = copy_forbidden (fun);
3721   if (inline_forbidden_reason != NULL)
3722     return true;
3723 
3724   /* Next, walk the statements of the function looking for
3725      constructs we can't handle, or that are non-optimal for inlining.  */
3726   hash_set<tree> visited_nodes;
3727   memset (&wi, 0, sizeof (wi));
3728   wi.info = (void *) fndecl;
3729   wi.pset = &visited_nodes;
3730 
3731   FOR_EACH_BB_FN (bb, fun)
3732     {
3733       gimple *ret;
3734       gimple_seq seq = bb_seq (bb);
3735       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3736       forbidden_p = (ret != NULL);
3737       if (forbidden_p)
3738 	break;
3739     }
3740 
3741   return forbidden_p;
3742 }
3743 
3744 /* Return false if the function FNDECL cannot be inlined on account of its
3745    attributes, true otherwise.  */
3746 static bool
3747 function_attribute_inlinable_p (const_tree fndecl)
3748 {
3749   if (targetm.attribute_table)
3750     {
3751       const_tree a;
3752 
3753       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3754 	{
3755 	  const_tree name = TREE_PURPOSE (a);
3756 	  int i;
3757 
3758 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3759 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3760 	      return targetm.function_attribute_inlinable_p (fndecl);
3761 	}
3762     }
3763 
3764   return true;
3765 }
3766 
3767 /* Returns nonzero if FN is a function that does not have any
3768    fundamental inline blocking properties.  */
3769 
3770 bool
3771 tree_inlinable_function_p (tree fn)
3772 {
3773   bool inlinable = true;
3774   bool do_warning;
3775   tree always_inline;
3776 
3777   /* If we've already decided this function shouldn't be inlined,
3778      there's no need to check again.  */
3779   if (DECL_UNINLINABLE (fn))
3780     return false;
3781 
3782   /* We only warn for functions declared `inline' by the user.  */
3783   do_warning = (warn_inline
3784 		&& DECL_DECLARED_INLINE_P (fn)
3785 		&& !DECL_NO_INLINE_WARNING_P (fn)
3786 		&& !DECL_IN_SYSTEM_HEADER (fn));
3787 
3788   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3789 
3790   if (flag_no_inline
3791       && always_inline == NULL)
3792     {
3793       if (do_warning)
3794         warning (OPT_Winline, "function %q+F can never be inlined because it "
3795                  "is suppressed using -fno-inline", fn);
3796       inlinable = false;
3797     }
3798 
3799   else if (!function_attribute_inlinable_p (fn))
3800     {
3801       if (do_warning)
3802         warning (OPT_Winline, "function %q+F can never be inlined because it "
3803                  "uses attributes conflicting with inlining", fn);
3804       inlinable = false;
3805     }
3806 
3807   else if (inline_forbidden_p (fn))
3808     {
3809       /* See if we should warn about uninlinable functions.  Previously,
3810 	 some of these warnings would be issued while trying to expand
3811 	 the function inline, but that would cause multiple warnings
3812 	 about functions that would for example call alloca.  But since
3813 	 this is a property of the function, just one warning is enough.
3814 	 As a bonus we can now give more details about the reason why a
3815 	 function is not inlinable.  */
3816       if (always_inline)
3817 	error (inline_forbidden_reason, fn);
3818       else if (do_warning)
3819 	warning (OPT_Winline, inline_forbidden_reason, fn);
3820 
3821       inlinable = false;
3822     }
3823 
3824   /* Squirrel away the result so that we don't have to check again.  */
3825   DECL_UNINLINABLE (fn) = !inlinable;
3826 
3827   return inlinable;
3828 }
3829 
3830 /* Estimate the cost of a memory move of type TYPE.  Use the machine
3831    dependent word size, take a possible memcpy call into account, and return
3832    the cost based on whether we are optimizing for size or speed, per SPEED_P.  */
3833 
3834 int
3835 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3836 {
3837   HOST_WIDE_INT size;
3838 
3839   gcc_assert (!VOID_TYPE_P (type));
3840 
3841   if (TREE_CODE (type) == VECTOR_TYPE)
3842     {
3843       machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3844       machine_mode simd
3845 	= targetm.vectorize.preferred_simd_mode (inner);
3846       int simd_mode_size = GET_MODE_SIZE (simd);
3847       return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3848 	      / simd_mode_size);
3849     }
3850 
3851   size = int_size_in_bytes (type);
3852 
3853   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3854     /* Cost of a memcpy call, 3 arguments and the call.  */
3855     return 4;
3856   else
3857     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3858 }
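
/* Worked example (illustrative; the constants are target-dependent): on a
   target where MOVE_MAX_PIECES is 8, a 24-byte aggregate costs
   (24 + 8 - 1) / 8 = 3, while anything larger than
   MOVE_MAX_PIECES * MOVE_RATIO is charged the flat memcpy cost of 4.  */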
3859 
3860 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3861 
3862 static int
3863 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3864 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3865 {
3866   switch (code)
3867     {
3868     /* These are "free" conversions, or their presumed cost
3869        is folded into other operations.  */
3870     case RANGE_EXPR:
3871     CASE_CONVERT:
3872     case COMPLEX_EXPR:
3873     case PAREN_EXPR:
3874     case VIEW_CONVERT_EXPR:
3875       return 0;
3876 
3877     /* Assign cost of 1 to usual operations.
3878        ??? We may consider mapping RTL costs to this.  */
3879     case COND_EXPR:
3880     case VEC_COND_EXPR:
3881     case VEC_PERM_EXPR:
3882 
3883     case PLUS_EXPR:
3884     case POINTER_PLUS_EXPR:
3885     case MINUS_EXPR:
3886     case MULT_EXPR:
3887     case MULT_HIGHPART_EXPR:
3888     case FMA_EXPR:
3889 
3890     case ADDR_SPACE_CONVERT_EXPR:
3891     case FIXED_CONVERT_EXPR:
3892     case FIX_TRUNC_EXPR:
3893 
3894     case NEGATE_EXPR:
3895     case FLOAT_EXPR:
3896     case MIN_EXPR:
3897     case MAX_EXPR:
3898     case ABS_EXPR:
3899 
3900     case LSHIFT_EXPR:
3901     case RSHIFT_EXPR:
3902     case LROTATE_EXPR:
3903     case RROTATE_EXPR:
3904 
3905     case BIT_IOR_EXPR:
3906     case BIT_XOR_EXPR:
3907     case BIT_AND_EXPR:
3908     case BIT_NOT_EXPR:
3909 
3910     case TRUTH_ANDIF_EXPR:
3911     case TRUTH_ORIF_EXPR:
3912     case TRUTH_AND_EXPR:
3913     case TRUTH_OR_EXPR:
3914     case TRUTH_XOR_EXPR:
3915     case TRUTH_NOT_EXPR:
3916 
3917     case LT_EXPR:
3918     case LE_EXPR:
3919     case GT_EXPR:
3920     case GE_EXPR:
3921     case EQ_EXPR:
3922     case NE_EXPR:
3923     case ORDERED_EXPR:
3924     case UNORDERED_EXPR:
3925 
3926     case UNLT_EXPR:
3927     case UNLE_EXPR:
3928     case UNGT_EXPR:
3929     case UNGE_EXPR:
3930     case UNEQ_EXPR:
3931     case LTGT_EXPR:
3932 
3933     case CONJ_EXPR:
3934 
3935     case PREDECREMENT_EXPR:
3936     case PREINCREMENT_EXPR:
3937     case POSTDECREMENT_EXPR:
3938     case POSTINCREMENT_EXPR:
3939 
3940     case REALIGN_LOAD_EXPR:
3941 
3942     case REDUC_MAX_EXPR:
3943     case REDUC_MIN_EXPR:
3944     case REDUC_PLUS_EXPR:
3945     case WIDEN_SUM_EXPR:
3946     case WIDEN_MULT_EXPR:
3947     case DOT_PROD_EXPR:
3948     case SAD_EXPR:
3949     case WIDEN_MULT_PLUS_EXPR:
3950     case WIDEN_MULT_MINUS_EXPR:
3951     case WIDEN_LSHIFT_EXPR:
3952 
3953     case VEC_WIDEN_MULT_HI_EXPR:
3954     case VEC_WIDEN_MULT_LO_EXPR:
3955     case VEC_WIDEN_MULT_EVEN_EXPR:
3956     case VEC_WIDEN_MULT_ODD_EXPR:
3957     case VEC_UNPACK_HI_EXPR:
3958     case VEC_UNPACK_LO_EXPR:
3959     case VEC_UNPACK_FLOAT_HI_EXPR:
3960     case VEC_UNPACK_FLOAT_LO_EXPR:
3961     case VEC_PACK_TRUNC_EXPR:
3962     case VEC_PACK_SAT_EXPR:
3963     case VEC_PACK_FIX_TRUNC_EXPR:
3964     case VEC_WIDEN_LSHIFT_HI_EXPR:
3965     case VEC_WIDEN_LSHIFT_LO_EXPR:
3966 
3967       return 1;
3968 
3969     /* A few special cases of expensive operations.  This is useful
3970        to avoid inlining functions having too many of these.  */
3971     case TRUNC_DIV_EXPR:
3972     case CEIL_DIV_EXPR:
3973     case FLOOR_DIV_EXPR:
3974     case ROUND_DIV_EXPR:
3975     case EXACT_DIV_EXPR:
3976     case TRUNC_MOD_EXPR:
3977     case CEIL_MOD_EXPR:
3978     case FLOOR_MOD_EXPR:
3979     case ROUND_MOD_EXPR:
3980     case RDIV_EXPR:
3981       if (TREE_CODE (op2) != INTEGER_CST)
3982         return weights->div_mod_cost;
3983       return 1;
3984 
3985     default:
3986       /* We expect a copy assignment with no operator.  */
3987       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3988       return 0;
3989     }
3990 }
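
/* For example, a division by a non-constant such as x_1 / y_2 is charged
   weights->div_mod_cost (10 in eni_time_weights, see init_inline_once),
   whereas x_1 / 4 and ordinary additions or comparisons cost 1.  */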
3991 
3992 
3993 /* Estimate number of instructions that will be created by expanding
3994    the statements in the statement sequence STMTS.
3995    WEIGHTS contains weights attributed to various constructs.  */
3996 
3997 int
3998 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3999 {
4000   int cost;
4001   gimple_stmt_iterator gsi;
4002 
4003   cost = 0;
4004   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4005     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4006 
4007   return cost;
4008 }
4009 
4010 
4011 /* Estimate number of instructions that will be created by expanding STMT.
4012    WEIGHTS contains weights attributed to various constructs.  */
4013 
4014 int
4015 estimate_num_insns (gimple *stmt, eni_weights *weights)
4016 {
4017   unsigned cost, i;
4018   enum gimple_code code = gimple_code (stmt);
4019   tree lhs;
4020   tree rhs;
4021 
4022   switch (code)
4023     {
4024     case GIMPLE_ASSIGN:
4025       /* Try to estimate the cost of assignments.  We have two cases to
4026 	 deal with:
4027 	 1) Simple assignments to registers;
4028 	 2) Stores to things that must live in memory.  This includes
4029 	    "normal" stores to scalars, but also assignments of large
4030 	    structures, or constructors of big arrays;
4031 
4032 	 Let us look at the first two cases, assuming we have "a = b + C":
4033 	 <GIMPLE_ASSIGN <var_decl "a">
4034 	        <plus_expr <var_decl "b"> <constant C>>
4035 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4036 	 any target, because "a" usually ends up in a real register.  Hence
4037 	 the only cost of this expression comes from the PLUS_EXPR, and we
4038 	 can ignore the GIMPLE_ASSIGN.
4039 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4040 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4041 	 of moving something into "a", which we compute using the function
4042 	 estimate_move_cost.  */
4043       if (gimple_clobber_p (stmt))
4044 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4045 
4046       lhs = gimple_assign_lhs (stmt);
4047       rhs = gimple_assign_rhs1 (stmt);
4048 
4049       cost = 0;
4050 
4051       /* Account for the cost of moving to / from memory.  */
4052       if (gimple_store_p (stmt))
4053 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4054       if (gimple_assign_load_p (stmt))
4055 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4056 
4057       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4058       				      gimple_assign_rhs1 (stmt),
4059 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4060 				      == GIMPLE_BINARY_RHS
4061 				      ? gimple_assign_rhs2 (stmt) : NULL);
4062       break;
4063 
4064     case GIMPLE_COND:
4065       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4066       				         gimple_op (stmt, 0),
4067 				         gimple_op (stmt, 1));
4068       break;
4069 
4070     case GIMPLE_SWITCH:
4071       {
4072 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4073 	/* Take into account cost of the switch + guess 2 conditional jumps for
4074 	   each case label.
4075 
4076 	   TODO: once the switch expansion logic is sufficiently separated, we can
4077 	   do a better job of estimating the cost of the switch.  */
4078 	if (weights->time_based)
4079 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4080 	else
4081 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4082       }
4083       break;
4084 
4085     case GIMPLE_CALL:
4086       {
4087 	tree decl;
4088 
4089 	if (gimple_call_internal_p (stmt))
4090 	  return 0;
4091 	else if ((decl = gimple_call_fndecl (stmt))
4092 		 && DECL_BUILT_IN (decl))
4093 	  {
4094 	    /* Do not special case builtins where we see the body.
4095 	       This just confuses the inliner.  */
4096 	    struct cgraph_node *node;
4097 	    if (!(node = cgraph_node::get (decl))
4098 		|| node->definition)
4099 	      ;
4100 	    /* For builtins that are likely expanded to nothing or
4101 	       inlined, do not account for operand costs.  */
4102 	    else if (is_simple_builtin (decl))
4103 	      return 0;
4104 	    else if (is_inexpensive_builtin (decl))
4105 	      return weights->target_builtin_call_cost;
4106 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4107 	      {
4108 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4109 		   specialize the cheap expansion we do here.
4110 		   ???  This asks for a more general solution.  */
4111 		switch (DECL_FUNCTION_CODE (decl))
4112 		  {
4113 		    case BUILT_IN_POW:
4114 		    case BUILT_IN_POWF:
4115 		    case BUILT_IN_POWL:
4116 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4117 			  && (real_equal
4118 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4119 			       &dconst2)))
4120 			return estimate_operator_cost
4121 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4122 			     gimple_call_arg (stmt, 0));
4123 		      break;
4124 
4125 		    default:
4126 		      break;
4127 		  }
4128 	      }
4129 	  }
4130 
4131 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4132 	if (gimple_call_lhs (stmt))
4133 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4134 				      weights->time_based);
4135 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4136 	  {
4137 	    tree arg = gimple_call_arg (stmt, i);
4138 	    cost += estimate_move_cost (TREE_TYPE (arg),
4139 					weights->time_based);
4140 	  }
4141 	break;
4142       }
4143 
4144     case GIMPLE_RETURN:
4145       return weights->return_cost;
4146 
4147     case GIMPLE_GOTO:
4148     case GIMPLE_LABEL:
4149     case GIMPLE_NOP:
4150     case GIMPLE_PHI:
4151     case GIMPLE_PREDICT:
4152     case GIMPLE_DEBUG:
4153       return 0;
4154 
4155     case GIMPLE_ASM:
4156       {
4157 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4158 	/* 1000 means infinity. This avoids overflows later
4159 	   with very long asm statements.  */
4160 	if (count > 1000)
4161 	  count = 1000;
4162 	return count;
4163       }
4164 
4165     case GIMPLE_RESX:
4166       /* This is either going to be an external function call with one
4167 	 argument, or two register copy statements plus a goto.  */
4168       return 2;
4169 
4170     case GIMPLE_EH_DISPATCH:
4171       /* ??? This is going to turn into a switch statement.  Ideally
4172 	 we'd have a look at the eh region and estimate the number of
4173 	 edges involved.  */
4174       return 10;
4175 
4176     case GIMPLE_BIND:
4177       return estimate_num_insns_seq (
4178 	       gimple_bind_body (as_a <gbind *> (stmt)),
4179 	       weights);
4180 
4181     case GIMPLE_EH_FILTER:
4182       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4183 
4184     case GIMPLE_CATCH:
4185       return estimate_num_insns_seq (gimple_catch_handler (
4186 				       as_a <gcatch *> (stmt)),
4187 				     weights);
4188 
4189     case GIMPLE_TRY:
4190       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4191               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4192 
4193     /* OMP directives are generally very expensive.  */
4194 
4195     case GIMPLE_OMP_RETURN:
4196     case GIMPLE_OMP_SECTIONS_SWITCH:
4197     case GIMPLE_OMP_ATOMIC_STORE:
4198     case GIMPLE_OMP_CONTINUE:
4199       /* ...except these, which are cheap.  */
4200       return 0;
4201 
4202     case GIMPLE_OMP_ATOMIC_LOAD:
4203       return weights->omp_cost;
4204 
4205     case GIMPLE_OMP_FOR:
4206       return (weights->omp_cost
4207               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4208               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4209 
4210     case GIMPLE_OMP_PARALLEL:
4211     case GIMPLE_OMP_TASK:
4212     case GIMPLE_OMP_CRITICAL:
4213     case GIMPLE_OMP_MASTER:
4214     case GIMPLE_OMP_TASKGROUP:
4215     case GIMPLE_OMP_ORDERED:
4216     case GIMPLE_OMP_SECTION:
4217     case GIMPLE_OMP_SECTIONS:
4218     case GIMPLE_OMP_SINGLE:
4219     case GIMPLE_OMP_TARGET:
4220     case GIMPLE_OMP_TEAMS:
4221       return (weights->omp_cost
4222               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4223 
4224     case GIMPLE_TRANSACTION:
4225       return (weights->tm_cost
4226 	      + estimate_num_insns_seq (gimple_transaction_body (
4227 					  as_a <gtransaction *> (stmt)),
4228 					weights));
4229 
4230     default:
4231       gcc_unreachable ();
4232     }
4233 
4234   return cost;
4235 }
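
/* Informal examples of the accounting above: a register assignment such as
   a_2 = b_1 + 1 costs just the PLUS_EXPR, i.e. 1; a store to memory adds
   estimate_move_cost of the stored type; and a direct call x = foo (y_3)
   is charged weights->call_cost plus the move costs of the argument and of
   the value assigned to the LHS.  */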
4236 
4237 /* Estimate number of instructions that will be created by expanding
4238    function FNDECL.  WEIGHTS contains weights attributed to various
4239    constructs.  */
4240 
4241 int
4242 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4243 {
4244   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4245   gimple_stmt_iterator bsi;
4246   basic_block bb;
4247   int n = 0;
4248 
4249   gcc_assert (my_function && my_function->cfg);
4250   FOR_EACH_BB_FN (bb, my_function)
4251     {
4252       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4253 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4254     }
4255 
4256   return n;
4257 }
4258 
4259 
4260 /* Initializes weights used by estimate_num_insns.  */
4261 
4262 void
4263 init_inline_once (void)
4264 {
4265   eni_size_weights.call_cost = 1;
4266   eni_size_weights.indirect_call_cost = 3;
4267   eni_size_weights.target_builtin_call_cost = 1;
4268   eni_size_weights.div_mod_cost = 1;
4269   eni_size_weights.omp_cost = 40;
4270   eni_size_weights.tm_cost = 10;
4271   eni_size_weights.time_based = false;
4272   eni_size_weights.return_cost = 1;
4273 
4274   /* Estimating time for a call is difficult, since we have no idea what the
4275      called function does.  In the current uses of eni_time_weights,
4276      underestimating the cost does less harm than overestimating it, so
4277      we choose a rather small value here.  */
4278   eni_time_weights.call_cost = 10;
4279   eni_time_weights.indirect_call_cost = 15;
4280   eni_time_weights.target_builtin_call_cost = 1;
4281   eni_time_weights.div_mod_cost = 10;
4282   eni_time_weights.omp_cost = 40;
4283   eni_time_weights.tm_cost = 40;
4284   eni_time_weights.time_based = true;
4285   eni_time_weights.return_cost = 2;
4286 }
4287 
4288 
4289 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4290 
4291 static void
4292 prepend_lexical_block (tree current_block, tree new_block)
4293 {
4294   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4295   BLOCK_SUBBLOCKS (current_block) = new_block;
4296   BLOCK_SUPERCONTEXT (new_block) = current_block;
4297 }
4298 
4299 /* Add local variables from CALLEE to CALLER.  */
4300 
4301 static inline void
4302 add_local_variables (struct function *callee, struct function *caller,
4303 		     copy_body_data *id)
4304 {
4305   tree var;
4306   unsigned ix;
4307 
4308   FOR_EACH_LOCAL_DECL (callee, ix, var)
4309     if (!can_be_nonlocal (var, id))
4310       {
4311         tree new_var = remap_decl (var, id);
4312 
4313         /* Remap debug-expressions.  */
4314 	if (TREE_CODE (new_var) == VAR_DECL
4315 	    && DECL_HAS_DEBUG_EXPR_P (var)
4316 	    && new_var != var)
4317 	  {
4318 	    tree tem = DECL_DEBUG_EXPR (var);
4319 	    bool old_regimplify = id->regimplify;
4320 	    id->remapping_type_depth++;
4321 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4322 	    id->remapping_type_depth--;
4323 	    id->regimplify = old_regimplify;
4324 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4325 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4326 	  }
4327 	add_local_decl (caller, new_var);
4328       }
4329 }
4330 
4331 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4332    have brought in or introduced any debug stmts for SRCVAR.  */
4333 
4334 static inline void
4335 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4336 {
4337   tree *remappedvarp = id->decl_map->get (srcvar);
4338 
4339   if (!remappedvarp)
4340     return;
4341 
4342   if (TREE_CODE (*remappedvarp) != VAR_DECL)
4343     return;
4344 
4345   if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4346     return;
4347 
4348   tree tvar = target_for_debug_bind (*remappedvarp);
4349   if (!tvar)
4350     return;
4351 
4352   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4353 					  id->call_stmt);
4354   gimple_seq_add_stmt (bindings, stmt);
4355 }
4356 
4357 /* For each inlined variable for which we may have debug bind stmts,
4358    add before GSI a final debug stmt resetting it, marking the end of
4359    its life, so that var-tracking knows it doesn't have to compute
4360    further locations for it.  */
4361 
4362 static inline void
4363 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4364 {
4365   tree var;
4366   unsigned ix;
4367   gimple_seq bindings = NULL;
4368 
4369   if (!gimple_in_ssa_p (id->src_cfun))
4370     return;
4371 
4372   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4373     return;
4374 
4375   for (var = DECL_ARGUMENTS (id->src_fn);
4376        var; var = DECL_CHAIN (var))
4377     reset_debug_binding (id, var, &bindings);
4378 
4379   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4380     reset_debug_binding (id, var, &bindings);
4381 
4382   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4383 }
4384 
4385 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4386 
4387 static bool
4388 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4389 {
4390   tree use_retvar;
4391   tree fn;
4392   hash_map<tree, tree> *dst;
4393   hash_map<tree, tree> *st = NULL;
4394   tree return_slot;
4395   tree modify_dest;
4396   tree return_bounds = NULL;
4397   struct cgraph_edge *cg_edge;
4398   cgraph_inline_failed_t reason;
4399   basic_block return_block;
4400   edge e;
4401   gimple_stmt_iterator gsi, stmt_gsi;
4402   bool successfully_inlined = false;
4403   bool purge_dead_abnormal_edges;
4404   gcall *call_stmt;
4405   unsigned int i;
4406 
4407   /* The gimplifier uses input_location in too many places, such as
4408      internal_get_tmp_var ().  */
4409   location_t saved_location = input_location;
4410   input_location = gimple_location (stmt);
4411 
4412   /* From here on, we're only interested in CALL_EXPRs.  */
4413   call_stmt = dyn_cast <gcall *> (stmt);
4414   if (!call_stmt)
4415     goto egress;
4416 
4417   cg_edge = id->dst_node->get_edge (stmt);
4418   gcc_checking_assert (cg_edge);
4419   /* First, see if we can figure out what function is being called.
4420      If we cannot, then there is no hope of inlining the function.  */
4421   if (cg_edge->indirect_unknown_callee)
4422     goto egress;
4423   fn = cg_edge->callee->decl;
4424   gcc_checking_assert (fn);
4425 
4426   /* If FN is a declaration of a function in a nested scope that was
4427      globally declared inline, we don't set its DECL_INITIAL.
4428      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4429      C++ front-end uses it for cdtors to refer to their internal
4430      declarations, that are not real functions.  Fortunately those
4431      don't have trees to be saved, so we can tell by checking their
4432      gimple_body.  */
4433   if (!DECL_INITIAL (fn)
4434       && DECL_ABSTRACT_ORIGIN (fn)
4435       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4436     fn = DECL_ABSTRACT_ORIGIN (fn);
4437 
4438   /* Don't try to inline functions that are not well-suited to inlining.  */
4439   if (cg_edge->inline_failed)
4440     {
4441       reason = cg_edge->inline_failed;
4442       /* If this call was originally indirect, we do not want to emit any
4443 	 inlining related warnings or sorry messages because there are no
4444 	 guarantees regarding those.  */
4445       if (cg_edge->indirect_inlining_edge)
4446 	goto egress;
4447 
4448       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4449           /* For extern inline functions that get redefined we have
4450 	     always silently ignored the always_inline flag.  Better behavior
4451 	     would be to be able to keep both bodies and use the extern
4452 	     inline body for inlining, but we can't do that because frontends
4453 	     overwrite the body.  */
4454 	  && !cg_edge->callee->local.redefined_extern_inline
4455 	  /* During early inline pass, report only when optimization is
4456 	     not turned on.  */
4457 	  && (symtab->global_info_ready
4458 	      || !optimize
4459 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4460 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4461 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4462 	{
4463 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4464 		 cgraph_inline_failed_string (reason));
4465 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4466 	    inform (gimple_location (stmt), "called from here");
4467 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4468 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4469                    "called from this function");
4470 	}
4471       else if (warn_inline
4472 	       && DECL_DECLARED_INLINE_P (fn)
4473 	       && !DECL_NO_INLINE_WARNING_P (fn)
4474 	       && !DECL_IN_SYSTEM_HEADER (fn)
4475 	       && reason != CIF_UNSPECIFIED
4476 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4477 	       /* Do not warn about not inlined recursive calls.  */
4478 	       && !cg_edge->recursive_p ()
4479 	       /* Avoid warnings during early inline pass. */
4480 	       && symtab->global_info_ready)
4481 	{
4482 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4483 		       fn, _(cgraph_inline_failed_string (reason))))
4484 	    {
4485 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4486 		inform (gimple_location (stmt), "called from here");
4487 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4488 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4489                        "called from this function");
4490 	    }
4491 	}
4492       goto egress;
4493     }
4494   fn = cg_edge->callee->decl;
4495   cg_edge->callee->get_untransformed_body ();
4496 
4497   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4498     cg_edge->callee->verify ();
4499 
4500   /* We will be inlining this callee.  */
4501   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4502   id->assign_stmts.create (0);
4503 
4504   /* Update the callers EH personality.  */
4505   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4506     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4507       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4508 
4509   /* Split the block holding the GIMPLE_CALL.  */
4510   e = split_block (bb, stmt);
4511   bb = e->src;
4512   return_block = e->dest;
4513   remove_edge (e);
4514 
4515   /* split_block splits after the statement; work around this by
4516      moving the call into the second block manually.  Not pretty,
4517      but seems easier than doing the CFG manipulation by hand
4518      when the GIMPLE_CALL is in the last statement of BB.  */
4519   stmt_gsi = gsi_last_bb (bb);
4520   gsi_remove (&stmt_gsi, false);
4521 
4522   /* If the GIMPLE_CALL was in the last statement of BB, it may have
4523      been the source of abnormal edges.  In this case, schedule
4524      the removal of dead abnormal edges.  */
4525   gsi = gsi_start_bb (return_block);
4526   if (gsi_end_p (gsi))
4527     {
4528       gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4529       purge_dead_abnormal_edges = true;
4530     }
4531   else
4532     {
4533       gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4534       purge_dead_abnormal_edges = false;
4535     }
4536 
4537   stmt_gsi = gsi_start_bb (return_block);
4538 
4539   /* Build a block containing code to initialize the arguments, the
4540      actual inline expansion of the body, and a label for the return
4541      statements within the function to jump to.  The type of the
4542      statement expression is the return type of the function call.
4543      ???  If the call does not have an associated block then we will
4544      remap all callee blocks to NULL, effectively dropping most of
4545      its debug information.  This should only happen for calls to
4546      artificial decls inserted by the compiler itself.  We need to
4547      either link the inlined blocks into the caller block tree or
4548      not refer to them in any way to not break GC for locations.  */
4549   if (gimple_block (stmt))
4550     {
4551       id->block = make_node (BLOCK);
4552       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4553       BLOCK_SOURCE_LOCATION (id->block)
4554 	= LOCATION_LOCUS (gimple_location (stmt));
4555       prepend_lexical_block (gimple_block (stmt), id->block);
4556     }
4557 
4558   /* Local declarations will be replaced by their equivalents in this
4559      map.  */
4560   st = id->decl_map;
4561   id->decl_map = new hash_map<tree, tree>;
4562   dst = id->debug_map;
4563   id->debug_map = NULL;
4564 
4565   /* Record the function we are about to inline.  */
4566   id->src_fn = fn;
4567   id->src_node = cg_edge->callee;
4568   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4569   id->call_stmt = call_stmt;
4570 
4571   /* If the src function contains an IFN_VA_ARG, then so will the dst
4572      function after inlining.  */
4573   if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
4574     {
4575       struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4576       dst_cfun->curr_properties &= ~PROP_gimple_lva;
4577     }
4578 
4579   gcc_assert (!id->src_cfun->after_inlining);
4580 
4581   id->entry_bb = bb;
4582   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4583     {
4584       gimple_stmt_iterator si = gsi_last_bb (bb);
4585       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4586       						   NOT_TAKEN),
4587 			GSI_NEW_STMT);
4588     }
4589   initialize_inlined_parameters (id, stmt, fn, bb);
4590 
4591   if (DECL_INITIAL (fn))
4592     {
4593       if (gimple_block (stmt))
4594 	{
4595 	  tree *var;
4596 
4597 	  prepend_lexical_block (id->block,
4598 				 remap_blocks (DECL_INITIAL (fn), id));
4599 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4600 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4601 				   == NULL_TREE));
4602 	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4603 	     otherwise in DWARF the DW_TAG_formal_parameter DIEs would not be
4604 	     children of the DW_TAG_inlined_subroutine, but of a
4605 	     DW_TAG_lexical_block under it.  The parameters can then be
4606 	     evaluated in the debugger, but don't show up in backtraces.  */
4607 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4608 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4609 	      {
4610 		tree v = *var;
4611 		*var = TREE_CHAIN (v);
4612 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4613 		BLOCK_VARS (id->block) = v;
4614 	      }
4615 	    else
4616 	      var = &TREE_CHAIN (*var);
4617 	}
4618       else
4619 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4620     }
4621 
4622   /* Return statements in the function body will be replaced by jumps
4623      to the RET_LABEL.  */
4624   gcc_assert (DECL_INITIAL (fn));
4625   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4626 
4627   /* Find the LHS to which the result of this call is assigned.  */
4628   return_slot = NULL;
4629   if (gimple_call_lhs (stmt))
4630     {
4631       modify_dest = gimple_call_lhs (stmt);
4632 
4633       /* Remember where to copy returned bounds.  */
4634       if (gimple_call_with_bounds_p (stmt)
4635 	  && TREE_CODE (modify_dest) == SSA_NAME)
4636 	{
4637 	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4638 	  if (retbnd)
4639 	    {
4640 	      return_bounds = gimple_call_lhs (retbnd);
4641 	      /* If returned bounds are not used then just
4642 		 remove unused call.  */
4643 	      if (!return_bounds)
4644 		{
4645 		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4646 		  gsi_remove (&iter, true);
4647 		}
4648 	    }
4649 	}
4650 
4651       /* The function which we are inlining might not return a value,
4652 	 in which case we should issue a warning that the function
4653 	 does not return a value.  In that case the optimizers will
4654 	 see that the variable to which the value is assigned was not
4655 	 initialized.  We do not want to issue a warning about that
4656 	 uninitialized variable.  */
4657       if (DECL_P (modify_dest))
4658 	TREE_NO_WARNING (modify_dest) = 1;
4659 
4660       if (gimple_call_return_slot_opt_p (call_stmt))
4661 	{
4662 	  return_slot = modify_dest;
4663 	  modify_dest = NULL;
4664 	}
4665     }
4666   else
4667     modify_dest = NULL;
4668 
4669   /* If we are inlining a call to the C++ operator new, we don't want
4670      to use type based alias analysis on the return value.  Otherwise
4671      we may get confused if the compiler sees that the inlined new
4672      function returns a pointer which was just deleted.  See bug
4673      33407.  */
4674   if (DECL_IS_OPERATOR_NEW (fn))
4675     {
4676       return_slot = NULL;
4677       modify_dest = NULL;
4678     }
4679 
4680   /* Declare the return variable for the function.  */
4681   use_retvar = declare_return_variable (id, return_slot, modify_dest,
4682 					return_bounds, bb);
4683 
4684   /* Add local vars in this inlined callee to caller.  */
4685   add_local_variables (id->src_cfun, cfun, id);
4686 
4687   if (dump_file && (dump_flags & TDF_DETAILS))
4688     {
4689       fprintf (dump_file, "Inlining ");
4690       print_generic_expr (dump_file, id->src_fn, 0);
4691       fprintf (dump_file, " to ");
4692       print_generic_expr (dump_file, id->dst_fn, 0);
4693       fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4694     }
4695 
4696   /* This is it.  Duplicate the callee body.  Assume callee is
4697      pre-gimplified.  Note that we must not alter the caller
4698      function in any way before this point, as this CALL_EXPR may be
4699      a self-referential call; if we're calling ourselves, we need to
4700      duplicate our body before altering anything.  */
4701   copy_body (id, cg_edge->callee->count,
4702   	     GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4703 	     bb, return_block, NULL);
4704 
4705   reset_debug_bindings (id, stmt_gsi);
4706 
4707   /* Reset the escaped solution.  */
4708   if (cfun->gimple_df)
4709     pt_solution_reset (&cfun->gimple_df->escaped);
4710 
4711   /* Clean up.  */
4712   if (id->debug_map)
4713     {
4714       delete id->debug_map;
4715       id->debug_map = dst;
4716     }
4717   delete id->decl_map;
4718   id->decl_map = st;
4719 
4720   /* Unlink the call's virtual operands before replacing it.  */
4721   unlink_stmt_vdef (stmt);
4722   if (gimple_vdef (stmt)
4723       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4724     release_ssa_name (gimple_vdef (stmt));
4725 
4726   /* If the inlined function returns a result that we care about,
4727      substitute the GIMPLE_CALL with an assignment of the return
4728      variable to the LHS of the call.  That is, if STMT was
4729      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4730   if (use_retvar && gimple_call_lhs (stmt))
4731     {
4732       gimple *old_stmt = stmt;
4733       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4734       gsi_replace (&stmt_gsi, stmt, false);
4735       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4736 
4737       /* Copy bounds if we copy structure with bounds.  */
4738       if (chkp_function_instrumented_p (id->dst_fn)
4739 	  && !BOUNDED_P (use_retvar)
4740 	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4741 	id->assign_stmts.safe_push (stmt);
4742     }
4743   else
4744     {
4745       /* Handle the case of inlining a function with no return
4746 	 statement, which causes the return value to become undefined.  */
4747       if (gimple_call_lhs (stmt)
4748 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4749 	{
4750 	  tree name = gimple_call_lhs (stmt);
4751 	  tree var = SSA_NAME_VAR (name);
4752 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4753 
4754 	  if (def)
4755 	    {
4756 	      /* If the variable is used undefined, make this name
4757 		 undefined via a move.  */
4758 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4759 	      gsi_replace (&stmt_gsi, stmt, true);
4760 	    }
4761 	  else
4762 	    {
4763 	      if (!var)
4764 		{
4765 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4766 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4767 		}
4768 	      /* Otherwise make this variable undefined.  */
4769 	      gsi_remove (&stmt_gsi, true);
4770 	      set_ssa_default_def (cfun, var, name);
4771 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4772 	    }
4773 	}
4774       else
4775         gsi_remove (&stmt_gsi, true);
4776     }
4777 
4778   /* Put returned bounds into the correct place if required.  */
4779   if (return_bounds)
4780     {
4781       gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4782       gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4783       gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4784       unlink_stmt_vdef (old_stmt);
4785       gsi_replace (&bnd_gsi, new_stmt, false);
4786       maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4787       cgraph_update_edges_for_call_stmt (old_stmt,
4788 					 gimple_call_fndecl (old_stmt),
4789 					 new_stmt);
4790     }
4791 
4792   if (purge_dead_abnormal_edges)
4793     {
4794       gimple_purge_dead_eh_edges (return_block);
4795       gimple_purge_dead_abnormal_call_edges (return_block);
4796     }
4797 
4798   /* If the value of the new expression is ignored, that's OK.  We
4799      don't warn about this for CALL_EXPRs, so we shouldn't warn about
4800      the equivalent inlined version either.  */
4801   if (is_gimple_assign (stmt))
4802     {
4803       gcc_assert (gimple_assign_single_p (stmt)
4804 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4805       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4806     }
4807 
4808   /* Copy bounds for all generated assigns that need it.  */
4809   for (i = 0; i < id->assign_stmts.length (); i++)
4810     chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4811   id->assign_stmts.release ();
4812 
4813   /* Output the inlining info for this abstract function, since it has been
4814      inlined.  If we don't do this now, we can lose the information about the
4815      variables in the function when the blocks get blown away as soon as we
4816      remove the cgraph node.  */
4817   if (gimple_block (stmt))
4818     (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4819 
4820   /* Update callgraph if needed.  */
4821   cg_edge->callee->remove ();
4822 
4823   id->block = NULL_TREE;
4824   successfully_inlined = true;
4825 
4826  egress:
4827   input_location = saved_location;
4828   return successfully_inlined;
4829 }
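
/* To illustrate the transformation above (a rough sketch only; the temporary
   name below is made up), a call site such as

       a_1 = foo (x_2);

   ends up, after expand_call_inline, as a copy of FOO's body with its
   PARM_DECLs remapped to local variables, followed by

       a_1 = retval_tmp;

   where retval_tmp stands for whatever variable declare_return_variable
   created; if the LHS is unused, the call statement is simply removed.  */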
4830 
4831 /* Expand call statements reachable from STMT_P.
4832    We can only have CALL_EXPRs as the "toplevel" tree code or nested
4833    in a MODIFY_EXPR.  */
4834 
4835 static bool
4836 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4837 {
4838   gimple_stmt_iterator gsi;
4839   bool inlined = false;
4840 
4841   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4842     {
4843       gimple *stmt = gsi_stmt (gsi);
4844       gsi_prev (&gsi);
4845 
4846       if (is_gimple_call (stmt)
4847 	  && !gimple_call_internal_p (stmt))
4848 	inlined |= expand_call_inline (bb, stmt, id);
4849     }
4850 
4851   return inlined;
4852 }
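
/* Note on the iteration order above: the walk starts at the last statement
   of BB and moves GSI to the previous statement before expanding the call.
   Inlining splits BB at the call site and moves the statements after the
   call into a new block, so GSI, which already points before the call,
   keeps iterating over the statements that remain in BB.  */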
4853 
4854 
4855 /* Walk all basic blocks created after FIRST and try to fold every statement
4856    in the STATEMENTS pointer set.  */
4857 
4858 static void
4859 fold_marked_statements (int first, hash_set<gimple *> *statements)
4860 {
4861   for (; first < n_basic_blocks_for_fn (cfun); first++)
4862     if (BASIC_BLOCK_FOR_FN (cfun, first))
4863       {
4864         gimple_stmt_iterator gsi;
4865 
4866 	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4867 	     !gsi_end_p (gsi);
4868 	     gsi_next (&gsi))
4869 	  if (statements->contains (gsi_stmt (gsi)))
4870 	    {
4871 	      gimple *old_stmt = gsi_stmt (gsi);
4872 	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4873 
4874 	      if (old_decl && DECL_BUILT_IN (old_decl))
4875 		{
4876 		  /* Folding builtins can create multiple instructions;
4877 		     we need to look at all of them.  */
4878 		  gimple_stmt_iterator i2 = gsi;
4879 		  gsi_prev (&i2);
4880 		  if (fold_stmt (&gsi))
4881 		    {
4882 		      gimple *new_stmt;
4883 		      /* If a builtin at the end of a bb folded into nothing,
4884 			 the following loop won't work.  */
4885 		      if (gsi_end_p (gsi))
4886 			{
4887 			  cgraph_update_edges_for_call_stmt (old_stmt,
4888 							     old_decl, NULL);
4889 			  break;
4890 			}
4891 		      if (gsi_end_p (i2))
4892 			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4893 		      else
4894 			gsi_next (&i2);
4895 		      while (1)
4896 			{
4897 			  new_stmt = gsi_stmt (i2);
4898 			  update_stmt (new_stmt);
4899 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4900 							     new_stmt);
4901 
4902 			  if (new_stmt == gsi_stmt (gsi))
4903 			    {
4904 			      /* It is okay to check only for the very last
4905 				 of these statements.  If it is a throwing
4906 				 statement nothing will change.  If it isn't
4907 				 this can remove EH edges.  The only way this
4908 				 could be wrong is if some intermediate stmts
4909 				 throw, but not the last one.  That would mean
4910 				 we'd have to split the block, which we can't
4911 				 do here and we'd lose anyway.  And as builtins
4912 				 probably never throw, this all
4913 				 is moot anyway.  */
4914 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
4915 								  new_stmt))
4916 				gimple_purge_dead_eh_edges (
4917 				  BASIC_BLOCK_FOR_FN (cfun, first));
4918 			      break;
4919 			    }
4920 			  gsi_next (&i2);
4921 			}
4922 		    }
4923 		}
4924 	      else if (fold_stmt (&gsi))
4925 		{
4926 		  /* Re-read the statement from GSI as fold_stmt() may
4927 		     have changed it.  */
4928 		  gimple *new_stmt = gsi_stmt (gsi);
4929 		  update_stmt (new_stmt);
4930 
4931 		  if (is_gimple_call (old_stmt)
4932 		      || is_gimple_call (new_stmt))
4933 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4934 						       new_stmt);
4935 
4936 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4937 		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4938 								    first));
4939 		}
4940 	    }
4941       }
4942 }
4943 
4944 /* Expand calls to inline functions in the body of FN.  */
4945 
4946 unsigned int
4947 optimize_inline_calls (tree fn)
4948 {
4949   copy_body_data id;
4950   basic_block bb;
4951   int last = n_basic_blocks_for_fn (cfun);
4952   bool inlined_p = false;
4953 
4954   /* Clear out ID.  */
4955   memset (&id, 0, sizeof (id));
4956 
4957   id.src_node = id.dst_node = cgraph_node::get (fn);
4958   gcc_assert (id.dst_node->definition);
4959   id.dst_fn = fn;
4960   /* Or any functions that aren't finished yet.  */
4961   if (current_function_decl)
4962     id.dst_fn = current_function_decl;
4963 
4964   id.copy_decl = copy_decl_maybe_to_var;
4965   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4966   id.transform_new_cfg = false;
4967   id.transform_return_to_modify = true;
4968   id.transform_parameter = true;
4969   id.transform_lang_insert_block = NULL;
4970   id.statements_to_fold = new hash_set<gimple *>;
4971 
4972   push_gimplify_context ();
4973 
4974   /* We make no attempts to keep dominance info up-to-date.  */
4975   free_dominance_info (CDI_DOMINATORS);
4976   free_dominance_info (CDI_POST_DOMINATORS);
4977 
4978   /* Register specific gimple functions.  */
4979   gimple_register_cfg_hooks ();
4980 
4981   /* Reach the trees by walking over the CFG, and note the
4982      enclosing basic-blocks in the call edges.  */
4983   /* We walk the blocks going forward, because inlined function bodies
4984      will split id->current_basic_block, and the new blocks will
4985      follow it; we'll trudge through them, processing their CALL_EXPRs
4986      along the way.  */
4987   FOR_EACH_BB_FN (bb, cfun)
4988     inlined_p |= gimple_expand_calls_inline (bb, &id);
4989 
4990   pop_gimplify_context (NULL);
4991 
4992   if (flag_checking)
4993     {
4994       struct cgraph_edge *e;
4995 
4996       id.dst_node->verify ();
4997 
4998       /* Double check that we inlined everything we are supposed to inline.  */
4999       for (e = id.dst_node->callees; e; e = e->next_callee)
5000 	gcc_assert (e->inline_failed);
5001     }
5002 
5003   /* Fold queued statements.  */
5004   fold_marked_statements (last, id.statements_to_fold);
5005   delete id.statements_to_fold;
5006 
5007   gcc_assert (!id.debug_stmts.exists ());
5008 
5009   /* If we didn't inline into the function there is nothing to do.  */
5010   if (!inlined_p)
5011     return 0;
5012 
5013   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5014   number_blocks (fn);
5015 
5016   delete_unreachable_blocks_update_callgraph (&id);
5017   if (flag_checking)
5018     id.dst_node->verify ();
5019 
5020   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5021      not possible yet - the IPA passes might make various functions not
5022      throw, and they don't care to proactively update local EH info.  This is
5023      done later in the fixup_cfg pass, which also executes the verification.  */
5024   return (TODO_update_ssa
5025 	  | TODO_cleanup_cfg
5026 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5027 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5028 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5029 	     ? TODO_rebuild_frequencies : 0));
5030 }
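
/* A hypothetical caller (hedged sketch; the real drivers live in the
   inliner proper) would apply the inline plan for the current function and
   propagate the requested cleanups like so:

       unsigned int todo = optimize_inline_calls (current_function_decl);

   merging TODO into the pass's own return value so that the SSA and CFG
   updates listed above actually run.  */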
5031 
5032 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5033 
5034 tree
5035 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5036 {
5037   enum tree_code code = TREE_CODE (*tp);
5038   enum tree_code_class cl = TREE_CODE_CLASS (code);
5039 
5040   /* We make copies of most nodes.  */
5041   if (IS_EXPR_CODE_CLASS (cl)
5042       || code == TREE_LIST
5043       || code == TREE_VEC
5044       || code == TYPE_DECL
5045       || code == OMP_CLAUSE)
5046     {
5047       /* Because the chain gets clobbered when we make a copy, we save it
5048 	 here.  */
5049       tree chain = NULL_TREE, new_tree;
5050 
5051       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5052 	chain = TREE_CHAIN (*tp);
5053 
5054       /* Copy the node.  */
5055       new_tree = copy_node (*tp);
5056 
5057       *tp = new_tree;
5058 
5059       /* Now, restore the chain, if appropriate.  That will cause
5060 	 walk_tree to walk into the chain as well.  */
5061       if (code == PARM_DECL
5062 	  || code == TREE_LIST
5063 	  || code == OMP_CLAUSE)
5064 	TREE_CHAIN (*tp) = chain;
5065 
5066       /* For now, we don't update BLOCKs when we make copies.  So, we
5067 	 have to nullify all BIND_EXPRs.  */
5068       if (TREE_CODE (*tp) == BIND_EXPR)
5069 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5070     }
5071   else if (code == CONSTRUCTOR)
5072     {
5073       /* CONSTRUCTOR nodes need special handling because
5074          we need to duplicate the vector of elements.  */
5075       tree new_tree;
5076 
5077       new_tree = copy_node (*tp);
5078       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5079       *tp = new_tree;
5080     }
5081   else if (code == STATEMENT_LIST)
5082     /* We used to just abort on STATEMENT_LIST, but we can run into them
5083        with statement-expressions (c++/40975).  */
5084     copy_statement_list (tp);
5085   else if (TREE_CODE_CLASS (code) == tcc_type)
5086     *walk_subtrees = 0;
5087   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5088     *walk_subtrees = 0;
5089   else if (TREE_CODE_CLASS (code) == tcc_constant)
5090     *walk_subtrees = 0;
5091   return NULL_TREE;
5092 }
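
/* A minimal usage sketch for copy_tree_r (hedged; EXPR is a placeholder):

       tree t = expr;
       walk_tree (&t, copy_tree_r, NULL, NULL);

   After the walk, T refers to fresh copies of all expression nodes reachable
   from EXPR, while types, declarations and constants remain shared, since
   walk_subtrees is cleared for those classes above.  */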
5093 
5094 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5095    information indicating to what new SAVE_EXPR this one should be mapped,
5096    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5097    the function into which the copy will be placed.  */
5098 
5099 static void
5100 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5101 {
5102   tree *n;
5103   tree t;
5104 
5105   /* See if we already encountered this SAVE_EXPR.  */
5106   n = st->get (*tp);
5107 
5108   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5109   if (!n)
5110     {
5111       t = copy_node (*tp);
5112 
5113       /* Remember this SAVE_EXPR.  */
5114       st->put (*tp, t);
5115       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5116       st->put (t, t);
5117     }
5118   else
5119     {
5120       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5121       *walk_subtrees = 0;
5122       t = *n;
5123     }
5124 
5125   /* Replace this SAVE_EXPR with the copy.  */
5126   *tp = t;
5127 }
5128 
5129 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5130    label, copies the declaration and enters it in the decl map in DATA (which
5131    is really a 'copy_body_data *').  */
5132 
5133 static tree
5134 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5135 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5136 		        struct walk_stmt_info *wi)
5137 {
5138   copy_body_data *id = (copy_body_data *) wi->info;
5139   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5140 
5141   if (stmt)
5142     {
5143       tree decl = gimple_label_label (stmt);
5144 
5145       /* Copy the decl and remember the copy.  */
5146       insert_decl_map (id, decl, id->copy_decl (decl, id));
5147     }
5148 
5149   return NULL_TREE;
5150 }
5151 
5152 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5153 						  struct walk_stmt_info *wi);
5154 
5155 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5156    Using the decl map pointed to by ST (really the hash_map in WI->info),
5157    remaps all local declarations to appropriate replacements in gimple
5158    operands. */
5159 
5160 static tree
5161 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5162 {
5163   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5164   copy_body_data *id = (copy_body_data *) wi->info;
5165   hash_map<tree, tree> *st = id->decl_map;
5166   tree *n;
5167   tree expr = *tp;
5168 
5169   /* Only a local declaration (variable or label).  */
5170   if ((TREE_CODE (expr) == VAR_DECL
5171        && !TREE_STATIC (expr))
5172       || TREE_CODE (expr) == LABEL_DECL)
5173     {
5174       /* Lookup the declaration.  */
5175       n = st->get (expr);
5176 
5177       /* If it's there, remap it.  */
5178       if (n)
5179 	*tp = *n;
5180       *walk_subtrees = 0;
5181     }
5182   else if (TREE_CODE (expr) == STATEMENT_LIST
5183 	   || TREE_CODE (expr) == BIND_EXPR
5184 	   || TREE_CODE (expr) == SAVE_EXPR)
5185     gcc_unreachable ();
5186   else if (TREE_CODE (expr) == TARGET_EXPR)
5187     {
5188       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5189          It's OK for this to happen if it was part of a subtree that
5190          isn't immediately expanded, such as operand 2 of another
5191          TARGET_EXPR.  */
5192       if (!TREE_OPERAND (expr, 1))
5193 	{
5194 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5195 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5196 	}
5197     }
5198   else if (TREE_CODE (expr) == OMP_CLAUSE)
5199     {
5200       /* Before the omplower pass completes, some OMP clauses can contain
5201 	 sequences that are neither copied by gimple_seq_copy nor walked by
5202 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5203 	 in those situations, we have to copy and process them explicitly.  */
5204 
5205       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5206 	{
5207 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5208 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5209 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5210 	}
5211       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5212 	{
5213 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5214 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5215 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5216 	}
5217       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5218 	{
5219 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5220 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5221 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5222 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5223 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5224 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5225 	}
5226     }
5227 
5228   /* Keep iterating.  */
5229   return NULL_TREE;
5230 }
5231 
5232 
5233 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5234    Using the decl map of the copy_body_data pointed to by WI->info,
5235    remaps all local declarations to appropriate replacements in gimple
5236    statements. */
5237 
5238 static tree
5239 replace_locals_stmt (gimple_stmt_iterator *gsip,
5240 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5241 		     struct walk_stmt_info *wi)
5242 {
5243   copy_body_data *id = (copy_body_data *) wi->info;
5244   gimple *gs = gsi_stmt (*gsip);
5245 
5246   if (gbind *stmt = dyn_cast <gbind *> (gs))
5247     {
5248       tree block = gimple_bind_block (stmt);
5249 
5250       if (block)
5251 	{
5252 	  remap_block (&block, id);
5253 	  gimple_bind_set_block (stmt, block);
5254 	}
5255 
5256       /* This will remap a lot of the same decls again, but this should be
5257 	 harmless.  */
5258       if (gimple_bind_vars (stmt))
5259 	{
5260 	  tree old_var, decls = gimple_bind_vars (stmt);
5261 
5262 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5263 	    if (!can_be_nonlocal (old_var, id)
5264 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5265 	      remap_decl (old_var, id);
5266 
5267 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5268 	  id->prevent_decl_creation_for_types = true;
5269 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5270 	  id->prevent_decl_creation_for_types = false;
5271 	}
5272     }
5273 
5274   /* Keep iterating.  */
5275   return NULL_TREE;
5276 }
5277 
5278 /* Create a copy of SEQ and remap all decls in it.  */
5279 
5280 static gimple_seq
5281 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5282 {
5283   if (!seq)
5284     return NULL;
5285 
5286   /* If there are any labels in OMP sequences, they can only be referred to
5287      within the sequence itself, and therefore we can do both steps here.  */
5288   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5289   gimple_seq copy = gimple_seq_copy (seq);
5290   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5291   return copy;
5292 }
5293 
5294 /* Copies everything in SEQ and replaces variables and labels local to
5295    current_function_decl.  */
5296 
5297 gimple_seq
5298 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5299 {
5300   copy_body_data id;
5301   struct walk_stmt_info wi;
5302   gimple_seq copy;
5303 
5304   /* There's nothing to do for NULL_TREE.  */
5305   if (seq == NULL)
5306     return seq;
5307 
5308   /* Set up ID.  */
5309   memset (&id, 0, sizeof (id));
5310   id.src_fn = current_function_decl;
5311   id.dst_fn = current_function_decl;
5312   id.decl_map = new hash_map<tree, tree>;
5313   id.debug_map = NULL;
5314 
5315   id.copy_decl = copy_decl_no_change;
5316   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5317   id.transform_new_cfg = false;
5318   id.transform_return_to_modify = false;
5319   id.transform_parameter = false;
5320   id.transform_lang_insert_block = NULL;
5321 
5322   /* Walk the tree once to find local labels.  */
5323   memset (&wi, 0, sizeof (wi));
5324   hash_set<tree> visited;
5325   wi.info = &id;
5326   wi.pset = &visited;
5327   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5328 
5329   copy = gimple_seq_copy (seq);
5330 
5331   /* Walk the copy, remapping decls.  */
5332   memset (&wi, 0, sizeof (wi));
5333   wi.info = &id;
5334   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5335 
5336   /* Clean up.  */
5337   delete id.decl_map;
5338   if (id.debug_map)
5339     delete id.debug_map;
5340   if (id.dependence_map)
5341     {
5342       delete id.dependence_map;
5343       id.dependence_map = NULL;
5344     }
5345 
5346   return copy;
5347 }
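
/* Usage sketch (hedged): a caller that needs an independent copy of a
   statement sequence from the current function can simply do

       gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   after which COPY carries its own copies of the sequence's local VAR_DECLs
   and LABEL_DECLs, so modifying it cannot clobber the original SEQ.  */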
5348 
5349 
5350 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5351 
5352 static tree
5353 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5354 {
5355   if (*tp == data)
5356     return (tree) data;
5357   else
5358     return NULL;
5359 }
5360 
5361 DEBUG_FUNCTION bool
5362 debug_find_tree (tree top, tree search)
5363 {
5364   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5365 }
5366 
5367 
5368 /* Declare the variables created by the inliner.  Add all the variables in
5369    VARS to BIND_EXPR.  */
5370 
5371 static void
5372 declare_inline_vars (tree block, tree vars)
5373 {
5374   tree t;
5375   for (t = vars; t; t = DECL_CHAIN (t))
5376     {
5377       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5378       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5379       add_local_decl (cfun, t);
5380     }
5381 
5382   if (block)
5383     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5384 }
5385 
5386 /* Finish up the copy COPY of DECL.  DECL originally lived in ID->src_fn,
5387    but the copy will live in ID->dst_fn.  Set up the debug info, abstract
5388    origin and context of COPY and return it.  */
5389 
5390 static tree
5391 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5392 {
5393   /* Don't generate debug information for the copy if we wouldn't have
5394      generated it for the original either.  */
5395   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5396   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5397 
5398   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5399      declaration inspired this copy.  */
5400   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5401 
5402   /* The new variable/label has no RTL, yet.  */
5403   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5404       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5405     SET_DECL_RTL (copy, 0);
5406 
5407   /* These args would always appear unused, if not for this.  */
5408   TREE_USED (copy) = 1;
5409 
5410   /* Set the context for the new declaration.  */
5411   if (!DECL_CONTEXT (decl))
5412     /* Globals stay global.  */
5413     ;
5414   else if (DECL_CONTEXT (decl) != id->src_fn)
5415     /* Things that weren't in the scope of the function we're inlining
5416        from aren't in the scope we're inlining to, either.  */
5417     ;
5418   else if (TREE_STATIC (decl))
5419     /* Function-scoped static variables should stay in the original
5420        function.  */
5421     ;
5422   else
5423     /* Ordinary automatic local variables are now in the scope of the
5424        new function.  */
5425     DECL_CONTEXT (copy) = id->dst_fn;
5426 
5427   return copy;
5428 }
5429 
5430 static tree
5431 copy_decl_to_var (tree decl, copy_body_data *id)
5432 {
5433   tree copy, type;
5434 
5435   gcc_assert (TREE_CODE (decl) == PARM_DECL
5436 	      || TREE_CODE (decl) == RESULT_DECL);
5437 
5438   type = TREE_TYPE (decl);
5439 
5440   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5441 		     VAR_DECL, DECL_NAME (decl), type);
5442   if (DECL_PT_UID_SET_P (decl))
5443     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5444   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5445   TREE_READONLY (copy) = TREE_READONLY (decl);
5446   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5447   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5448 
5449   return copy_decl_for_dup_finish (id, decl, copy);
5450 }
5451 
5452 /* Like copy_decl_to_var, but create a return slot object instead of a
5453    pointer variable for return by invisible reference.  */
5454 
5455 static tree
5456 copy_result_decl_to_var (tree decl, copy_body_data *id)
5457 {
5458   tree copy, type;
5459 
5460   gcc_assert (TREE_CODE (decl) == PARM_DECL
5461 	      || TREE_CODE (decl) == RESULT_DECL);
5462 
5463   type = TREE_TYPE (decl);
5464   if (DECL_BY_REFERENCE (decl))
5465     type = TREE_TYPE (type);
5466 
5467   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5468 		     VAR_DECL, DECL_NAME (decl), type);
5469   if (DECL_PT_UID_SET_P (decl))
5470     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5471   TREE_READONLY (copy) = TREE_READONLY (decl);
5472   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5473   if (!DECL_BY_REFERENCE (decl))
5474     {
5475       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5476       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5477     }
5478 
5479   return copy_decl_for_dup_finish (id, decl, copy);
5480 }
5481 
5482 tree
5483 copy_decl_no_change (tree decl, copy_body_data *id)
5484 {
5485   tree copy;
5486 
5487   copy = copy_node (decl);
5488 
5489   /* The COPY is not abstract; it will be generated in DST_FN.  */
5490   DECL_ABSTRACT_P (copy) = false;
5491   lang_hooks.dup_lang_specific_decl (copy);
5492 
5493   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5494      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5495   if (TREE_CODE (copy) == LABEL_DECL)
5496     {
5497       TREE_ADDRESSABLE (copy) = 0;
5498       LABEL_DECL_UID (copy) = -1;
5499     }
5500 
5501   return copy_decl_for_dup_finish (id, decl, copy);
5502 }
5503 
5504 static tree
5505 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5506 {
5507   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5508     return copy_decl_to_var (decl, id);
5509   else
5510     return copy_decl_no_change (decl, id);
5511 }
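
/* Summary of the copy_decl_* callbacks above: the copy_body_data::copy_decl
   hook selects how declarations are duplicated.  Inlining installs
   copy_decl_maybe_to_var (see optimize_inline_calls), so PARM_DECLs and
   RESULT_DECLs become ordinary VAR_DECLs in the caller, whereas versioning
   and sequence copying install copy_decl_no_change, which preserves the
   kind of the declaration.  */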
5512 
5513 /* Return a copy of the function's argument tree.  */
5514 static tree
5515 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5516 			       bitmap args_to_skip, tree *vars)
5517 {
5518   tree arg, *parg;
5519   tree new_parm = NULL;
5520   int i = 0;
5521 
5522   parg = &new_parm;
5523 
5524   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5525     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5526       {
5527         tree new_tree = remap_decl (arg, id);
5528 	if (TREE_CODE (new_tree) != PARM_DECL)
5529 	  new_tree = id->copy_decl (arg, id);
5530         lang_hooks.dup_lang_specific_decl (new_tree);
5531         *parg = new_tree;
5532 	parg = &DECL_CHAIN (new_tree);
5533       }
5534     else if (!id->decl_map->get (arg))
5535       {
5536 	/* Make an equivalent VAR_DECL.  If the argument was used
5537 	   as a temporary variable later in the function, the uses will be
5538 	   replaced by the local variable.  */
5539 	tree var = copy_decl_to_var (arg, id);
5540 	insert_decl_map (id, arg, var);
5541         /* Declare this new variable.  */
5542         DECL_CHAIN (var) = *vars;
5543         *vars = var;
5544       }
5545   return new_parm;
5546 }
5547 
5548 /* Return a copy of the function's static chain.  */
5549 static tree
5550 copy_static_chain (tree static_chain, copy_body_data * id)
5551 {
5552   tree *chain_copy, *pvar;
5553 
5554   chain_copy = &static_chain;
5555   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5556     {
5557       tree new_tree = remap_decl (*pvar, id);
5558       lang_hooks.dup_lang_specific_decl (new_tree);
5559       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5560       *pvar = new_tree;
5561     }
5562   return static_chain;
5563 }
5564 
5565 /* Return true if the function is allowed to be versioned.
5566    This is a guard for the versioning functionality.  */
5567 
5568 bool
5569 tree_versionable_function_p (tree fndecl)
5570 {
5571   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5572 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5573 }
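
/* Usage sketch (hedged; NODE stands for some cgraph_node): code that wants
   to version a function is expected to check this guard first, e.g.

       if (tree_versionable_function_p (node->decl))
	 { ... create the clone and call tree_function_versioning ... }

   so that "noclone" functions and bodies rejected by copy_forbidden are
   never duplicated.  */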
5574 
5575 /* Delete all unreachable basic blocks and update callgraph.
5576    Doing so is somewhat nontrivial because we need to update all clones and
5577    remove inline functions that become unreachable.  */
5578 
5579 static bool
5580 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5581 {
5582   bool changed = false;
5583   basic_block b, next_bb;
5584 
5585   find_unreachable_blocks ();
5586 
5587   /* Delete all unreachable basic blocks.  */
5588 
5589   for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5590        != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5591     {
5592       next_bb = b->next_bb;
5593 
5594       if (!(b->flags & BB_REACHABLE))
5595 	{
5596           gimple_stmt_iterator bsi;
5597 
5598           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5599 	    {
5600 	      struct cgraph_edge *e;
5601 	      struct cgraph_node *node;
5602 
5603 	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5604 
5605 	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5606 		  && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5607 		{
5608 		  if (!e->inline_failed)
5609 		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
5610 		  else
5611 		    e->remove ();
5612 		}
5613 	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5614 		  && id->dst_node->clones)
5615 		for (node = id->dst_node->clones; node != id->dst_node;)
5616 		  {
5617 		    node->remove_stmt_references (gsi_stmt (bsi));
5618 		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5619 			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5620 		      {
5621 			if (!e->inline_failed)
5622 			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
5623 			else
5624 			  e->remove ();
5625 		      }
5626 
5627 		    if (node->clones)
5628 		      node = node->clones;
5629 		    else if (node->next_sibling_clone)
5630 		      node = node->next_sibling_clone;
5631 		    else
5632 		      {
5633 			while (node != id->dst_node && !node->next_sibling_clone)
5634 			  node = node->clone_of;
5635 			if (node != id->dst_node)
5636 			  node = node->next_sibling_clone;
5637 		      }
5638 		  }
5639 	    }
5640 	  delete_basic_block (b);
5641 	  changed = true;
5642 	}
5643     }
5644 
5645   return changed;
5646 }
5647 
5648 /* Update clone info after duplication.  */
5649 
5650 static void
5651 update_clone_info (copy_body_data * id)
5652 {
5653   struct cgraph_node *node;
5654   if (!id->dst_node->clones)
5655     return;
5656   for (node = id->dst_node->clones; node != id->dst_node;)
5657     {
5658       /* First update replace maps to match the new body.  */
5659       if (node->clone.tree_map)
5660         {
5661 	  unsigned int i;
5662           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5663 	    {
5664 	      struct ipa_replace_map *replace_info;
5665 	      replace_info = (*node->clone.tree_map)[i];
5666 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5667 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5668 	    }
5669 	}
5670       if (node->clones)
5671 	node = node->clones;
5672       else if (node->next_sibling_clone)
5673 	node = node->next_sibling_clone;
5674       else
5675 	{
5676 	  while (node != id->dst_node && !node->next_sibling_clone)
5677 	    node = node->clone_of;
5678 	  if (node != id->dst_node)
5679 	    node = node->next_sibling_clone;
5680 	}
5681     }
5682 }
5683 
5684 /* Create a copy of a function's tree.
5685    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5686    of the original function and the new copied function
5687    respectively.  In case we want to replace a DECL
5688    tree with another tree while duplicating the function's
5689    body, TREE_MAP represents the mapping between these
5690    trees. If UPDATE_CLONES is set, the call_stmt fields
5691    of edges of clones of the function will be updated.
5692 
5693    If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5694    from the new version.
5695    If SKIP_RETURN is true, the new version will return void.
5696    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5697    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5698 */
5699 void
5700 tree_function_versioning (tree old_decl, tree new_decl,
5701 			  vec<ipa_replace_map *, va_gc> *tree_map,
5702 			  bool update_clones, bitmap args_to_skip,
5703 			  bool skip_return, bitmap blocks_to_copy,
5704 			  basic_block new_entry)
5705 {
5706   struct cgraph_node *old_version_node;
5707   struct cgraph_node *new_version_node;
5708   copy_body_data id;
5709   tree p;
5710   unsigned i;
5711   struct ipa_replace_map *replace_info;
5712   basic_block old_entry_block, bb;
5713   auto_vec<gimple *, 10> init_stmts;
5714   tree vars = NULL_TREE;
5715   bitmap debug_args_to_skip = args_to_skip;
5716 
5717   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5718 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5719   DECL_POSSIBLY_INLINED (old_decl) = 1;
5720 
5721   old_version_node = cgraph_node::get (old_decl);
5722   gcc_checking_assert (old_version_node);
5723   new_version_node = cgraph_node::get (new_decl);
5724   gcc_checking_assert (new_version_node);
5725 
5726   /* Copy over debug args.  */
5727   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5728     {
5729       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5730       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5731       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5732       old_debug_args = decl_debug_args_lookup (old_decl);
5733       if (old_debug_args)
5734 	{
5735 	  new_debug_args = decl_debug_args_insert (new_decl);
5736 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5737 	}
5738     }
5739 
5740   /* Output the inlining info for this abstract function, since it has been
5741      inlined.  If we don't do this now, we can lose the information about the
5742      variables in the function when the blocks get blown away as soon as we
5743      remove the cgraph node.  */
5744   (*debug_hooks->outlining_inline_function) (old_decl);
5745 
5746   DECL_ARTIFICIAL (new_decl) = 1;
5747   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5748   if (DECL_ORIGIN (old_decl) == old_decl)
5749     old_version_node->used_as_abstract_origin = true;
5750   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5751 
5752   /* Prepare the data structures for the tree copy.  */
5753   memset (&id, 0, sizeof (id));
5754 
5755   /* Generate a new name for the new version. */
5756   id.statements_to_fold = new hash_set<gimple *>;
5757 
5758   id.decl_map = new hash_map<tree, tree>;
5759   id.debug_map = NULL;
5760   id.src_fn = old_decl;
5761   id.dst_fn = new_decl;
5762   id.src_node = old_version_node;
5763   id.dst_node = new_version_node;
5764   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5765   id.blocks_to_copy = blocks_to_copy;
5766 
5767   id.copy_decl = copy_decl_no_change;
5768   id.transform_call_graph_edges
5769     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5770   id.transform_new_cfg = true;
5771   id.transform_return_to_modify = false;
5772   id.transform_parameter = false;
5773   id.transform_lang_insert_block = NULL;
5774 
5775   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5776     (DECL_STRUCT_FUNCTION (old_decl));
5777   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5778   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5779   initialize_cfun (new_decl, old_decl,
5780 		   old_entry_block->count);
5781   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5782     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5783       = id.src_cfun->gimple_df->ipa_pta;
5784 
5785   /* Copy the function's static chain.  */
5786   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5787   if (p)
5788     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5789       = copy_static_chain (p, &id);
5790 
5791   /* If there's a tree_map, prepare for substitution.  */
5792   if (tree_map)
5793     for (i = 0; i < tree_map->length (); i++)
5794       {
5795 	gimple *init;
5796 	replace_info = (*tree_map)[i];
5797 	if (replace_info->replace_p)
5798 	  {
5799 	    int parm_num = -1;
5800 	    if (!replace_info->old_tree)
5801 	      {
5802 		int p = replace_info->parm_num;
5803 		tree parm;
5804 		tree req_type, new_type;
5805 
5806 		for (parm = DECL_ARGUMENTS (old_decl); p;
5807 		     parm = DECL_CHAIN (parm))
5808 		  p--;
5809 		replace_info->old_tree = parm;
5810 		parm_num = replace_info->parm_num;
5811 		req_type = TREE_TYPE (parm);
5812 		new_type = TREE_TYPE (replace_info->new_tree);
5813 		if (!useless_type_conversion_p (req_type, new_type))
5814 		  {
5815 		    if (fold_convertible_p (req_type, replace_info->new_tree))
5816 		      replace_info->new_tree
5817 			= fold_build1 (NOP_EXPR, req_type,
5818 				       replace_info->new_tree);
5819 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5820 		      replace_info->new_tree
5821 			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
5822 				       replace_info->new_tree);
5823 		    else
5824 		      {
5825 			if (dump_file)
5826 			  {
5827 			    fprintf (dump_file, "    const ");
5828 			    print_generic_expr (dump_file,
5829 						replace_info->new_tree, 0);
5830 			    fprintf (dump_file,
5831 				     "  can't be converted to param ");
5832 			    print_generic_expr (dump_file, parm, 0);
5833 			    fprintf (dump_file, "\n");
5834 			  }
5835 			replace_info->old_tree = NULL;
5836 		      }
5837 		  }
5838 	      }
5839 	    else
5840 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5841 	    if (replace_info->old_tree)
5842 	      {
5843 		init = setup_one_parameter (&id, replace_info->old_tree,
5844 					    replace_info->new_tree, id.src_fn,
5845 					    NULL,
5846 					    &vars);
5847 		if (init)
5848 		  init_stmts.safe_push (init);
5849 		if (MAY_HAVE_DEBUG_STMTS && args_to_skip)
5850 		  {
5851 		    if (parm_num == -1)
5852 		      {
5853 			tree parm;
5854 			int p;
5855 			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5856 			     parm = DECL_CHAIN (parm), p++)
5857 			  if (parm == replace_info->old_tree)
5858 			    {
5859 			      parm_num = p;
5860 			      break;
5861 			    }
5862 		      }
5863 		    if (parm_num != -1)
5864 		      {
5865 			if (debug_args_to_skip == args_to_skip)
5866 			  {
5867 			    debug_args_to_skip = BITMAP_ALLOC (NULL);
5868 			    bitmap_copy (debug_args_to_skip, args_to_skip);
5869 			  }
5870 			bitmap_clear_bit (debug_args_to_skip, parm_num);
5871 		      }
5872 		  }
5873 	      }
5874 	  }
5875       }
5876   /* Copy the function's arguments.  */
5877   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5878     DECL_ARGUMENTS (new_decl)
5879       = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5880 				       args_to_skip, &vars);
5881 
5882   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5883   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5884 
5885   declare_inline_vars (DECL_INITIAL (new_decl), vars);
5886 
5887   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5888     /* Add local vars.  */
5889     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5890 
5891   if (DECL_RESULT (old_decl) == NULL_TREE)
5892     ;
5893   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5894     {
5895       DECL_RESULT (new_decl)
5896 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5897 		      RESULT_DECL, NULL_TREE, void_type_node);
5898       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5899       cfun->returns_struct = 0;
5900       cfun->returns_pcc_struct = 0;
5901     }
5902   else
5903     {
5904       tree old_name;
5905       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5906       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5907       if (gimple_in_ssa_p (id.src_cfun)
5908 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5909 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5910 	{
5911 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5912 	  insert_decl_map (&id, old_name, new_name);
5913 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5914 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5915 	}
5916     }
5917 
5918   /* Set up the destination function's loop tree.  */
5919   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5920     {
5921       cfun->curr_properties &= ~PROP_loops;
5922       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5923       cfun->curr_properties |= PROP_loops;
5924     }
5925 
5926   /* Copy the Function's body.  */
5927   copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5928 	     ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5929 	     new_entry);
5930 
5931   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5932   number_blocks (new_decl);
5933 
5934   /* We want to create the BB unconditionally, so that the addition of
5935      debug stmts doesn't affect BB count, which may in the end cause
5936      codegen differences.  */
5937   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5938   while (init_stmts.length ())
5939     insert_init_stmt (&id, bb, init_stmts.pop ());
5940   update_clone_info (&id);
5941 
5942   /* Remap the nonlocal_goto_save_area, if any.  */
5943   if (cfun->nonlocal_goto_save_area)
5944     {
5945       struct walk_stmt_info wi;
5946 
5947       memset (&wi, 0, sizeof (wi));
5948       wi.info = &id;
5949       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5950     }
5951 
5952   /* Clean up.  */
5953   delete id.decl_map;
5954   if (id.debug_map)
5955     delete id.debug_map;
5956   free_dominance_info (CDI_DOMINATORS);
5957   free_dominance_info (CDI_POST_DOMINATORS);
5958 
5959   fold_marked_statements (0, id.statements_to_fold);
5960   delete id.statements_to_fold;
5961   delete_unreachable_blocks_update_callgraph (&id);
5962   if (id.dst_node->definition)
5963     cgraph_edge::rebuild_references ();
5964   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5965     {
5966       calculate_dominance_info (CDI_DOMINATORS);
5967       fix_loop_structure (NULL);
5968     }
5969   update_ssa (TODO_update_ssa);
5970 
5971   /* After partial cloning we need to rescale frequencies, so they are
5972      within proper range in the cloned function.  */
5973   if (new_entry)
5974     {
5975       struct cgraph_edge *e;
5976       rebuild_frequencies ();
5977 
5978       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5979       for (e = new_version_node->callees; e; e = e->next_callee)
5980 	{
5981 	  basic_block bb = gimple_bb (e->call_stmt);
5982 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5983 							 bb);
5984 	  e->count = bb->count;
5985 	}
5986       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5987 	{
5988 	  basic_block bb = gimple_bb (e->call_stmt);
5989 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5990 							 bb);
5991 	  e->count = bb->count;
5992 	}
5993     }
5994 
5995   if (debug_args_to_skip && MAY_HAVE_DEBUG_STMTS)
5996     {
5997       tree parm;
5998       vec<tree, va_gc> **debug_args = NULL;
5999       unsigned int len = 0;
6000       for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6001 	   parm; parm = DECL_CHAIN (parm), i++)
6002 	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6003 	  {
6004 	    tree ddecl;
6005 
6006 	    if (debug_args == NULL)
6007 	      {
6008 		debug_args = decl_debug_args_insert (new_decl);
6009 		len = vec_safe_length (*debug_args);
6010 	      }
6011 	    ddecl = make_node (DEBUG_EXPR_DECL);
6012 	    DECL_ARTIFICIAL (ddecl) = 1;
6013 	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
6014 	    DECL_MODE (ddecl) = DECL_MODE (parm);
6015 	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6016 	    vec_safe_push (*debug_args, ddecl);
6017 	  }
6018       if (debug_args != NULL)
6019 	{
6020 	  /* On the callee side, add
6021 	     DEBUG D#Y s=> parm
6022 	     DEBUG var => D#Y
6023 	     stmts to the first bb where var is a VAR_DECL created for the
6024 	     optimized away parameter in DECL_INITIAL block.  This hints
6025 	     in the debug info that var (whose DECL_ORIGIN is the parm
6026 	     PARM_DECL) is optimized away, but could be looked up at the
6027 	     call site as value of D#X there.  */
6028 	  tree var = vars, vexpr;
6029 	  gimple_stmt_iterator cgsi
6030 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6031 	  gimple *def_temp;
6032 	  var = vars;
6033 	  i = vec_safe_length (*debug_args);
6034 	  do
6035 	    {
6036 	      i -= 2;
6037 	      while (var != NULL_TREE
6038 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6039 		var = TREE_CHAIN (var);
6040 	      if (var == NULL_TREE)
6041 		break;
6042 	      vexpr = make_node (DEBUG_EXPR_DECL);
6043 	      parm = (**debug_args)[i];
6044 	      DECL_ARTIFICIAL (vexpr) = 1;
6045 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6046 	      DECL_MODE (vexpr) = DECL_MODE (parm);
6047 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6048 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6049 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6050 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6051 	    }
6052 	  while (i > len);
6053 	}
6054     }
6055 
6056   if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6057     BITMAP_FREE (debug_args_to_skip);
6058   free_dominance_info (CDI_DOMINATORS);
6059   free_dominance_info (CDI_POST_DOMINATORS);
6060 
6061   gcc_assert (!id.debug_stmts.exists ());
6062   pop_cfun ();
6063   return;
6064 }
6065 
6066 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6067    the callee and return the inlined body on success.  */
6068 
6069 tree
6070 maybe_inline_call_in_expr (tree exp)
6071 {
6072   tree fn = get_callee_fndecl (exp);
6073 
6074   /* We can only try to inline "const" functions.  */
6075   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6076     {
6077       call_expr_arg_iterator iter;
6078       copy_body_data id;
6079       tree param, arg, t;
6080       hash_map<tree, tree> decl_map;
6081 
6082       /* Remap the parameters.  */
6083       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6084 	   param;
6085 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6086 	decl_map.put (param, arg);
6087 
6088       memset (&id, 0, sizeof (id));
6089       id.src_fn = fn;
6090       id.dst_fn = current_function_decl;
6091       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6092       id.decl_map = &decl_map;
6093 
6094       id.copy_decl = copy_decl_no_change;
6095       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6096       id.transform_new_cfg = false;
6097       id.transform_return_to_modify = true;
6098       id.transform_parameter = true;
6099       id.transform_lang_insert_block = NULL;
6100 
6101       /* Make sure not to unshare trees behind the front-end's back
6102 	 since front-end specific mechanisms may rely on sharing.  */
6103       id.regimplify = false;
6104       id.do_not_unshare = true;
6105 
6106       /* We're not inside any EH region.  */
6107       id.eh_lp_nr = 0;
6108 
6109       t = copy_tree_body (&id);
6110 
6111       /* We can only return something suitable for use in a GENERIC
6112 	 expression tree.  */
6113       if (TREE_CODE (t) == MODIFY_EXPR)
6114 	return TREE_OPERAND (t, 1);
6115     }
6116 
6117    return NULL_TREE;
6118 }
6119 
6120 /* Duplicate a type, fields and all.  */
6121 
6122 tree
6123 build_duplicate_type (tree type)
6124 {
6125   struct copy_body_data id;
6126 
6127   memset (&id, 0, sizeof (id));
6128   id.src_fn = current_function_decl;
6129   id.dst_fn = current_function_decl;
6130   id.src_cfun = cfun;
6131   id.decl_map = new hash_map<tree, tree>;
6132   id.debug_map = NULL;
6133   id.copy_decl = copy_decl_no_change;
6134 
6135   type = remap_type_1 (type, &id);
6136 
6137   delete id.decl_map;
6138   if (id.debug_map)
6139     delete id.debug_map;
6140 
6141   TYPE_CANONICAL (type) = type;
6142 
6143   return type;
6144 }
6145 
6146 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6147    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6148    evaluation.  */
6149 
6150 tree
6151 copy_fn (tree fn, tree& parms, tree& result)
6152 {
6153   copy_body_data id;
6154   tree param;
6155   hash_map<tree, tree> decl_map;
6156 
6157   tree *p = &parms;
6158   *p = NULL_TREE;
6159 
6160   memset (&id, 0, sizeof (id));
6161   id.src_fn = fn;
6162   id.dst_fn = current_function_decl;
6163   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6164   id.decl_map = &decl_map;
6165 
6166   id.copy_decl = copy_decl_no_change;
6167   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6168   id.transform_new_cfg = false;
6169   id.transform_return_to_modify = false;
6170   id.transform_parameter = true;
6171   id.transform_lang_insert_block = NULL;
6172 
6173   /* Make sure not to unshare trees behind the front-end's back
6174      since front-end specific mechanisms may rely on sharing.  */
6175   id.regimplify = false;
6176   id.do_not_unshare = true;
6177 
6178   /* We're not inside any EH region.  */
6179   id.eh_lp_nr = 0;
6180 
6181   /* Remap the parameters and result and return them to the caller.  */
6182   for (param = DECL_ARGUMENTS (fn);
6183        param;
6184        param = DECL_CHAIN (param))
6185     {
6186       *p = remap_decl (param, &id);
6187       p = &DECL_CHAIN (*p);
6188     }
6189 
6190   if (DECL_RESULT (fn))
6191     result = remap_decl (DECL_RESULT (fn), &id);
6192   else
6193     result = NULL_TREE;
6194 
6195   return copy_tree_body (&id);
6196 }
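
/* Usage sketch (hedged): the C++ constexpr evaluator mentioned above would
   call this roughly as

       tree parms, result;
       tree body = copy_fn (fndecl, parms, result);

   and then substitute the actual argument values for the remapped PARM_DECLs
   chained through PARMS before evaluating BODY.  */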
6197