xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-inline.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Tree inlining.
2    Copyright (C) 2001-2015 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "flags.h"
41 #include "params.h"
42 #include "insn-config.h"
43 #include "hashtab.h"
44 #include "langhooks.h"
45 #include "predict.h"
46 #include "hard-reg-set.h"
47 #include "function.h"
48 #include "dominance.h"
49 #include "cfg.h"
50 #include "cfganal.h"
51 #include "basic-block.h"
52 #include "tree-iterator.h"
53 #include "intl.h"
54 #include "tree-ssa-alias.h"
55 #include "internal-fn.h"
56 #include "gimple-fold.h"
57 #include "tree-eh.h"
58 #include "gimple-expr.h"
59 #include "is-a.h"
60 #include "gimple.h"
61 #include "gimplify.h"
62 #include "gimple-iterator.h"
63 #include "gimplify-me.h"
64 #include "gimple-walk.h"
65 #include "gimple-ssa.h"
66 #include "tree-cfg.h"
67 #include "tree-phinodes.h"
68 #include "ssa-iterators.h"
69 #include "stringpool.h"
70 #include "tree-ssanames.h"
71 #include "tree-into-ssa.h"
72 #include "rtl.h"
73 #include "statistics.h"
74 #include "real.h"
75 #include "fixed-value.h"
76 #include "expmed.h"
77 #include "dojump.h"
78 #include "explow.h"
79 #include "emit-rtl.h"
80 #include "varasm.h"
81 #include "stmt.h"
82 #include "expr.h"
83 #include "tree-dfa.h"
84 #include "tree-ssa.h"
85 #include "tree-pretty-print.h"
86 #include "except.h"
87 #include "debug.h"
88 #include "hash-map.h"
89 #include "plugin-api.h"
90 #include "ipa-ref.h"
91 #include "cgraph.h"
92 #include "alloc-pool.h"
93 #include "symbol-summary.h"
94 #include "ipa-prop.h"
95 #include "value-prof.h"
96 #include "tree-pass.h"
97 #include "target.h"
98 #include "cfgloop.h"
99 #include "builtins.h"
100 #include "tree-chkp.h"
101 
102 #include "rtl.h"	/* FIXME: For asm_str_count.  */
103 
104 /* I'm not real happy about this, but we need to handle gimple and
105    non-gimple trees.  */
106 
107 /* Inlining, Cloning, Versioning, Parallelization
108 
109    Inlining: a function body is duplicated, but the PARM_DECLs are
110    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
111    MODIFY_EXPRs that store to a dedicated returned-value variable.
112    The duplicated eh_region info of the copy will later be appended
113    to the info for the caller; the eh_region info in copied throwing
114    statements and RESX statements are adjusted accordingly.
115 
116    Cloning: (only in C++) We have one body for a con/de/structor, and
117    multiple function decls, each with a unique parameter list.
118    Duplicate the body, using the given splay tree; some parameters
119    will become constants (like 0 or 1).
120 
121    Versioning: a function body is duplicated and the result is a new
122    function, rather than being inserted into blocks of an existing
123    function as with inlining.  Some parameters will become constants.
124 
125    Parallelization: a region of a function is duplicated resulting in
126    a new function.  Variables may be replaced with complex expressions
127    to enable shared variable semantics.
128 
129    All of these will simultaneously look up any callgraph edges.  If
130    we're going to inline the duplicated function body, and the given
131    function has some cloned callgraph nodes (one for each place this
132    function will be inlined), those callgraph edges will be duplicated.
133    If we're cloning the body, those callgraph edges will be
134    updated to point into the new body.  (Note that the original
135    callgraph node and edge list will not be altered.)
136 
137    See the CALL_EXPR handling case in copy_tree_body_r ().  */
138 
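/* As a purely illustrative sketch (not code from this file), inlining a
   callee such as

     static int callee (int x) { return x + 1; }

   into a caller containing "return callee (y) * 2;" conceptually produces

     int x_copy = y;        (the PARM_DECL remapped to a VAR_DECL)
     int retval;            (the dedicated returned-value variable)
     retval = x_copy + 1;   (the RETURN_EXPR became a MODIFY_EXPR)
     return retval * 2;

   with the callee's blocks, EH regions and callgraph edges duplicated and
   remapped into the caller as described above.  */
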
139 /* To Do:
140 
141    o In order to make inlining-on-trees work, we pessimized
142      function-local static constants.  In particular, they are now
143      always output, even when not addressed.  Fix this by treating
144      function-local static constants just like global static
145      constants; the back-end already knows not to output them if they
146      are not needed.
147 
148    o Provide heuristics to clamp inlining of recursive template
149      calls?  */
150 
151 
152 /* Weights that estimate_num_insns uses to estimate the size of the
153    produced code.  */
154 
155 eni_weights eni_size_weights;
156 
157 /* Weights that estimate_num_insns uses to estimate the time necessary
158    to execute the produced code.  */
159 
160 eni_weights eni_time_weights;
161 
162 /* Prototypes.  */
163 
164 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
165 				     basic_block);
166 static void remap_block (tree *, copy_body_data *);
167 static void copy_bind_expr (tree *, int *, copy_body_data *);
168 static void declare_inline_vars (tree, tree);
169 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
170 static void prepend_lexical_block (tree current_block, tree new_block);
171 static tree copy_decl_to_var (tree, copy_body_data *);
172 static tree copy_result_decl_to_var (tree, copy_body_data *);
173 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
174 static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
175 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
176 static void insert_init_stmt (copy_body_data *, basic_block, gimple);
177 
178 /* Insert a tree->tree mapping for ID.  Although the name suggests
179    that the trees should be variables, it is used for more than that.  */
180 
181 void
182 insert_decl_map (copy_body_data *id, tree key, tree value)
183 {
184   id->decl_map->put (key, value);
185 
186   /* Always insert an identity map as well.  If we see this same new
187      node again, we won't want to duplicate it a second time.  */
188   if (key != value)
189     id->decl_map->put (value, value);
190 }
191 
192 /* Insert a tree->tree mapping for ID.  This is only used for
193    variables.  */
194 
195 static void
196 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
197 {
198   if (!gimple_in_ssa_p (id->src_cfun))
199     return;
200 
201   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
202     return;
203 
204   if (!target_for_debug_bind (key))
205     return;
206 
207   gcc_assert (TREE_CODE (key) == PARM_DECL);
208   gcc_assert (TREE_CODE (value) == VAR_DECL);
209 
210   if (!id->debug_map)
211     id->debug_map = new hash_map<tree, tree>;
212 
213   id->debug_map->put (key, value);
214 }
215 
216 /* If nonzero, we're remapping the contents of inlined debug
217    statements.  If negative, an error has occurred, such as a
218    reference to a variable that isn't available in the inlined
219    context.  */
220 static int processing_debug_stmt = 0;
221 
222 /* Construct a new SSA name for old NAME.  ID is the inline context.  */
223 
224 static tree
225 remap_ssa_name (tree name, copy_body_data *id)
226 {
227   tree new_tree, var;
228   tree *n;
229 
230   gcc_assert (TREE_CODE (name) == SSA_NAME);
231 
232   n = id->decl_map->get (name);
233   if (n)
234     return unshare_expr (*n);
235 
236   if (processing_debug_stmt)
237     {
238       if (SSA_NAME_IS_DEFAULT_DEF (name)
239 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
240 	  && id->entry_bb == NULL
241 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
242 	{
243 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
244 	  gimple def_temp;
245 	  gimple_stmt_iterator gsi;
246 	  tree val = SSA_NAME_VAR (name);
247 
248 	  n = id->decl_map->get (val);
249 	  if (n != NULL)
250 	    val = *n;
251 	  if (TREE_CODE (val) != PARM_DECL)
252 	    {
253 	      processing_debug_stmt = -1;
254 	      return name;
255 	    }
256 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
257 	  DECL_ARTIFICIAL (vexpr) = 1;
258 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
259 	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
260 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
261 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
262 	  return vexpr;
263 	}
264 
265       processing_debug_stmt = -1;
266       return name;
267     }
268 
269   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
270   var = SSA_NAME_VAR (name);
271   if (!var
272       || (!SSA_NAME_IS_DEFAULT_DEF (name)
273 	  && TREE_CODE (var) == VAR_DECL
274 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
275 	  && DECL_ARTIFICIAL (var)
276 	  && DECL_IGNORED_P (var)
277 	  && !DECL_NAME (var)))
278     {
279       struct ptr_info_def *pi;
280       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
281       if (!var && SSA_NAME_IDENTIFIER (name))
282 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
283       insert_decl_map (id, name, new_tree);
284       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
285 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
286       /* At least IPA points-to info can be directly transferred.  */
287       if (id->src_cfun->gimple_df
288 	  && id->src_cfun->gimple_df->ipa_pta
289 	  && (pi = SSA_NAME_PTR_INFO (name))
290 	  && !pi->pt.anything)
291 	{
292 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 	  new_pi->pt = pi->pt;
294 	}
295       return new_tree;
296     }
297 
298   /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
299      that in copy_bb.  */
300   new_tree = remap_decl (var, id);
301 
302   /* We might've substituted a constant or another SSA_NAME for
303      the variable.
304 
305      Replace the SSA name representing RESULT_DECL by the variable during
306      inlining:  this saves us from needing to introduce a PHI node in case
307      the return value is only partly initialized.  */
308   if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
309       && (!SSA_NAME_VAR (name)
310 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
311 	  || !id->transform_return_to_modify))
312     {
313       struct ptr_info_def *pi;
314       new_tree = make_ssa_name (new_tree);
315       insert_decl_map (id, name, new_tree);
316       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
317 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
318       /* At least IPA points-to info can be directly transferred.  */
319       if (id->src_cfun->gimple_df
320 	  && id->src_cfun->gimple_df->ipa_pta
321 	  && (pi = SSA_NAME_PTR_INFO (name))
322 	  && !pi->pt.anything)
323 	{
324 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
325 	  new_pi->pt = pi->pt;
326 	}
327       if (SSA_NAME_IS_DEFAULT_DEF (name))
328 	{
329 	  /* By inlining a function having an uninitialized variable, we might
330 	     extend its lifetime (the variable might get reused).  This causes
331 	     an ICE if we end up extending the lifetime of an SSA name across
332 	     an abnormal edge, and it also increases register pressure.
333 
334 	     We simply initialize all uninitialized vars to 0, except when we
335 	     are inlining into the very first BB.  We could avoid this for all
336 	     BBs that are not inside strongly connected regions of the CFG,
337 	     but this is expensive to test.  */
338 	  if (id->entry_bb
339 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
340 	      && (!SSA_NAME_VAR (name)
341 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
342 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
343 					     0)->dest
344 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
345 	    {
346 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
347 	      gimple init_stmt;
348 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
349 
350 	      init_stmt = gimple_build_assign (new_tree, zero);
351 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
352 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
353 	    }
354 	  else
355 	    {
356 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
357 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
358 	    }
359 	}
360     }
361   else
362     insert_decl_map (id, name, new_tree);
363   return new_tree;
364 }
365 
366 /* Remap DECL during the copying of the BLOCK tree for the function.  */
367 
368 tree
369 remap_decl (tree decl, copy_body_data *id)
370 {
371   tree *n;
372 
373   /* We only remap local variables in the current function.  */
374 
375   /* See if we have remapped this declaration.  */
376 
377   n = id->decl_map->get (decl);
378 
379   if (!n && processing_debug_stmt)
380     {
381       processing_debug_stmt = -1;
382       return decl;
383     }
384 
385   /* If we didn't already have an equivalent for this declaration,
386      create one now.  */
387   if (!n)
388     {
389       /* Make a copy of the variable or label.  */
390       tree t = id->copy_decl (decl, id);
391 
392       /* Remember it, so that if we encounter this local entity again
393 	 we can reuse this copy.  Do this early because remap_type may
394 	 need this decl for TYPE_STUB_DECL.  */
395       insert_decl_map (id, decl, t);
396 
397       if (!DECL_P (t))
398 	return t;
399 
400       /* Remap types, if necessary.  */
401       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
402       if (TREE_CODE (t) == TYPE_DECL)
403         DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
404 
405       /* Remap sizes as necessary.  */
406       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
407       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
408 
409       /* For a FIELD_DECL, do likewise for its offset and qualifier.  */
410       if (TREE_CODE (t) == FIELD_DECL)
411 	{
412 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
413 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
414 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
415 	}
416 
417       return t;
418     }
419 
420   if (id->do_not_unshare)
421     return *n;
422   else
423     return unshare_expr (*n);
424 }
425 
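/* Helper for remap_type.  TYPE is known to require remapping (it is
   variably modified); build a remapped copy of it, register the mapping
   in ID's decl map and return the new type.  */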
426 static tree
427 remap_type_1 (tree type, copy_body_data *id)
428 {
429   tree new_tree, t;
430 
431   /* We do need a copy.  Build and register it now.  If this is a pointer or
432      reference type, remap the designated type and make a new pointer or
433      reference type.  */
434   if (TREE_CODE (type) == POINTER_TYPE)
435     {
436       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
437 					 TYPE_MODE (type),
438 					 TYPE_REF_CAN_ALIAS_ALL (type));
439       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
440 	new_tree = build_type_attribute_qual_variant (new_tree,
441 						      TYPE_ATTRIBUTES (type),
442 						      TYPE_QUALS (type));
443       insert_decl_map (id, type, new_tree);
444       return new_tree;
445     }
446   else if (TREE_CODE (type) == REFERENCE_TYPE)
447     {
448       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
449 					    TYPE_MODE (type),
450 					    TYPE_REF_CAN_ALIAS_ALL (type));
451       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 	new_tree = build_type_attribute_qual_variant (new_tree,
453 						      TYPE_ATTRIBUTES (type),
454 						      TYPE_QUALS (type));
455       insert_decl_map (id, type, new_tree);
456       return new_tree;
457     }
458   else
459     new_tree = copy_node (type);
460 
461   insert_decl_map (id, type, new_tree);
462 
463   /* This is a new type, not a copy of an old type.  Need to reassociate
464      variants.  We can handle everything except the main variant lazily.  */
465   t = TYPE_MAIN_VARIANT (type);
466   if (type != t)
467     {
468       t = remap_type (t, id);
469       TYPE_MAIN_VARIANT (new_tree) = t;
470       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
471       TYPE_NEXT_VARIANT (t) = new_tree;
472     }
473   else
474     {
475       TYPE_MAIN_VARIANT (new_tree) = new_tree;
476       TYPE_NEXT_VARIANT (new_tree) = NULL;
477     }
478 
479   if (TYPE_STUB_DECL (type))
480     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
481 
482   /* Lazily create pointer and reference types.  */
483   TYPE_POINTER_TO (new_tree) = NULL;
484   TYPE_REFERENCE_TO (new_tree) = NULL;
485 
486   /* Copy all types that may contain references to local variables; be sure to
487      preserve sharing between a type and its main variant when possible.  */
488   switch (TREE_CODE (new_tree))
489     {
490     case INTEGER_TYPE:
491     case REAL_TYPE:
492     case FIXED_POINT_TYPE:
493     case ENUMERAL_TYPE:
494     case BOOLEAN_TYPE:
495       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
496 	{
497 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
498 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
499 
500 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
501 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
502 	}
503       else
504 	{
505 	  t = TYPE_MIN_VALUE (new_tree);
506 	  if (t && TREE_CODE (t) != INTEGER_CST)
507 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
508 
509 	  t = TYPE_MAX_VALUE (new_tree);
510 	  if (t && TREE_CODE (t) != INTEGER_CST)
511 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
512 	}
513       return new_tree;
514 
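    /* For FUNCTION_TYPE and ARRAY_TYPE, reuse the main variant's component
       types when they are shared with it; otherwise remap them.  */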
515     case FUNCTION_TYPE:
516       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
517 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
518 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
519       else
520         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
521       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
522 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
523 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
524       else
525         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
526       return new_tree;
527 
528     case ARRAY_TYPE:
529       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
530 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
531 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
532       else
533 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
534 
535       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
536 	{
537 	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
538 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
539 	}
540       else
541 	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
542       break;
543 
544     case RECORD_TYPE:
545     case UNION_TYPE:
546     case QUAL_UNION_TYPE:
547       if (TYPE_MAIN_VARIANT (type) != type
548 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
549 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
550       else
551 	{
552 	  tree f, nf = NULL;
553 
554 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
555 	    {
556 	      t = remap_decl (f, id);
557 	      DECL_CONTEXT (t) = new_tree;
558 	      DECL_CHAIN (t) = nf;
559 	      nf = t;
560 	    }
561 	  TYPE_FIELDS (new_tree) = nreverse (nf);
562 	}
563       break;
564 
565     case OFFSET_TYPE:
566     default:
567       /* Shouldn't have been thought variable sized.  */
568       gcc_unreachable ();
569     }
570 
571   /* All variants of the type share the same size, so use the already remapped data.  */
572   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
573     {
574       gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
575       gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
576 
577       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
578       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
579     }
580   else
581     {
582       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
583       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
584     }
585 
586   return new_tree;
587 }
588 
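/* Remap TYPE using the mapping information in ID.  Types that are not
   variably modified are mapped to themselves; variably modified types
   are copied via remap_type_1.  */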
589 tree
590 remap_type (tree type, copy_body_data *id)
591 {
592   tree *node;
593   tree tmp;
594 
595   if (type == NULL)
596     return type;
597 
598   /* See if we have remapped this type.  */
599   node = id->decl_map->get (type);
600   if (node)
601     return *node;
602 
603   /* The type only needs remapping if it's variably modified.  */
604   if (! variably_modified_type_p (type, id->src_fn))
605     {
606       insert_decl_map (id, type, type);
607       return type;
608     }
609 
610   id->remapping_type_depth++;
611   tmp = remap_type_1 (type, id);
612   id->remapping_type_depth--;
613 
614   return tmp;
615 }
616 
617 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
618 
619 static bool
620 can_be_nonlocal (tree decl, copy_body_data *id)
621 {
622   /* We cannot duplicate function decls.  */
623   if (TREE_CODE (decl) == FUNCTION_DECL)
624     return true;
625 
626   /* Local static vars must be non-local or we get multiple declaration
627      problems.  */
628   if (TREE_CODE (decl) == VAR_DECL
629       && !auto_var_in_fn_p (decl, id->src_fn))
630     return true;
631 
632   return false;
633 }
634 
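/* Remap the chain of declarations DECLS using ID and return the new chain.
   Declarations that are not duplicated (see can_be_nonlocal) or that fail
   to remap are instead recorded in *NONLOCALIZED_LIST when debug info
   requires it.  */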
635 static tree
636 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
637 	     copy_body_data *id)
638 {
639   tree old_var;
640   tree new_decls = NULL_TREE;
641 
642   /* Remap its variables.  */
643   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
644     {
645       tree new_var;
646 
647       if (can_be_nonlocal (old_var, id))
648 	{
649 	  /* We need to add this variable to the local decls as otherwise
650 	     nothing else will do so.  */
651 	  if (TREE_CODE (old_var) == VAR_DECL
652 	      && ! DECL_EXTERNAL (old_var)
653 	      && cfun)
654 	    add_local_decl (cfun, old_var);
655 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
656 	      && !DECL_IGNORED_P (old_var)
657 	      && nonlocalized_list)
658 	    vec_safe_push (*nonlocalized_list, old_var);
659 	  continue;
660 	}
661 
662       /* Remap the variable.  */
663       new_var = remap_decl (old_var, id);
664 
665       /* If we didn't remap this variable, we can't mess with its
666 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
667 	 already declared somewhere else, so don't declare it here.  */
668 
669       if (new_var == id->retvar)
670 	;
671       else if (!new_var)
672         {
673 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
674 	      && !DECL_IGNORED_P (old_var)
675 	      && nonlocalized_list)
676 	    vec_safe_push (*nonlocalized_list, old_var);
677 	}
678       else
679 	{
680 	  gcc_assert (DECL_P (new_var));
681 	  DECL_CHAIN (new_var) = new_decls;
682 	  new_decls = new_var;
683 
684 	  /* Also copy value-expressions.  */
685 	  if (TREE_CODE (new_var) == VAR_DECL
686 	      && DECL_HAS_VALUE_EXPR_P (new_var))
687 	    {
688 	      tree tem = DECL_VALUE_EXPR (new_var);
689 	      bool old_regimplify = id->regimplify;
690 	      id->remapping_type_depth++;
691 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
692 	      id->remapping_type_depth--;
693 	      id->regimplify = old_regimplify;
694 	      SET_DECL_VALUE_EXPR (new_var, tem);
695 	    }
696 	}
697     }
698 
699   return nreverse (new_decls);
700 }
701 
702 /* Copy the BLOCK to contain remapped versions of the variables
703    therein.  And hook the new block into the block-tree.  */
704 
705 static void
706 remap_block (tree *block, copy_body_data *id)
707 {
708   tree old_block;
709   tree new_block;
710 
711   /* Make the new block.  */
712   old_block = *block;
713   new_block = make_node (BLOCK);
714   TREE_USED (new_block) = TREE_USED (old_block);
715   BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
716   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
717   BLOCK_NONLOCALIZED_VARS (new_block)
718     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
719   *block = new_block;
720 
721   /* Remap its variables.  */
722   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
723   					&BLOCK_NONLOCALIZED_VARS (new_block),
724 					id);
725 
726   if (id->transform_lang_insert_block)
727     id->transform_lang_insert_block (new_block);
728 
729   /* Remember the remapped block.  */
730   insert_decl_map (id, old_block, new_block);
731 }
732 
733 /* Copy the whole block tree and root it in id->block.  */
734 static tree
735 remap_blocks (tree block, copy_body_data *id)
736 {
737   tree t;
738   tree new_tree = block;
739 
740   if (!block)
741     return NULL;
742 
743   remap_block (&new_tree, id);
744   gcc_assert (new_tree != block);
745   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
746     prepend_lexical_block (new_tree, remap_blocks (t, id));
747   /* Blocks are in arbitrary order, but make things slightly prettier and do
748      not swap order when producing a copy.  */
749   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
750   return new_tree;
751 }
752 
753 /* Remap the block tree rooted at BLOCK to nothing.  */
754 static void
755 remap_blocks_to_null (tree block, copy_body_data *id)
756 {
757   tree t;
758   insert_decl_map (id, block, NULL_TREE);
759   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
760     remap_blocks_to_null (t, id);
761 }
762 
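/* Replace the STATEMENT_LIST pointed to by *TP with a copy, recursively
   copying any nested STATEMENT_LISTs.  */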
763 static void
764 copy_statement_list (tree *tp)
765 {
766   tree_stmt_iterator oi, ni;
767   tree new_tree;
768 
769   new_tree = alloc_stmt_list ();
770   ni = tsi_start (new_tree);
771   oi = tsi_start (*tp);
772   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
773   *tp = new_tree;
774 
775   for (; !tsi_end_p (oi); tsi_next (&oi))
776     {
777       tree stmt = tsi_stmt (oi);
778       if (TREE_CODE (stmt) == STATEMENT_LIST)
779 	/* This copy is not redundant; tsi_link_after will smash this
780 	   STATEMENT_LIST into the end of the one we're building, and we
781 	   don't want to do that with the original.  */
782 	copy_statement_list (&stmt);
783       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
784     }
785 }
786 
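/* Copy the BIND_EXPR pointed to by *TP using ID, remapping its block and
   the variables it binds.  */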
787 static void
788 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
789 {
790   tree block = BIND_EXPR_BLOCK (*tp);
791   /* Copy (and replace) the statement.  */
792   copy_tree_r (tp, walk_subtrees, NULL);
793   if (block)
794     {
795       remap_block (&block, id);
796       BIND_EXPR_BLOCK (*tp) = block;
797     }
798 
799   if (BIND_EXPR_VARS (*tp))
800     /* This will remap a lot of the same decls again, but this should be
801        harmless.  */
802     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
803 }
804 
805 
806 /* Create a new gimple_seq by remapping all the statements in BODY
807    using the inlining information in ID.  */
808 
809 static gimple_seq
810 remap_gimple_seq (gimple_seq body, copy_body_data *id)
811 {
812   gimple_stmt_iterator si;
813   gimple_seq new_body = NULL;
814 
815   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
816     {
817       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
818       gimple_seq_add_seq (&new_body, new_stmts);
819     }
820 
821   return new_body;
822 }
823 
824 
825 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
826    block using the mapping information in ID.  */
827 
828 static gimple
829 copy_gimple_bind (gbind *stmt, copy_body_data *id)
830 {
831   gimple new_bind;
832   tree new_block, new_vars;
833   gimple_seq body, new_body;
834 
835   /* Copy the statement.  Note that we purposely don't use copy_stmt
836      here because we need to remap statements as we copy.  */
837   body = gimple_bind_body (stmt);
838   new_body = remap_gimple_seq (body, id);
839 
840   new_block = gimple_bind_block (stmt);
841   if (new_block)
842     remap_block (&new_block, id);
843 
844   /* This will remap a lot of the same decls again, but this should be
845      harmless.  */
846   new_vars = gimple_bind_vars (stmt);
847   if (new_vars)
848     new_vars = remap_decls (new_vars, NULL, id);
849 
850   new_bind = gimple_build_bind (new_vars, new_body, new_block);
851 
852   return new_bind;
853 }
854 
855 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
856 
857 static bool
858 is_parm (tree decl)
859 {
860   if (TREE_CODE (decl) == SSA_NAME)
861     {
862       decl = SSA_NAME_VAR (decl);
863       if (!decl)
864 	return false;
865     }
866 
867   return (TREE_CODE (decl) == PARM_DECL);
868 }
869 
870 /* Remap the dependence CLIQUE from the source to the destination function
871    as specified in ID.  */
872 
873 static unsigned short
874 remap_dependence_clique (copy_body_data *id, unsigned short clique)
875 {
876   if (clique == 0 || processing_debug_stmt)
877     return 0;
878   if (!id->dependence_map)
879     id->dependence_map
880       = new hash_map<unsigned short, unsigned short, dependence_hasher>;
881   bool existed;
882   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
883   if (!existed)
884     newc = ++cfun->last_clique;
885   return newc;
886 }
887 
888 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
889    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
890    WALK_SUBTREES is used to tell walk_gimple_op whether to keep
891    recursing into the child nodes of *TP.  */
892 
893 static tree
894 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
895 {
896   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
897   copy_body_data *id = (copy_body_data *) wi_p->info;
898   tree fn = id->src_fn;
899 
900   if (TREE_CODE (*tp) == SSA_NAME)
901     {
902       *tp = remap_ssa_name (*tp, id);
903       *walk_subtrees = 0;
904       return NULL;
905     }
906   else if (auto_var_in_fn_p (*tp, fn))
907     {
908       /* Local variables and labels need to be replaced by equivalent
909 	 variables.  We don't want to copy static variables; there's
910 	 only one of those, no matter how many times we inline the
911 	 containing function.  Similarly for globals from an outer
912 	 function.  */
913       tree new_decl;
914 
915       /* Remap the declaration.  */
916       new_decl = remap_decl (*tp, id);
917       gcc_assert (new_decl);
918       /* Replace this variable with the copy.  */
919       STRIP_TYPE_NOPS (new_decl);
920       /* ???  The C++ frontend uses void * pointer zero to initialize
921          any other type.  This confuses the middle-end type verification.
922 	 As cloned bodies do not go through gimplification again, the fixup
923 	 there doesn't trigger.  */
924       if (TREE_CODE (new_decl) == INTEGER_CST
925 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
926 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
927       *tp = new_decl;
928       *walk_subtrees = 0;
929     }
930   else if (TREE_CODE (*tp) == STATEMENT_LIST)
931     gcc_unreachable ();
932   else if (TREE_CODE (*tp) == SAVE_EXPR)
933     gcc_unreachable ();
934   else if (TREE_CODE (*tp) == LABEL_DECL
935 	   && (!DECL_CONTEXT (*tp)
936 	       || decl_function_context (*tp) == id->src_fn))
937     /* These may need to be remapped for EH handling.  */
938     *tp = remap_decl (*tp, id);
939   else if (TREE_CODE (*tp) == FIELD_DECL)
940     {
941       /* If the enclosing record type is variably_modified_type_p, the field
942 	 has already been remapped.  Otherwise, it need not be.  */
943       tree *n = id->decl_map->get (*tp);
944       if (n)
945 	*tp = *n;
946       *walk_subtrees = 0;
947     }
948   else if (TYPE_P (*tp))
949     /* Types may need remapping as well.  */
950     *tp = remap_type (*tp, id);
951   else if (CONSTANT_CLASS_P (*tp))
952     {
953       /* If this is a constant, we have to copy the node iff the type
954 	 will be remapped.  copy_tree_r will not copy a constant.  */
955       tree new_type = remap_type (TREE_TYPE (*tp), id);
956 
957       if (new_type == TREE_TYPE (*tp))
958 	*walk_subtrees = 0;
959 
960       else if (TREE_CODE (*tp) == INTEGER_CST)
961 	*tp = wide_int_to_tree (new_type, *tp);
962       else
963 	{
964 	  *tp = copy_node (*tp);
965 	  TREE_TYPE (*tp) = new_type;
966 	}
967     }
968   else
969     {
970       /* Otherwise, just copy the node.  Note that copy_tree_r already
971 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
972 
973       if (TREE_CODE (*tp) == MEM_REF)
974 	{
975 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
976 	     that can happen when a pointer argument is an ADDR_EXPR.
977 	     Recurse here manually to allow that.  */
978 	  tree ptr = TREE_OPERAND (*tp, 0);
979 	  tree type = remap_type (TREE_TYPE (*tp), id);
980 	  tree old = *tp;
981 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
982 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
983 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
984 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
985 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
986 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
987 	    {
988 	      MR_DEPENDENCE_CLIQUE (*tp)
989 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
990 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
991 	    }
992 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
993 	     remapped a parameter as the property might be valid only
994 	     for the parameter itself.  */
995 	  if (TREE_THIS_NOTRAP (old)
996 	      && (!is_parm (TREE_OPERAND (old, 0))
997 		  || (!id->transform_parameter && is_parm (ptr))))
998 	    TREE_THIS_NOTRAP (*tp) = 1;
999 	  *walk_subtrees = 0;
1000 	  return NULL;
1001 	}
1002 
1003       /* Here is the "usual case".  Copy this tree node, and then
1004 	 tweak some special cases.  */
1005       copy_tree_r (tp, walk_subtrees, NULL);
1006 
1007       if (TREE_CODE (*tp) != OMP_CLAUSE)
1008 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1009 
1010       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1011 	{
1012 	  /* The copied TARGET_EXPR has never been expanded, even if the
1013 	     original node was expanded already.  */
1014 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1015 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1016 	}
1017       else if (TREE_CODE (*tp) == ADDR_EXPR)
1018 	{
1019 	  /* Variable substitution need not be simple; consider, in particular,
1020 	     the MEM_REF substitution above.  Make sure that
1021 	     TREE_CONSTANT and friends are up-to-date.  */
1022 	  int invariant = is_gimple_min_invariant (*tp);
1023 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1024 	  recompute_tree_invariant_for_addr_expr (*tp);
1025 
1026 	  /* If this used to be invariant, but is not any longer,
1027 	     then regimplification is probably needed.  */
1028 	  if (invariant && !is_gimple_min_invariant (*tp))
1029 	    id->regimplify = true;
1030 
1031 	  *walk_subtrees = 0;
1032 	}
1033     }
1034 
1035   /* Update the TREE_BLOCK for the cloned expr.  */
1036   if (EXPR_P (*tp))
1037     {
1038       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1039       tree old_block = TREE_BLOCK (*tp);
1040       if (old_block)
1041 	{
1042 	  tree *n;
1043 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1044 	  if (n)
1045 	    new_block = *n;
1046 	}
1047       TREE_SET_BLOCK (*tp, new_block);
1048     }
1049 
1050   /* Keep iterating.  */
1051   return NULL_TREE;
1052 }
1053 
1054 
1055 /* Called from copy_body_id via walk_tree.  DATA is really a
1056    `copy_body_data *'.  */
1057 
1058 tree
1059 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1060 {
1061   copy_body_data *id = (copy_body_data *) data;
1062   tree fn = id->src_fn;
1063   tree new_block;
1064 
1065   /* Begin by recognizing trees that we'll completely rewrite for the
1066      inlining context.  Our output for these trees is completely
1067      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1068      into an edge).  Further down, we'll handle trees that get
1069      duplicated and/or tweaked.  */
1070 
1071   /* When requested, RETURN_EXPRs should be transformed to just the
1072      contained MODIFY_EXPR.  The branch semantics of the return will
1073      be handled elsewhere by manipulating the CFG rather than a statement.  */
1074   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1075     {
1076       tree assignment = TREE_OPERAND (*tp, 0);
1077 
1078       /* If we're returning something, just turn that into an
1079 	 assignment to the equivalent of the original RESULT_DECL.
1080 	 If the "assignment" is just the result decl, the result
1081 	 decl has already been set (e.g. a recent "foo (&result_decl,
1082 	 ...)"); just toss the entire RETURN_EXPR.  */
1083       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1084 	{
1085 	  /* Replace the RETURN_EXPR with (a copy of) the
1086 	     MODIFY_EXPR hanging underneath.  */
1087 	  *tp = copy_node (assignment);
1088 	}
1089       else /* Else the RETURN_EXPR returns no value.  */
1090 	{
1091 	  *tp = NULL;
1092 	  return (tree) (void *)1;
1093 	}
1094     }
1095   else if (TREE_CODE (*tp) == SSA_NAME)
1096     {
1097       *tp = remap_ssa_name (*tp, id);
1098       *walk_subtrees = 0;
1099       return NULL;
1100     }
1101 
1102   /* Local variables and labels need to be replaced by equivalent
1103      variables.  We don't want to copy static variables; there's only
1104      one of those, no matter how many times we inline the containing
1105      function.  Similarly for globals from an outer function.  */
1106   else if (auto_var_in_fn_p (*tp, fn))
1107     {
1108       tree new_decl;
1109 
1110       /* Remap the declaration.  */
1111       new_decl = remap_decl (*tp, id);
1112       gcc_assert (new_decl);
1113       /* Replace this variable with the copy.  */
1114       STRIP_TYPE_NOPS (new_decl);
1115       *tp = new_decl;
1116       *walk_subtrees = 0;
1117     }
1118   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1119     copy_statement_list (tp);
1120   else if (TREE_CODE (*tp) == SAVE_EXPR
1121 	   || TREE_CODE (*tp) == TARGET_EXPR)
1122     remap_save_expr (tp, id->decl_map, walk_subtrees);
1123   else if (TREE_CODE (*tp) == LABEL_DECL
1124 	   && (! DECL_CONTEXT (*tp)
1125 	       || decl_function_context (*tp) == id->src_fn))
1126     /* These may need to be remapped for EH handling.  */
1127     *tp = remap_decl (*tp, id);
1128   else if (TREE_CODE (*tp) == BIND_EXPR)
1129     copy_bind_expr (tp, walk_subtrees, id);
1130   /* Types may need remapping as well.  */
1131   else if (TYPE_P (*tp))
1132     *tp = remap_type (*tp, id);
1133 
1134   /* If this is a constant, we have to copy the node iff the type will be
1135      remapped.  copy_tree_r will not copy a constant.  */
1136   else if (CONSTANT_CLASS_P (*tp))
1137     {
1138       tree new_type = remap_type (TREE_TYPE (*tp), id);
1139 
1140       if (new_type == TREE_TYPE (*tp))
1141 	*walk_subtrees = 0;
1142 
1143       else if (TREE_CODE (*tp) == INTEGER_CST)
1144 	*tp = wide_int_to_tree (new_type, *tp);
1145       else
1146 	{
1147 	  *tp = copy_node (*tp);
1148 	  TREE_TYPE (*tp) = new_type;
1149 	}
1150     }
1151 
1152   /* Otherwise, just copy the node.  Note that copy_tree_r already
1153      knows not to copy VAR_DECLs, etc., so this is safe.  */
1154   else
1155     {
1156       /* Here we handle trees that are not completely rewritten.
1157 	 First we detect some inlining-induced bogosities for
1158 	 discarding.  */
1159       if (TREE_CODE (*tp) == MODIFY_EXPR
1160 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1161 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1162 	{
1163 	  /* Some assignments VAR = VAR; don't generate any rtl code
1164 	     and thus don't count as variable modification.  Avoid
1165 	     keeping bogosities like 0 = 0.  */
1166 	  tree decl = TREE_OPERAND (*tp, 0), value;
1167 	  tree *n;
1168 
1169 	  n = id->decl_map->get (decl);
1170 	  if (n)
1171 	    {
1172 	      value = *n;
1173 	      STRIP_TYPE_NOPS (value);
1174 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1175 		{
1176 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1177 		  return copy_tree_body_r (tp, walk_subtrees, data);
1178 		}
1179 	    }
1180 	}
1181       else if (TREE_CODE (*tp) == INDIRECT_REF)
1182 	{
1183 	  /* Get rid of *& from inline substitutions that can happen when a
1184 	     pointer argument is an ADDR_EXPR.  */
1185 	  tree decl = TREE_OPERAND (*tp, 0);
1186 	  tree *n = id->decl_map->get (decl);
1187 	  if (n)
1188 	    {
1189 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1190 	         it manually here as we'll eventually get ADDR_EXPRs
1191 		 which lie about the types they point to.  In this case
1192 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1193 		 but we absolutely rely on that.  As fold_indirect_ref
1194 	         does other useful transformations, try that first, though.  */
1195 	      tree type = TREE_TYPE (*tp);
1196 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1197 	      tree old = *tp;
1198 	      *tp = gimple_fold_indirect_ref (ptr);
1199 	      if (! *tp)
1200 	        {
1201 		  if (TREE_CODE (ptr) == ADDR_EXPR)
1202 		    {
1203 		      *tp
1204 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1205 		      /* ???  We should either assert here or build
1206 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1207 			 incompatible types to our IL.  */
1208 		      if (! *tp)
1209 			*tp = TREE_OPERAND (ptr, 0);
1210 		    }
1211 	          else
1212 		    {
1213 	              *tp = build1 (INDIRECT_REF, type, ptr);
1214 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1215 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1216 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1217 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1218 			 have remapped a parameter as the property might be
1219 			 valid only for the parameter itself.  */
1220 		      if (TREE_THIS_NOTRAP (old)
1221 			  && (!is_parm (TREE_OPERAND (old, 0))
1222 			      || (!id->transform_parameter && is_parm (ptr))))
1223 		        TREE_THIS_NOTRAP (*tp) = 1;
1224 		    }
1225 		}
1226 	      *walk_subtrees = 0;
1227 	      return NULL;
1228 	    }
1229 	}
1230       else if (TREE_CODE (*tp) == MEM_REF)
1231 	{
1232 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1233 	     that can happen when a pointer argument is an ADDR_EXPR.
1234 	     Recurse here manually to allow that.  */
1235 	  tree ptr = TREE_OPERAND (*tp, 0);
1236 	  tree type = remap_type (TREE_TYPE (*tp), id);
1237 	  tree old = *tp;
1238 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1239 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1240 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1241 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1242 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1243 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1244 	    {
1245 	      MR_DEPENDENCE_CLIQUE (*tp)
1246 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1247 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1248 	    }
1249 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1250 	     remapped a parameter as the property might be valid only
1251 	     for the parameter itself.  */
1252 	  if (TREE_THIS_NOTRAP (old)
1253 	      && (!is_parm (TREE_OPERAND (old, 0))
1254 		  || (!id->transform_parameter && is_parm (ptr))))
1255 	    TREE_THIS_NOTRAP (*tp) = 1;
1256 	  *walk_subtrees = 0;
1257 	  return NULL;
1258 	}
1259 
1260       /* Here is the "usual case".  Copy this tree node, and then
1261 	 tweak some special cases.  */
1262       copy_tree_r (tp, walk_subtrees, NULL);
1263 
1264       /* If EXPR has a block defined, map it to the newly constructed block.
1265          When inlining we want EXPRs without a block to appear in the block
1266 	 of the function call if we are not remapping a type.  */
1267       if (EXPR_P (*tp))
1268 	{
1269 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1270 	  if (TREE_BLOCK (*tp))
1271 	    {
1272 	      tree *n;
1273 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1274 	      if (n)
1275 		new_block = *n;
1276 	    }
1277 	  TREE_SET_BLOCK (*tp, new_block);
1278 	}
1279 
1280       if (TREE_CODE (*tp) != OMP_CLAUSE)
1281 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1282 
1283       /* The copied TARGET_EXPR has never been expanded, even if the
1284 	 original node was expanded already.  */
1285       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1286 	{
1287 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1288 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1289 	}
1290 
1291       /* Variable substitution need not be simple; consider, in particular, the
1292 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1293 	 and friends are up-to-date.  */
1294       else if (TREE_CODE (*tp) == ADDR_EXPR)
1295 	{
1296 	  int invariant = is_gimple_min_invariant (*tp);
1297 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1298 
1299 	  /* Handle the case where we substituted an INDIRECT_REF
1300 	     into the operand of the ADDR_EXPR.  */
1301 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1302 	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1303 	  else
1304 	    recompute_tree_invariant_for_addr_expr (*tp);
1305 
1306 	  /* If this used to be invariant, but is not any longer,
1307 	     then regimplification is probably needed.  */
1308 	  if (invariant && !is_gimple_min_invariant (*tp))
1309 	    id->regimplify = true;
1310 
1311 	  *walk_subtrees = 0;
1312 	}
1313     }
1314 
1315   /* Keep iterating.  */
1316   return NULL_TREE;
1317 }
1318 
1319 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1320    source function, map that to the duplicate EH region number in
1321    the destination function.  */
1322 
1323 static int
1324 remap_eh_region_nr (int old_nr, copy_body_data *id)
1325 {
1326   eh_region old_r, new_r;
1327 
1328   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1329   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1330 
1331   return new_r->index;
1332 }
1333 
1334 /* Similar, but operate on INTEGER_CSTs.  */
1335 
1336 static tree
1337 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1338 {
1339   int old_nr, new_nr;
1340 
1341   old_nr = tree_to_shwi (old_t_nr);
1342   new_nr = remap_eh_region_nr (old_nr, id);
1343 
1344   return build_int_cst (integer_type_node, new_nr);
1345 }
1346 
1347 /* Helper for copy_bb.  Remap statement STMT using the inlining
1348    information in ID.  Return the new statement copy.  */
1349 
1350 static gimple_seq
1351 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1352 {
1353   gimple copy = NULL;
1354   struct walk_stmt_info wi;
1355   bool skip_first = false;
1356   gimple_seq stmts = NULL;
1357 
1358   if (is_gimple_debug (stmt)
1359       && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1360     return stmts;
1361 
1362   /* Begin by recognizing trees that we'll completely rewrite for the
1363      inlining context.  Our output for these trees is completely
1364      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1365      into an edge).  Further down, we'll handle trees that get
1366      duplicated and/or tweaked.  */
1367 
1368   /* When requested, GIMPLE_RETURNs should be transformed to just the
1369      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1370      be handled elsewhere by manipulating the CFG rather than the
1371      statement.  */
1372   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1373     {
1374       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1375       tree retbnd = gimple_return_retbnd (stmt);
1376       tree bndslot = id->retbnd;
1377 
1378       if (retbnd && bndslot)
1379 	{
1380 	  gimple bndcopy = gimple_build_assign (bndslot, retbnd);
1381 	  memset (&wi, 0, sizeof (wi));
1382 	  wi.info = id;
1383 	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1384 	  gimple_seq_add_stmt (&stmts, bndcopy);
1385 	}
1386 
1387       /* If we're returning something, just turn that into an
1388 	 assignment to the equivalent of the original RESULT_DECL.
1389 	 If RETVAL is just the result decl, the result decl has
1390 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1391 	 just toss the entire GIMPLE_RETURN.  */
1392       if (retval
1393 	  && (TREE_CODE (retval) != RESULT_DECL
1394 	      && (TREE_CODE (retval) != SSA_NAME
1395 		  || ! SSA_NAME_VAR (retval)
1396 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1397         {
1398 	  copy = gimple_build_assign (id->do_not_unshare
1399 				      ? id->retvar : unshare_expr (id->retvar),
1400 				      retval);
1401 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1402 	  skip_first = true;
1403 
1404 	  /* We need to copy bounds if we return a structure with pointers
1405 	     into an instrumented function.  */
1406 	  if (chkp_function_instrumented_p (id->dst_fn)
1407 	      && !bndslot
1408 	      && !BOUNDED_P (id->retvar)
1409 	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1410 	    id->assign_stmts.safe_push (copy);
1411 
1412 	}
1413       else
1414 	return stmts;
1415     }
1416   else if (gimple_has_substatements (stmt))
1417     {
1418       gimple_seq s1, s2;
1419 
1420       /* When cloning bodies from the C++ front end, we will be handed bodies
1421 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1422 	 have embedded statements.  */
1423       switch (gimple_code (stmt))
1424 	{
1425 	case GIMPLE_BIND:
1426 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1427 	  break;
1428 
1429 	case GIMPLE_CATCH:
1430 	  {
1431 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1432 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1433 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1434 	  }
1435 	  break;
1436 
1437 	case GIMPLE_EH_FILTER:
1438 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1439 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1440 	  break;
1441 
1442 	case GIMPLE_TRY:
1443 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1444 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1445 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1446 	  break;
1447 
1448 	case GIMPLE_WITH_CLEANUP_EXPR:
1449 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1450 	  copy = gimple_build_wce (s1);
1451 	  break;
1452 
1453 	case GIMPLE_OMP_PARALLEL:
1454 	  {
1455 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1456 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1457 	    copy = gimple_build_omp_parallel
1458 	             (s1,
1459 		      gimple_omp_parallel_clauses (omp_par_stmt),
1460 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1461 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1462 	  }
1463 	  break;
1464 
1465 	case GIMPLE_OMP_TASK:
1466 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1467 	  copy = gimple_build_omp_task
1468 	           (s1,
1469 		    gimple_omp_task_clauses (stmt),
1470 		    gimple_omp_task_child_fn (stmt),
1471 		    gimple_omp_task_data_arg (stmt),
1472 		    gimple_omp_task_copy_fn (stmt),
1473 		    gimple_omp_task_arg_size (stmt),
1474 		    gimple_omp_task_arg_align (stmt));
1475 	  break;
1476 
1477 	case GIMPLE_OMP_FOR:
1478 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1479 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1480 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1481 				       gimple_omp_for_clauses (stmt),
1482 				       gimple_omp_for_collapse (stmt), s2);
1483 	  {
1484 	    size_t i;
1485 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1486 	      {
1487 		gimple_omp_for_set_index (copy, i,
1488 					  gimple_omp_for_index (stmt, i));
1489 		gimple_omp_for_set_initial (copy, i,
1490 					    gimple_omp_for_initial (stmt, i));
1491 		gimple_omp_for_set_final (copy, i,
1492 					  gimple_omp_for_final (stmt, i));
1493 		gimple_omp_for_set_incr (copy, i,
1494 					 gimple_omp_for_incr (stmt, i));
1495 		gimple_omp_for_set_cond (copy, i,
1496 					 gimple_omp_for_cond (stmt, i));
1497 	      }
1498 	  }
1499 	  break;
1500 
1501 	case GIMPLE_OMP_MASTER:
1502 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1503 	  copy = gimple_build_omp_master (s1);
1504 	  break;
1505 
1506 	case GIMPLE_OMP_TASKGROUP:
1507 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1508 	  copy = gimple_build_omp_taskgroup (s1);
1509 	  break;
1510 
1511 	case GIMPLE_OMP_ORDERED:
1512 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1513 	  copy = gimple_build_omp_ordered (s1);
1514 	  break;
1515 
1516 	case GIMPLE_OMP_SECTION:
1517 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1518 	  copy = gimple_build_omp_section (s1);
1519 	  break;
1520 
1521 	case GIMPLE_OMP_SECTIONS:
1522 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1523 	  copy = gimple_build_omp_sections
1524 	           (s1, gimple_omp_sections_clauses (stmt));
1525 	  break;
1526 
1527 	case GIMPLE_OMP_SINGLE:
1528 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1529 	  copy = gimple_build_omp_single
1530 	           (s1, gimple_omp_single_clauses (stmt));
1531 	  break;
1532 
1533 	case GIMPLE_OMP_TARGET:
1534 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1535 	  copy = gimple_build_omp_target
1536 		   (s1, gimple_omp_target_kind (stmt),
1537 		    gimple_omp_target_clauses (stmt));
1538 	  break;
1539 
1540 	case GIMPLE_OMP_TEAMS:
1541 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1542 	  copy = gimple_build_omp_teams
1543 		   (s1, gimple_omp_teams_clauses (stmt));
1544 	  break;
1545 
1546 	case GIMPLE_OMP_CRITICAL:
1547 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1548 	  copy = gimple_build_omp_critical (s1,
1549 					    gimple_omp_critical_name (
1550 					      as_a <gomp_critical *> (stmt)));
1551 	  break;
1552 
1553 	case GIMPLE_TRANSACTION:
1554 	  {
1555 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1556 	    gtransaction *new_trans_stmt;
1557 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1558 				   id);
1559 	    copy = new_trans_stmt
1560 	      = gimple_build_transaction (
1561 		  s1,
1562 		  gimple_transaction_label (old_trans_stmt));
1563 	    gimple_transaction_set_subcode (
1564               new_trans_stmt,
1565 	      gimple_transaction_subcode (old_trans_stmt));
1566 	  }
1567 	  break;
1568 
1569 	default:
1570 	  gcc_unreachable ();
1571 	}
1572     }
1573   else
1574     {
1575       if (gimple_assign_copy_p (stmt)
1576 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1577 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1578 	{
1579 	  /* Here we handle statements that are not completely rewritten.
1580 	     First we detect some inlining-induced bogosities for
1581 	     discarding.  */
1582 
1583 	  /* Some assignments VAR = VAR; don't generate any rtl code
1584 	     and thus don't count as variable modification.  Avoid
1585 	     keeping bogosities like 0 = 0.  */
1586 	  tree decl = gimple_assign_lhs (stmt), value;
1587 	  tree *n;
1588 
1589 	  n = id->decl_map->get (decl);
1590 	  if (n)
1591 	    {
1592 	      value = *n;
1593 	      STRIP_TYPE_NOPS (value);
1594 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1595 		return NULL;
1596 	    }
1597 	}
1598 
1599       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1600 	 in a block that we aren't copying during tree_function_versioning,
1601 	 just drop the clobber stmt.  */
1602       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1603 	{
1604 	  tree lhs = gimple_assign_lhs (stmt);
1605 	  if (TREE_CODE (lhs) == MEM_REF
1606 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1607 	    {
1608 	      gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1609 	      if (gimple_bb (def_stmt)
1610 		  && !bitmap_bit_p (id->blocks_to_copy,
1611 				    gimple_bb (def_stmt)->index))
1612 		return NULL;
1613 	    }
1614 	}
1615 
1616       if (gimple_debug_bind_p (stmt))
1617 	{
1618 	  gdebug *copy
1619 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1620 				       gimple_debug_bind_get_value (stmt),
1621 				       stmt);
1622 	  id->debug_stmts.safe_push (copy);
1623 	  gimple_seq_add_stmt (&stmts, copy);
1624 	  return stmts;
1625 	}
1626       if (gimple_debug_source_bind_p (stmt))
1627 	{
1628 	  gdebug *copy = gimple_build_debug_source_bind
1629 	                   (gimple_debug_source_bind_get_var (stmt),
1630 			    gimple_debug_source_bind_get_value (stmt),
1631 			    stmt);
1632 	  id->debug_stmts.safe_push (copy);
1633 	  gimple_seq_add_stmt (&stmts, copy);
1634 	  return stmts;
1635 	}
1636 
1637       /* Create a new deep copy of the statement.  */
1638       copy = gimple_copy (stmt);
1639 
1640       /* Clear flags that need revisiting.  */
1641       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1642         {
1643 	  if (gimple_call_tail_p (call_stmt))
1644 	    gimple_call_set_tail (call_stmt, false);
1645 	  if (gimple_call_from_thunk_p (call_stmt))
1646 	    gimple_call_set_from_thunk (call_stmt, false);
1647 	  if (gimple_call_internal_p (call_stmt))
1648 	    switch (gimple_call_internal_fn (call_stmt))
1649 	      {
1650 	      case IFN_GOMP_SIMD_LANE:
1651 	      case IFN_GOMP_SIMD_VF:
1652 	      case IFN_GOMP_SIMD_LAST_LANE:
1653 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1654 	        break;
1655 	      default:
1656 		break;
1657 	      }
1658 	}
1659 
1660       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1661 	 RESX and EH_DISPATCH.  */
1662       if (id->eh_map)
1663 	switch (gimple_code (copy))
1664 	  {
1665 	  case GIMPLE_CALL:
1666 	    {
1667 	      tree r, fndecl = gimple_call_fndecl (copy);
1668 	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1669 		switch (DECL_FUNCTION_CODE (fndecl))
1670 		  {
1671 		  case BUILT_IN_EH_COPY_VALUES:
1672 		    r = gimple_call_arg (copy, 1);
1673 		    r = remap_eh_region_tree_nr (r, id);
1674 		    gimple_call_set_arg (copy, 1, r);
1675 		    /* FALLTHRU */
1676 
1677 		  case BUILT_IN_EH_POINTER:
1678 		  case BUILT_IN_EH_FILTER:
1679 		    r = gimple_call_arg (copy, 0);
1680 		    r = remap_eh_region_tree_nr (r, id);
1681 		    gimple_call_set_arg (copy, 0, r);
1682 		    break;
1683 
1684 		  default:
1685 		    break;
1686 		  }
1687 
1688 	      /* Reset alias info if we didn't apply measures to
1689 		 keep it valid over inlining by setting DECL_PT_UID.  */
1690 	      if (!id->src_cfun->gimple_df
1691 		  || !id->src_cfun->gimple_df->ipa_pta)
1692 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1693 	    }
1694 	    break;
1695 
1696 	  case GIMPLE_RESX:
1697 	    {
1698 	      gresx *resx_stmt = as_a <gresx *> (copy);
1699 	      int r = gimple_resx_region (resx_stmt);
1700 	      r = remap_eh_region_nr (r, id);
1701 	      gimple_resx_set_region (resx_stmt, r);
1702 	    }
1703 	    break;
1704 
1705 	  case GIMPLE_EH_DISPATCH:
1706 	    {
1707 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1708 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1709 	      r = remap_eh_region_nr (r, id);
1710 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1711 	    }
1712 	    break;
1713 
1714 	  default:
1715 	    break;
1716 	  }
1717     }
1718 
1719   /* If STMT has a block defined, map it to the newly constructed
1720      block.  */
1721   if (gimple_block (copy))
1722     {
1723       tree *n;
1724       n = id->decl_map->get (gimple_block (copy));
1725       gcc_assert (n);
1726       gimple_set_block (copy, *n);
1727     }
1728 
1729   if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1730     {
1731       gimple_seq_add_stmt (&stmts, copy);
1732       return stmts;
1733     }
1734 
1735   /* Remap all the operands in COPY.  */
1736   memset (&wi, 0, sizeof (wi));
1737   wi.info = id;
1738   if (skip_first)
1739     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1740   else
1741     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1742 
1743   /* Clear the copied virtual operands.  We are not remapping them here
1744      but are going to recreate them from scratch.  */
1745   if (gimple_has_mem_ops (copy))
1746     {
1747       gimple_set_vdef (copy, NULL_TREE);
1748       gimple_set_vuse (copy, NULL_TREE);
1749     }
1750 
1751   gimple_seq_add_stmt (&stmts, copy);
1752   return stmts;
1753 }
1754 
1755 
1756 /* Copy a basic block and scale its profile accordingly.  Edges will be taken
1757    care of later.  */
1758 
1759 static basic_block
1760 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1761          gcov_type count_scale)
1762 {
1763   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1764   basic_block copy_basic_block;
1765   tree decl;
1766   gcov_type freq;
1767   basic_block prev;
1768 
1769   /* Search for previous copied basic block.  */
1770   prev = bb->prev_bb;
1771   while (!prev->aux)
1772     prev = prev->prev_bb;
1773 
1774   /* create_basic_block() will append every new block to
1775      basic_block_info automatically.  */
1776   copy_basic_block = create_basic_block (NULL, (void *) 0,
1777                                          (basic_block) prev->aux);
1778   copy_basic_block->count = apply_scale (bb->count, count_scale);
1779 
1780   /* We are going to rebuild frequencies from scratch.  These values
1781      are of only minor importance for driving canonicalize_loop_headers.  */
1782   freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1783 
1784   /* We recompute frequencies after inlining, so this is quite safe.  */
1785   if (freq > BB_FREQ_MAX)
1786     freq = BB_FREQ_MAX;
1787   copy_basic_block->frequency = freq;
1788 
1789   copy_gsi = gsi_start_bb (copy_basic_block);
1790 
1791   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1792     {
1793       gimple_seq stmts;
1794       gimple stmt = gsi_stmt (gsi);
1795       gimple orig_stmt = stmt;
1796       gimple_stmt_iterator stmts_gsi;
1797       bool stmt_added = false;
1798 
1799       id->regimplify = false;
1800       stmts = remap_gimple_stmt (stmt, id);
1801 
1802       if (gimple_seq_empty_p (stmts))
1803 	continue;
1804 
1805       seq_gsi = copy_gsi;
1806 
1807       for (stmts_gsi = gsi_start (stmts);
1808 	   !gsi_end_p (stmts_gsi); )
1809 	{
1810 	  stmt = gsi_stmt (stmts_gsi);
1811 
1812 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1813 	  gsi_next (&stmts_gsi);
1814 
1815 	  if (gimple_nop_p (stmt))
1816 	    continue;
1817 
1818 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1819 					    orig_stmt);
1820 
1821 	  /* With return slot optimization we can end up with
1822 	     non-gimple (foo *)&this->m; fix that here.  */
1823 	  if (is_gimple_assign (stmt)
1824 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1825 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1826 	    {
1827 	      tree new_rhs;
1828 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1829 						  gimple_assign_rhs1 (stmt),
1830 						  true, NULL, false,
1831 						  GSI_CONTINUE_LINKING);
1832 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1833 	      id->regimplify = false;
1834 	    }
1835 
1836 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1837 
1838 	  if (id->regimplify)
1839 	    gimple_regimplify_operands (stmt, &seq_gsi);
1840 
1841 	  stmt_added = true;
1842 	}
1843 
1844       if (!stmt_added)
1845 	continue;
1846 
1847       /* If copy_basic_block was empty at the start of this iteration,
1848 	 call gsi_start_bb again to get at the newly added statements.  */
1849       if (gsi_end_p (copy_gsi))
1850 	copy_gsi = gsi_start_bb (copy_basic_block);
1851       else
1852 	gsi_next (&copy_gsi);
1853 
1854       /* Process the new statement.  The call to gimple_regimplify_operands
1855 	 possibly turned the statement into multiple statements; we
1856 	 need to process all of them.  */
1857       do
1858 	{
1859 	  tree fn;
1860 	  gcall *call_stmt;
1861 
1862 	  stmt = gsi_stmt (copy_gsi);
1863 	  call_stmt = dyn_cast <gcall *> (stmt);
1864 	  if (call_stmt
1865 	      && gimple_call_va_arg_pack_p (call_stmt)
1866 	      && id->call_stmt
1867 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
1868 	    {
1869 	      /* __builtin_va_arg_pack () should be replaced by
1870 		 all arguments corresponding to ... in the caller.  */
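	      /* An illustrative sketch, not taken from the sources: if the
		 caller does  foo (1, 2, 3)  for  int foo (int x, ...)  and
		 the inlined body of foo contains
		 bar (__builtin_va_arg_pack ()), the copied call is rebuilt
		 below as  bar (2, 3) : the named argument is skipped and
		 only the arguments matching the ellipsis are appended.  */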
1871 	      tree p;
1872 	      gcall *new_call;
1873 	      vec<tree> argarray;
1874 	      size_t nargs = gimple_call_num_args (id->call_stmt);
1875 	      size_t n, i, nargs_to_copy;
1876 	      bool remove_bounds = false;
1877 
1878 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1879 		nargs--;
1880 
1881 	      /* Bounds should be removed from the arg pack in case
1882 		 we handle a non-instrumented call in an instrumented
1883 		 function.  */
1884 	      nargs_to_copy = nargs;
1885 	      if (gimple_call_with_bounds_p (id->call_stmt)
1886 		  && !gimple_call_with_bounds_p (stmt))
1887 		{
1888 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1889 		       i < gimple_call_num_args (id->call_stmt);
1890 		       i++)
1891 		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1892 		      nargs_to_copy--;
1893 		  remove_bounds = true;
1894 		}
1895 
1896 	      /* Create the new array of arguments.  */
1897 	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
1898 	      argarray.create (n);
1899 	      argarray.safe_grow_cleared (n);
1900 
1901 	      /* Copy all the arguments before '...'  */
1902 	      memcpy (argarray.address (),
1903 		      gimple_call_arg_ptr (call_stmt, 0),
1904 		      gimple_call_num_args (call_stmt) * sizeof (tree));
1905 
1906 	      if (remove_bounds)
1907 		{
1908 		  /* Append the rest of the arguments, removing bounds.  */
1909 		  unsigned cur = gimple_call_num_args (call_stmt);
1911 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1912 		       i < gimple_call_num_args (id->call_stmt);
1913 		       i++)
1914 		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1915 		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1916 		  gcc_assert (cur == n);
1917 		}
1918 	      else
1919 		{
1920 		  /* Append the arguments passed in '...'  */
1921 		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1922 			  gimple_call_arg_ptr (id->call_stmt, 0)
1923 			  + (gimple_call_num_args (id->call_stmt) - nargs),
1924 			  nargs * sizeof (tree));
1925 		}
1926 
1927 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1928 						argarray);
1929 
1930 	      argarray.release ();
1931 
1932 	      /* Copy all GIMPLE_CALL flags, location and block, except
1933 		 GF_CALL_VA_ARG_PACK.  */
1934 	      gimple_call_copy_flags (new_call, call_stmt);
1935 	      gimple_call_set_va_arg_pack (new_call, false);
1936 	      gimple_set_location (new_call, gimple_location (stmt));
1937 	      gimple_set_block (new_call, gimple_block (stmt));
1938 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1939 
1940 	      gsi_replace (&copy_gsi, new_call, false);
1941 	      stmt = new_call;
1942 	    }
1943 	  else if (call_stmt
1944 		   && id->call_stmt
1945 		   && (decl = gimple_call_fndecl (stmt))
1946 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1947 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
1948 		   && ! gimple_call_va_arg_pack_p (id->call_stmt))
1949 	    {
1950 	      /* __builtin_va_arg_pack_len () should be replaced by
1951 		 the number of anonymous arguments.  */
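	      /* Continuing the hypothetical example above: for a caller
		 foo (1, 2, 3) with  int foo (int x, ...), the call to
		 __builtin_va_arg_pack_len () is replaced by the constant 2
		 (pointer bounds, if present, are not counted).  */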
1952 	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
1953 	      tree count, p;
1954 	      gimple new_stmt;
1955 
1956 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1957 		nargs--;
1958 
1959 	      /* For instrumented calls we should ignore bounds.  */
1960 	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
1961 		   i < gimple_call_num_args (id->call_stmt);
1962 		   i++)
1963 		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1964 		  nargs--;
1965 
1966 	      count = build_int_cst (integer_type_node, nargs);
1967 	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1968 	      gsi_replace (&copy_gsi, new_stmt, false);
1969 	      stmt = new_stmt;
1970 	    }
1971 	  else if (call_stmt
1972 		   && id->call_stmt
1973 		   && gimple_call_internal_p (stmt)
1974 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1975 	    {
1976 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
1977 	      gsi_remove (&copy_gsi, false);
1978 	      continue;
1979 	    }
1980 
1981 	  /* Statements produced by inlining can be unfolded, especially
1982 	     when we constant propagated some operands.  We can't fold
1983 	     them right now for two reasons:
1984 	     1) folding requires SSA_NAME_DEF_STMTs to be correct,
1985 	     2) we can't change function calls to builtins.
1986 	     So we just mark the statement for later folding.  We mark
1987 	     all new statements, instead of just the statements that changed
1988 	     by some nontrivial substitution, so that even statements made
1989 	     foldable indirectly are updated.  If this turns out to be
1990 	     expensive, copy_body can be told to watch for nontrivial
1991 	     changes.  */
1992 	  if (id->statements_to_fold)
1993 	    id->statements_to_fold->add (stmt);
1994 
1995 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
1996 	     callgraph edges and update or duplicate them.  */
1997 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1998 	    {
1999 	      struct cgraph_edge *edge;
2000 
2001 	      switch (id->transform_call_graph_edges)
2002 		{
2003 		case CB_CGE_DUPLICATE:
2004 		  edge = id->src_node->get_edge (orig_stmt);
2005 		  if (edge)
2006 		    {
2007 		      int edge_freq = edge->frequency;
2008 		      int new_freq;
2009 		      struct cgraph_edge *old_edge = edge;
2010 		      edge = edge->clone (id->dst_node, call_stmt,
2011 					  gimple_uid (stmt),
2012 					  REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2013 					  true);
2014 		      /* We could also just rescale the frequency, but
2015 		         doing so would introduce roundoff errors and make
2016 			 the verifier unhappy.  */
2017 		      new_freq  = compute_call_stmt_bb_frequency (id->dst_node->decl,
2018 								  copy_basic_block);
2019 
2020 		      /* Speculative calls consist of two edges - direct and indirect.
2021 			 Duplicate the whole thing and distribute frequencies accordingly.  */
2022 		      if (edge->speculative)
2023 			{
2024 			  struct cgraph_edge *direct, *indirect;
2025 			  struct ipa_ref *ref;
2026 
2027 			  gcc_assert (!edge->indirect_unknown_callee);
2028 			  old_edge->speculative_call_info (direct, indirect, ref);
2029 			  indirect = indirect->clone (id->dst_node, call_stmt,
2030 						      gimple_uid (stmt),
2031 						      REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2032 						      true);
2033 			  if (old_edge->frequency + indirect->frequency)
2034 			    {
2035 			      edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2036 						           (old_edge->frequency + indirect->frequency)),
2037 						     CGRAPH_FREQ_MAX);
2038 			      indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2039 							       (old_edge->frequency + indirect->frequency)),
2040 							 CGRAPH_FREQ_MAX);
2041 			    }
2042 			  id->dst_node->clone_reference (ref, stmt);
2043 			}
2044 		      else
2045 			{
2046 			  edge->frequency = new_freq;
2047 			  if (dump_file
2048 			      && profile_status_for_fn (cfun) != PROFILE_ABSENT
2049 			      && (edge_freq > edge->frequency + 10
2050 				  || edge_freq < edge->frequency - 10))
2051 			    {
2052 			      fprintf (dump_file, "Edge frequency estimated by "
2053 				       "cgraph %i diverge from inliner's estimate %i\n",
2054 				       edge_freq,
2055 				       edge->frequency);
2056 			      fprintf (dump_file,
2057 				       "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2058 				       bb->index,
2059 				       bb->frequency,
2060 				       copy_basic_block->frequency);
2061 			    }
2062 			}
2063 		    }
2064 		  break;
2065 
2066 		case CB_CGE_MOVE_CLONES:
2067 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2068 								call_stmt);
2069 		  edge = id->dst_node->get_edge (stmt);
2070 		  break;
2071 
2072 		case CB_CGE_MOVE:
2073 		  edge = id->dst_node->get_edge (orig_stmt);
2074 		  if (edge)
2075 		    edge->set_call_stmt (call_stmt);
2076 		  break;
2077 
2078 		default:
2079 		  gcc_unreachable ();
2080 		}
2081 
2082 	      /* Constant propagation on arguments done during inlining
2083 		 may create a new direct call.  Produce an edge for it.  */
2084 	      if ((!edge
2085 		   || (edge->indirect_inlining_edge
2086 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2087 		  && id->dst_node->definition
2088 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2089 		{
2090 		  struct cgraph_node *dest = cgraph_node::get (fn);
2091 
2092 		  /* We have a missing edge in the callgraph.  This can happen
2093 		     when a previous inlining turned an indirect call into a
2094 		     direct call by constant propagating arguments, or when we
2095 		     are producing a dead clone (for further cloning).  In all
2096 		     other cases we hit a bug (incorrect node sharing is the
2097 		     most common reason for missing edges).  */
2098 		  gcc_assert (!dest->definition
2099 			      || dest->address_taken
2100 			      || !id->src_node->definition
2101 			      || !id->dst_node->definition);
2102 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2103 		    id->dst_node->create_edge_including_clones
2104 		      (dest, orig_stmt, call_stmt, bb->count,
2105 		       compute_call_stmt_bb_frequency (id->dst_node->decl,
2106 		       				       copy_basic_block),
2107 		       CIF_ORIGINALLY_INDIRECT_CALL);
2108 		  else
2109 		    id->dst_node->create_edge (dest, call_stmt,
2110 					bb->count,
2111 					compute_call_stmt_bb_frequency
2112 					  (id->dst_node->decl,
2113 					   copy_basic_block))->inline_failed
2114 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2115 		  if (dump_file)
2116 		    {
2117 		      fprintf (dump_file, "Created new direct edge to %s\n",
2118 			       dest->name ());
2119 		    }
2120 		}
2121 
2122 	      notice_special_calls (as_a <gcall *> (stmt));
2123 	    }
2124 
2125 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2126 				      id->eh_map, id->eh_lp_nr);
2127 
2128 	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2129 	    {
2130 	      ssa_op_iter i;
2131 	      tree def;
2132 
2133 	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2134 		if (TREE_CODE (def) == SSA_NAME)
2135 		  SSA_NAME_DEF_STMT (def) = stmt;
2136 	    }
2137 
2138 	  gsi_next (&copy_gsi);
2139 	}
2140       while (!gsi_end_p (copy_gsi));
2141 
2142       copy_gsi = gsi_last_bb (copy_basic_block);
2143     }
2144 
2145   return copy_basic_block;
2146 }
2147 
2148 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2149    form is quite easy, since the dominator relationship for the old basic
2150    blocks does not change.
2151 
2152    There is, however, an exception where inlining might change the dominator
2153    relation across EH edges leading from basic blocks within the inlined
2154    function to landing pads in the function we inline into.
2155 
2156    The function fills in PHI_RESULTs of such PHI nodes if they refer
2157    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2158    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2159    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2160    set, and this means that there will be no overlapping live ranges
2161    for the underlying symbol.
2162 
2163    This might change in the future if we allow redirecting of EH edges;
2164    we might then want to change the way we build the CFG pre-inlining to
2165    include all the possible edges.  */
2166 static void
2167 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2168 				  bool can_throw, bool nonlocal_goto)
2169 {
2170   edge e;
2171   edge_iterator ei;
2172 
2173   FOR_EACH_EDGE (e, ei, bb->succs)
2174     if (!e->dest->aux
2175 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2176       {
2177 	gphi *phi;
2178 	gphi_iterator si;
2179 
2180 	if (!nonlocal_goto)
2181 	  gcc_assert (e->flags & EDGE_EH);
2182 
2183 	if (!can_throw)
2184 	  gcc_assert (!(e->flags & EDGE_EH));
2185 
2186 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2187 	  {
2188 	    edge re;
2189 
2190 	    phi = si.phi ();
2191 
2192 	    /* For abnormal goto/call edges the receiver can be the
2193 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2194 
2195 	    gcc_assert ((e->flags & EDGE_EH)
2196 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2197 
2198 	    re = find_edge (ret_bb, e->dest);
2199 	    gcc_checking_assert (re);
2200 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2201 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2202 
2203 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2204 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2205 	  }
2206       }
2207 }
2208 
2209 
2210 /* Copy edges from BB into its copy constructed earlier, scale profile
2211    accordingly.  Edges will be taken care of later.  Assume the aux
2212    pointers point to the copies of each BB.  Return true if any
2213    debug stmts are left after a statement that must end the basic block.  */
2214 
2215 static bool
2216 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2217 		   basic_block abnormal_goto_dest)
2218 {
2219   basic_block new_bb = (basic_block) bb->aux;
2220   edge_iterator ei;
2221   edge old_edge;
2222   gimple_stmt_iterator si;
2223   int flags;
2224   bool need_debug_cleanup = false;
2225 
2226   /* Use the indices from the original blocks to create edges for the
2227      new ones.  */
2228   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2229     if (!(old_edge->flags & EDGE_EH))
2230       {
2231 	edge new_edge;
2232 
2233 	flags = old_edge->flags;
2234 
2235 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2236 	if (old_edge->dest->index == EXIT_BLOCK
2237 	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2238 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2239 	  flags |= EDGE_FALLTHRU;
2240 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2241 	new_edge->count = apply_scale (old_edge->count, count_scale);
2242 	new_edge->probability = old_edge->probability;
2243       }
2244 
2245   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2246     return false;
2247 
2248   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2249     {
2250       gimple copy_stmt;
2251       bool can_throw, nonlocal_goto;
2252 
2253       copy_stmt = gsi_stmt (si);
2254       if (!is_gimple_debug (copy_stmt))
2255 	update_stmt (copy_stmt);
2256 
2257       /* Do this before the possible split_block.  */
2258       gsi_next (&si);
2259 
2260       /* If this tree could throw an exception, there are two
2261          cases where we need to add abnormal edge(s): the
2262          tree wasn't in a region and there is a "current
2263          region" in the caller; or the original tree had
2264          EH edges.  In both cases split the block after the tree,
2265          and add abnormal edge(s) as needed; we need both
2266          those from the callee and the caller.
2267          We check whether the copy can throw, because the const
2268          propagation can change an INDIRECT_REF which throws
2269          into a COMPONENT_REF which doesn't.  If the copy
2270          can throw, the original could also throw.  */
2271       can_throw = stmt_can_throw_internal (copy_stmt);
2272       nonlocal_goto
2273 	= (stmt_can_make_abnormal_goto (copy_stmt)
2274 	   && !computed_goto_p (copy_stmt));
2275 
2276       if (can_throw || nonlocal_goto)
2277 	{
2278 	  if (!gsi_end_p (si))
2279 	    {
2280 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2281 		gsi_next (&si);
2282 	      if (gsi_end_p (si))
2283 		need_debug_cleanup = true;
2284 	    }
2285 	  if (!gsi_end_p (si))
2286 	    /* Note that bb's predecessor edges aren't necessarily
2287 	       right at this point; split_block doesn't care.  */
2288 	    {
2289 	      edge e = split_block (new_bb, copy_stmt);
2290 
2291 	      new_bb = e->dest;
2292 	      new_bb->aux = e->src->aux;
2293 	      si = gsi_start_bb (new_bb);
2294 	    }
2295 	}
2296 
2297       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2298 	make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2299       else if (can_throw)
2300 	make_eh_edges (copy_stmt);
2301 
2302       /* If the call we inline cannot make an abnormal goto, do not add
2303          additional abnormal edges; only retain those already present
2304 	 in the original function body.  */
2305       if (abnormal_goto_dest == NULL)
2306 	nonlocal_goto = false;
2307       if (nonlocal_goto)
2308 	{
2309 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2310 
2311 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2312 	    nonlocal_goto = false;
2313 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2314 	     in OpenMP regions which aren't allowed to be left abnormally.
2315 	     So, no need to add abnormal edge in that case.  */
2316 	  else if (is_gimple_call (copy_stmt)
2317 		   && gimple_call_internal_p (copy_stmt)
2318 		   && (gimple_call_internal_fn (copy_stmt)
2319 		       == IFN_ABNORMAL_DISPATCHER)
2320 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2321 	    nonlocal_goto = false;
2322 	  else
2323 	    make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2324 	}
2325 
2326       if ((can_throw || nonlocal_goto)
2327 	  && gimple_in_ssa_p (cfun))
2328 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2329 					  can_throw, nonlocal_goto);
2330     }
2331   return need_debug_cleanup;
2332 }
2333 
2334 /* Copy the PHIs.  All blocks and edges are copied; some blocks
2335    were possibly split and new outgoing EH edges inserted.
2336    BB points to the block of the original function and AUX pointers link
2337    the original and newly copied blocks.  */
2338 
2339 static void
2340 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2341 {
2342   basic_block const new_bb = (basic_block) bb->aux;
2343   edge_iterator ei;
2344   gphi *phi;
2345   gphi_iterator si;
2346   edge new_edge;
2347   bool inserted = false;
2348 
2349   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2350     {
2351       tree res, new_res;
2352       gphi *new_phi;
2353 
2354       phi = si.phi ();
2355       res = PHI_RESULT (phi);
2356       new_res = res;
2357       if (!virtual_operand_p (res))
2358 	{
2359 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2360 	  new_phi = create_phi_node (new_res, new_bb);
2361 	  if (EDGE_COUNT (new_bb->preds) == 0)
2362 	    {
2363 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2364 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2365 	    }
2366 	  else
2367 	    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2368 	      {
2369 		edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2370 		tree arg;
2371 		tree new_arg;
2372 		edge_iterator ei2;
2373 		location_t locus;
2374 
2375 		/* When doing partial cloning, we allow PHIs on the entry block
2376 		   as long as all the arguments are the same.  Find any incoming
2377 		   edge to see which argument to copy.  */
2378 		if (!old_edge)
2379 		  FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2380 		    if (!old_edge->src->aux)
2381 		      break;
2382 
2383 		arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2384 		new_arg = arg;
2385 		walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2386 		gcc_assert (new_arg);
2387 		/* With return slot optimization we can end up with
2388 		   non-gimple (foo *)&this->m; fix that here.  */
2389 		if (TREE_CODE (new_arg) != SSA_NAME
2390 		    && TREE_CODE (new_arg) != FUNCTION_DECL
2391 		    && !is_gimple_val (new_arg))
2392 		  {
2393 		    gimple_seq stmts = NULL;
2394 		    new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2395 		    gsi_insert_seq_on_edge (new_edge, stmts);
2396 		    inserted = true;
2397 		  }
2398 		locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2399 		if (LOCATION_BLOCK (locus))
2400 		  {
2401 		    tree *n;
2402 		    n = id->decl_map->get (LOCATION_BLOCK (locus));
2403 		    gcc_assert (n);
2404 		    if (*n)
2405 		      locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2406 		    else
2407 		      locus = LOCATION_LOCUS (locus);
2408 		  }
2409 		 else
2410 		   locus = LOCATION_LOCUS (locus);
2411 
2412 		add_phi_arg (new_phi, new_arg, new_edge, locus);
2413 	      }
2414 	}
2415     }
2416 
2417   /* Commit the delayed edge insertions.  */
2418   if (inserted)
2419     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2420       gsi_commit_one_edge_insert (new_edge, NULL);
2421 }
2422 
2423 
2424 /* Wrapper for remap_decl so it can be used as a callback.  */
2425 
2426 static tree
2427 remap_decl_1 (tree decl, void *data)
2428 {
2429   return remap_decl (decl, (copy_body_data *) data);
2430 }
2431 
2432 /* Build struct function and associated datastructures for the new clone
2433    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2434    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2435 
2436 static void
2437 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2438 {
2439   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2440   gcov_type count_scale;
2441 
2442   if (!DECL_ARGUMENTS (new_fndecl))
2443     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2444   if (!DECL_RESULT (new_fndecl))
2445     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2446 
2447   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2448     count_scale
2449         = GCOV_COMPUTE_SCALE (count,
2450                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2451   else
2452     count_scale = REG_BR_PROB_BASE;
2453 
2454   /* Register specific tree functions.  */
2455   gimple_register_cfg_hooks ();
2456 
2457   /* Get clean struct function.  */
2458   push_struct_function (new_fndecl);
2459 
2460   /* We will rebuild these, so just sanity check that they are empty.  */
2461   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2462   gcc_assert (cfun->local_decls == NULL);
2463   gcc_assert (cfun->cfg == NULL);
2464   gcc_assert (cfun->decl == new_fndecl);
2465 
2466   /* Copy items we preserve during cloning.  */
2467   cfun->static_chain_decl = src_cfun->static_chain_decl;
2468   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2469   cfun->function_end_locus = src_cfun->function_end_locus;
2470   cfun->curr_properties = src_cfun->curr_properties;
2471   cfun->last_verified = src_cfun->last_verified;
2472   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2473   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2474   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2475   cfun->stdarg = src_cfun->stdarg;
2476   cfun->after_inlining = src_cfun->after_inlining;
2477   cfun->can_throw_non_call_exceptions
2478     = src_cfun->can_throw_non_call_exceptions;
2479   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2480   cfun->returns_struct = src_cfun->returns_struct;
2481   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2482 
2483   init_empty_tree_cfg ();
2484 
2485   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2486   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2487     (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2488      REG_BR_PROB_BASE);
2489   ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2490     = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2491   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2492     (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2493      REG_BR_PROB_BASE);
2494   EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2495     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2496   if (src_cfun->eh)
2497     init_eh_for_function ();
2498 
2499   if (src_cfun->gimple_df)
2500     {
2501       init_tree_ssa (cfun);
2502       cfun->gimple_df->in_ssa_p = true;
2503       init_ssa_operands (cfun);
2504     }
2505 }
2506 
2507 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2508    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2509    successor has multiple predecessors, reset the debug stmts' values;
2510    otherwise keep them.  */
2511 
2512 static void
2513 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2514 {
2515   edge e;
2516   edge_iterator ei;
2517   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2518 
2519   if (gsi_end_p (si)
2520       || gsi_one_before_end_p (si)
2521       || !(stmt_can_throw_internal (gsi_stmt (si))
2522 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2523     return;
2524 
2525   FOR_EACH_EDGE (e, ei, new_bb->succs)
2526     {
2527       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2528       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2529       while (is_gimple_debug (gsi_stmt (ssi)))
2530 	{
2531 	  gimple stmt = gsi_stmt (ssi);
2532 	  gdebug *new_stmt;
2533 	  tree var;
2534 	  tree value;
2535 
2536 	  /* For the last edge move the debug stmts instead of copying
2537 	     them.  */
2538 	  if (ei_one_before_end_p (ei))
2539 	    {
2540 	      si = ssi;
2541 	      gsi_prev (&ssi);
2542 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2543 		gimple_debug_bind_reset_value (stmt);
2544 	      gsi_remove (&si, false);
2545 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2546 	      continue;
2547 	    }
2548 
2549 	  if (gimple_debug_bind_p (stmt))
2550 	    {
2551 	      var = gimple_debug_bind_get_var (stmt);
2552 	      if (single_pred_p (e->dest))
2553 		{
2554 		  value = gimple_debug_bind_get_value (stmt);
2555 		  value = unshare_expr (value);
2556 		}
2557 	      else
2558 		value = NULL_TREE;
2559 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2560 	    }
2561 	  else if (gimple_debug_source_bind_p (stmt))
2562 	    {
2563 	      var = gimple_debug_source_bind_get_var (stmt);
2564 	      value = gimple_debug_source_bind_get_value (stmt);
2565 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2566 	    }
2567 	  else
2568 	    gcc_unreachable ();
2569 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2570 	  id->debug_stmts.safe_push (new_stmt);
2571 	  gsi_prev (&ssi);
2572 	}
2573     }
2574 }
2575 
2576 /* Make a copy of the sub-loops of SRC_PARENT and place them
2577    as children of DEST_PARENT.  */
2578 
2579 static void
2580 copy_loops (copy_body_data *id,
2581 	    struct loop *dest_parent, struct loop *src_parent)
2582 {
2583   struct loop *src_loop = src_parent->inner;
2584   while (src_loop)
2585     {
2586       if (!id->blocks_to_copy
2587 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2588 	{
2589 	  struct loop *dest_loop = alloc_loop ();
2590 
2591 	  /* Assign the new loop its header and latch and associate
2592 	     those with the new loop.  */
2593 	  dest_loop->header = (basic_block)src_loop->header->aux;
2594 	  dest_loop->header->loop_father = dest_loop;
2595 	  if (src_loop->latch != NULL)
2596 	    {
2597 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2598 	      dest_loop->latch->loop_father = dest_loop;
2599 	    }
2600 
2601 	  /* Copy loop meta-data.  */
2602 	  copy_loop_info (src_loop, dest_loop);
2603 
2604 	  /* Finally place it into the loop array and the loop tree.  */
2605 	  place_new_loop (cfun, dest_loop);
2606 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2607 
2608 	  dest_loop->safelen = src_loop->safelen;
2609 	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
2610 	  if (src_loop->force_vectorize)
2611 	    {
2612 	      dest_loop->force_vectorize = true;
2613 	      cfun->has_force_vectorize_loops = true;
2614 	    }
2615 	  if (src_loop->simduid)
2616 	    {
2617 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2618 	      cfun->has_simduid_loops = true;
2619 	    }
2620 
2621 	  /* Recurse.  */
2622 	  copy_loops (id, dest_loop, src_loop);
2623 	}
2624       src_loop = src_loop->next;
2625     }
2626 }
2627 
2628 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB.  */
2629 
2630 void
2631 redirect_all_calls (copy_body_data * id, basic_block bb)
2632 {
2633   gimple_stmt_iterator si;
2634   gimple last = last_stmt (bb);
2635   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2636     {
2637       gimple stmt = gsi_stmt (si);
2638       if (is_gimple_call (stmt))
2639 	{
2640 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2641 	  if (edge)
2642 	    {
2643 	      edge->redirect_call_stmt_to_callee ();
2644 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2645 		gimple_purge_dead_eh_edges (bb);
2646 	    }
2647 	}
2648     }
2649 }
2650 
2651 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2652    with each bb's frequency. Used when NODE has a 0-weight entry
2653    but we are about to inline it into a non-zero count call bb.
2654    See the comments for handle_missing_profiles() in predict.c for
2655    when this can happen for COMDATs.  */
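/* A rough sketch of the arithmetic, assuming apply_scale and
   GCOV_COMPUTE_SCALE behave as fixed-point multiply/divide by
   REG_BR_PROB_BASE (an assumption, not taken from this file): a block
   whose frequency is BB_FREQ_MAX / 4 in a node entered COUNT == 1000
   times receives bb->count of roughly 250, and each outgoing edge then
   gets e->count == apply_probability (e->src->count, e->probability).  */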
2656 
2657 void
2658 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2659 {
2660   basic_block bb;
2661   edge_iterator ei;
2662   edge e;
2663   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2664 
2665   FOR_ALL_BB_FN(bb, fn)
2666     {
2667       bb->count = apply_scale (count,
2668                                GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2669       FOR_EACH_EDGE (e, ei, bb->succs)
2670         e->count = apply_probability (e->src->count, e->probability);
2671     }
2672 }
2673 
2674 /* Make a copy of the body of FN so that it can be inserted inline in
2675    another function.  Walks FN via CFG, returns new fndecl.  */
2676 
2677 static tree
2678 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2679 	       basic_block entry_block_map, basic_block exit_block_map,
2680 	       basic_block new_entry)
2681 {
2682   tree callee_fndecl = id->src_fn;
2683   /* Original cfun for the callee, doesn't change.  */
2684   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2685   struct function *cfun_to_copy;
2686   basic_block bb;
2687   tree new_fndecl = NULL;
2688   bool need_debug_cleanup = false;
2689   gcov_type count_scale;
2690   int last;
2691   int incoming_frequency = 0;
2692   gcov_type incoming_count = 0;
2693 
2694   /* This can happen for COMDAT routines that end up with 0 counts
2695      despite being called (see the comments for handle_missing_profiles()
2696      in predict.c as to why). Apply counts to the blocks in the callee
2697      before inlining, using the guessed edge frequencies, so that we don't
2698      end up with a 0-count inline body which can confuse downstream
2699      optimizations such as function splitting.  */
2700   if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2701     {
2702       /* Apply the larger of the call bb count and the total incoming
2703          call edge count to the callee.  */
2704       gcov_type in_count = 0;
2705       struct cgraph_edge *in_edge;
2706       for (in_edge = id->src_node->callers; in_edge;
2707            in_edge = in_edge->next_caller)
2708         in_count += in_edge->count;
2709       freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2710     }
2711 
2712   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2713     count_scale
2714         = GCOV_COMPUTE_SCALE (count,
2715                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2716   else
2717     count_scale = REG_BR_PROB_BASE;
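  /* Under the same assumption about GCOV_COMPUTE_SCALE as above,
     count_scale now expresses the ratio COUNT / callee-entry-count in
     units of REG_BR_PROB_BASE; e.g. inlining a callee whose entry count
     is 4000 at a call site executed 1000 times gives a scale of about
     REG_BR_PROB_BASE / 4, by which copy_bb multiplies every copied
     block count.  */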
2718 
2719   /* Register specific tree functions.  */
2720   gimple_register_cfg_hooks ();
2721 
2722   /* If we are inlining just region of the function, make sure to connect
2723      new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since new entry can be
2724      part of loop, we must compute frequency and probability of
2725      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2726      probabilities of edges incoming from nonduplicated region.  */
2727   if (new_entry)
2728     {
2729       edge e;
2730       edge_iterator ei;
2731 
2732       FOR_EACH_EDGE (e, ei, new_entry->preds)
2733 	if (!e->src->aux)
2734 	  {
2735 	    incoming_frequency += EDGE_FREQUENCY (e);
2736 	    incoming_count += e->count;
2737 	  }
2738       incoming_count = apply_scale (incoming_count, count_scale);
2739       incoming_frequency
2740 	= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2741       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2742       ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2743     }
2744 
2745   /* Must have a CFG here at this point.  */
2746   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2747 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2748 
2749   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2750 
2751   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2752   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2753   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2754   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2755 
2756   /* Duplicate any exception-handling regions.  */
2757   if (cfun->eh)
2758     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2759 				       remap_decl_1, id);
2760 
2761   /* Use aux pointers to map the original blocks to their copies.  */
2762   FOR_EACH_BB_FN (bb, cfun_to_copy)
2763     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2764       {
2765 	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2766 	bb->aux = new_bb;
2767 	new_bb->aux = bb;
2768 	new_bb->loop_father = entry_block_map->loop_father;
2769       }
2770 
2771   last = last_basic_block_for_fn (cfun);
2772 
2773   /* Now that we've duplicated the blocks, duplicate their edges.  */
2774   basic_block abnormal_goto_dest = NULL;
2775   if (id->call_stmt
2776       && stmt_can_make_abnormal_goto (id->call_stmt))
2777     {
2778       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2779 
2780       bb = gimple_bb (id->call_stmt);
2781       gsi_next (&gsi);
2782       if (gsi_end_p (gsi))
2783 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2784     }
2785   FOR_ALL_BB_FN (bb, cfun_to_copy)
2786     if (!id->blocks_to_copy
2787 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2788       need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2789 					       abnormal_goto_dest);
2790 
2791   if (new_entry)
2792     {
2793       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2794       e->probability = REG_BR_PROB_BASE;
2795       e->count = incoming_count;
2796     }
2797 
2798   /* Duplicate the loop tree, if available and wanted.  */
2799   if (loops_for_fn (src_cfun) != NULL
2800       && current_loops != NULL)
2801     {
2802       copy_loops (id, entry_block_map->loop_father,
2803 		  get_loop (src_cfun, 0));
2804       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2805       loops_state_set (LOOPS_NEED_FIXUP);
2806     }
2807 
2808   /* If the loop tree in the source function needed fixup, mark the
2809      destination loop tree for fixup, too.  */
2810   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2811     loops_state_set (LOOPS_NEED_FIXUP);
2812 
2813   if (gimple_in_ssa_p (cfun))
2814     FOR_ALL_BB_FN (bb, cfun_to_copy)
2815       if (!id->blocks_to_copy
2816 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2817 	copy_phis_for_bb (bb, id);
2818 
2819   FOR_ALL_BB_FN (bb, cfun_to_copy)
2820     if (bb->aux)
2821       {
2822 	if (need_debug_cleanup
2823 	    && bb->index != ENTRY_BLOCK
2824 	    && bb->index != EXIT_BLOCK)
2825 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2826 	/* Update call edge destinations.  This can not be done before loop
2827 	   info is updated, because we may split basic blocks.  */
2828 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2829 	    && bb->index != ENTRY_BLOCK
2830 	    && bb->index != EXIT_BLOCK)
2831 	  redirect_all_calls (id, (basic_block)bb->aux);
2832 	((basic_block)bb->aux)->aux = NULL;
2833 	bb->aux = NULL;
2834       }
2835 
2836   /* Zero out AUX fields of the blocks newly created during EH edge
2837      insertion.  */
2838   for (; last < last_basic_block_for_fn (cfun); last++)
2839     {
2840       if (need_debug_cleanup)
2841 	maybe_move_debug_stmts_to_successors (id,
2842 					      BASIC_BLOCK_FOR_FN (cfun, last));
2843       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2844       /* Update call edge destinations.  This can not be done before loop
2845 	 info is updated, because we may split basic blocks.  */
2846       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2847 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2848     }
2849   entry_block_map->aux = NULL;
2850   exit_block_map->aux = NULL;
2851 
2852   if (id->eh_map)
2853     {
2854       delete id->eh_map;
2855       id->eh_map = NULL;
2856     }
2857   if (id->dependence_map)
2858     {
2859       delete id->dependence_map;
2860       id->dependence_map = NULL;
2861     }
2862 
2863   return new_fndecl;
2864 }
2865 
2866 /* Copy the debug STMT using ID.  We deal with these statements in a
2867    special way: if any variable in their VALUE expression wasn't
2868    remapped yet, we won't remap it, because that would get decl uids
2869    out of sync, causing codegen differences between -g and -g0.  If
2870    this arises, we drop the VALUE expression altogether.  */
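/* For instance (a hypothetical dump fragment, not taken from the
   sources): a bind  # DEBUG y => x + 1  whose value refers to a decl
   that was never remapped is copied with its value dropped, i.e. as a
   bind of y with no value, rather than remapping the decl only for the
   -g compilation.  */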
2871 
2872 static void
2873 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2874 {
2875   tree t, *n;
2876   struct walk_stmt_info wi;
2877 
2878   if (gimple_block (stmt))
2879     {
2880       n = id->decl_map->get (gimple_block (stmt));
2881       gimple_set_block (stmt, n ? *n : id->block);
2882     }
2883 
2884   /* Remap all the operands in COPY.  */
2885   memset (&wi, 0, sizeof (wi));
2886   wi.info = id;
2887 
2888   processing_debug_stmt = 1;
2889 
2890   if (gimple_debug_source_bind_p (stmt))
2891     t = gimple_debug_source_bind_get_var (stmt);
2892   else
2893     t = gimple_debug_bind_get_var (stmt);
2894 
2895   if (TREE_CODE (t) == PARM_DECL && id->debug_map
2896       && (n = id->debug_map->get (t)))
2897     {
2898       gcc_assert (TREE_CODE (*n) == VAR_DECL);
2899       t = *n;
2900     }
2901   else if (TREE_CODE (t) == VAR_DECL
2902 	   && !is_global_var (t)
2903 	   && !id->decl_map->get (t))
2904     /* T is a non-localized variable.  */;
2905   else
2906     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2907 
2908   if (gimple_debug_bind_p (stmt))
2909     {
2910       gimple_debug_bind_set_var (stmt, t);
2911 
2912       if (gimple_debug_bind_has_value_p (stmt))
2913 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2914 		   remap_gimple_op_r, &wi, NULL);
2915 
2916       /* Punt if any decl couldn't be remapped.  */
2917       if (processing_debug_stmt < 0)
2918 	gimple_debug_bind_reset_value (stmt);
2919     }
2920   else if (gimple_debug_source_bind_p (stmt))
2921     {
2922       gimple_debug_source_bind_set_var (stmt, t);
2923       /* When inlining, if the source bind refers to one of the optimized-away
2924 	 parameters, change the source bind into a normal debug bind
2925 	 referring to the corresponding DEBUG_EXPR_DECL that should have
2926 	 been bound before the call stmt.  */
2927       t = gimple_debug_source_bind_get_value (stmt);
2928       if (t != NULL_TREE
2929 	  && TREE_CODE (t) == PARM_DECL
2930 	  && id->call_stmt)
2931 	{
2932 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2933 	  unsigned int i;
2934 	  if (debug_args != NULL)
2935 	    {
2936 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2937 		if ((**debug_args)[i] == DECL_ORIGIN (t)
2938 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2939 		  {
2940 		    t = (**debug_args)[i + 1];
2941 		    stmt->subcode = GIMPLE_DEBUG_BIND;
2942 		    gimple_debug_bind_set_value (stmt, t);
2943 		    break;
2944 		  }
2945 	    }
2946 	}
2947       if (gimple_debug_source_bind_p (stmt))
2948 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2949 		   remap_gimple_op_r, &wi, NULL);
2950     }
2951 
2952   processing_debug_stmt = 0;
2953 
2954   update_stmt (stmt);
2955 }
2956 
2957 /* Process deferred debug stmts.  In order to give values better odds
2958    of being successfully remapped, we delay the processing of debug
2959    stmts until all other stmts that might require remapping are
2960    processed.  */
2961 
2962 static void
2963 copy_debug_stmts (copy_body_data *id)
2964 {
2965   size_t i;
2966   gdebug *stmt;
2967 
2968   if (!id->debug_stmts.exists ())
2969     return;
2970 
2971   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2972     copy_debug_stmt (stmt, id);
2973 
2974   id->debug_stmts.release ();
2975 }
2976 
2977 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2978    another function.  */
2979 
2980 static tree
2981 copy_tree_body (copy_body_data *id)
2982 {
2983   tree fndecl = id->src_fn;
2984   tree body = DECL_SAVED_TREE (fndecl);
2985 
2986   walk_tree (&body, copy_tree_body_r, id, NULL);
2987 
2988   return body;
2989 }
2990 
2991 /* Make a copy of the body of FN so that it can be inserted inline in
2992    another function.  */
2993 
2994 static tree
2995 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2996 	   basic_block entry_block_map, basic_block exit_block_map,
2997 	   basic_block new_entry)
2998 {
2999   tree fndecl = id->src_fn;
3000   tree body;
3001 
3002   /* If this body has a CFG, walk CFG and copy.  */
3003   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3004   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
3005 			new_entry);
3006   copy_debug_stmts (id);
3007 
3008   return body;
3009 }
3010 
3011 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3012    defined in function FN, or of a data member thereof.  */
3013 
3014 static bool
3015 self_inlining_addr_expr (tree value, tree fn)
3016 {
3017   tree var;
3018 
3019   if (TREE_CODE (value) != ADDR_EXPR)
3020     return false;
3021 
3022   var = get_base_address (TREE_OPERAND (value, 0));
3023 
3024   return var && auto_var_in_fn_p (var, fn);
3025 }
3026 
3027 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3028    lexical block and line number information from base_stmt, if given,
3029    or from the last stmt of the block otherwise.  */
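/* For example (illustrative only): when setup_one_parameter propagates
   a constant 5 for a parameter p instead of emitting  p = 5, this
   helper can still emit a  # DEBUG p => 5  bind under
   -fvar-tracking-assignments so the value remains visible to the
   debugger.  */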
3030 
3031 static gimple
3032 insert_init_debug_bind (copy_body_data *id,
3033 			basic_block bb, tree var, tree value,
3034 			gimple base_stmt)
3035 {
3036   gimple note;
3037   gimple_stmt_iterator gsi;
3038   tree tracked_var;
3039 
3040   if (!gimple_in_ssa_p (id->src_cfun))
3041     return NULL;
3042 
3043   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3044     return NULL;
3045 
3046   tracked_var = target_for_debug_bind (var);
3047   if (!tracked_var)
3048     return NULL;
3049 
3050   if (bb)
3051     {
3052       gsi = gsi_last_bb (bb);
3053       if (!base_stmt && !gsi_end_p (gsi))
3054 	base_stmt = gsi_stmt (gsi);
3055     }
3056 
3057   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3058 
3059   if (bb)
3060     {
3061       if (!gsi_end_p (gsi))
3062 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3063       else
3064 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3065     }
3066 
3067   return note;
3068 }
3069 
3070 static void
3071 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
3072 {
3073   /* If VAR represents a zero-sized variable, it's possible that the
3074      assignment statement may result in no gimple statements.  */
3075   if (init_stmt)
3076     {
3077       gimple_stmt_iterator si = gsi_last_bb (bb);
3078 
3079       /* We can end up with init statements that store to a non-register
3080          from a rhs with a conversion.  Handle that here by forcing the
3081 	 rhs into a temporary.  gimple_regimplify_operands is not
3082 	 prepared to do this for us.  */
3083       if (!is_gimple_debug (init_stmt)
3084 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3085 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3086 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3087 	{
3088 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3089 			     gimple_expr_type (init_stmt),
3090 			     gimple_assign_rhs1 (init_stmt));
3091 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3092 					  GSI_NEW_STMT);
3093 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3094 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3095 	}
3096       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3097       gimple_regimplify_operands (init_stmt, &si);
3098 
3099       if (!is_gimple_debug (init_stmt))
3100 	{
3101 	  tree def = gimple_assign_lhs (init_stmt);
3102 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3103 	}
3104     }
3105 }
3106 
3107 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3108    at the end of BB.  When BB is NULL, we return the init statement to be
3109    output later.  */
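/* A hypothetical walk-through (names invented for illustration):
   inlining  int sq (int p) { return p * p; }  at a call  sq (a_5)
   creates a local VAR_DECL for P and maps P to it; when P has a
   default SSA def, is a gimple register and the actual argument is
   already an SSA name or invariant, the default def is simply mapped
   to a_5 and no init statement is needed.  Otherwise an init statement
   p = a_5  is produced at the end of BB.  */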
3110 static gimple
3111 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3112 		     basic_block bb, tree *vars)
3113 {
3114   gimple init_stmt = NULL;
3115   tree var;
3116   tree rhs = value;
3117   tree def = (gimple_in_ssa_p (cfun)
3118 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3119 
3120   if (value
3121       && value != error_mark_node
3122       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3123     {
3124       /* If we can match up types by promotion/demotion do so.  */
3125       if (fold_convertible_p (TREE_TYPE (p), value))
3126 	rhs = fold_convert (TREE_TYPE (p), value);
3127       else
3128 	{
3129 	  /* ???  For valid programs we should not end up here.
3130 	     Still if we end up with truly mismatched types here, fall back
3131 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3132 	     GIMPLE to the following passes.  */
3133 	  if (!is_gimple_reg_type (TREE_TYPE (value))
3134 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3135 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3136 	  else
3137 	    rhs = build_zero_cst (TREE_TYPE (p));
3138 	}
3139     }
3140 
3141   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3142      here since the type of this decl must be visible to the calling
3143      function.  */
3144   var = copy_decl_to_var (p, id);
3145 
3146   /* Declare this new variable.  */
3147   DECL_CHAIN (var) = *vars;
3148   *vars = var;
3149 
3150   /* Make gimplifier happy about this variable.  */
3151   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3152 
3153   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3154      we would not need to create a new variable here at all, if it
3155      weren't for debug info.  Still, we can just use the argument
3156      value.  */
3157   if (TREE_READONLY (p)
3158       && !TREE_ADDRESSABLE (p)
3159       && value && !TREE_SIDE_EFFECTS (value)
3160       && !def)
3161     {
3162       /* We may produce non-gimple trees by adding NOPs or introduce
3163 	 invalid sharing when the operand is not really constant.
3164 	 It is not a big deal to prohibit constant propagation here, as
3165 	 we will constant propagate in the DOM1 pass anyway.  */
3166       if (is_gimple_min_invariant (value)
3167 	  && useless_type_conversion_p (TREE_TYPE (p),
3168 						 TREE_TYPE (value))
3169 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3170 	     the base variable isn't a local variable of the inlined
3171 	     function, e.g., when doing recursive inlining, direct or
3172 	     mutually-recursive or whatever, which is why we don't
3173 	     just test whether fn == current_function_decl.  */
3174 	  && ! self_inlining_addr_expr (value, fn))
3175 	{
3176 	  insert_decl_map (id, p, value);
3177 	  insert_debug_decl_map (id, p, var);
3178 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3179 	}
3180     }
3181 
3182   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3183      that way, when the PARM_DECL is encountered, it will be
3184      automatically replaced by the VAR_DECL.  */
3185   insert_decl_map (id, p, var);
3186 
3187   /* Even if P was TREE_READONLY, the new VAR should not be.
3188      In the original code, we would have constructed a
3189      temporary, and then the function body would have never
3190      changed the value of P.  However, now, we will be
3191      constructing VAR directly.  The constructor body may
3192      change its value multiple times as it is being
3193      constructed.  Therefore, it must not be TREE_READONLY;
3194      the back-end assumes that TREE_READONLY variable is
3195      assigned to only once.  */
3196   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3197     TREE_READONLY (var) = 0;
3198 
3199   /* If there is no setup required and we are in SSA, take the easy route
3200      replacing all SSA names representing the function parameter by the
3201      SSA name passed to the function.
3202 
3203      We need to construct a map for the variable anyway, as it might be
3204      used in different SSA names when the parameter is set in the function.
3205 
3206      Do the replacement at -O0 for const arguments replaced by a constant.
3207      This is important for builtin_constant_p and other constructs requiring
3208      a constant argument to be visible in the inlined function body.  */
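
  /* For example (illustrative): for a call "f (5)" where the const
     parameter P is never written, the SSA default definition of P is
     simply mapped to the constant 5 even at -O0, which is what lets
     constructs like __builtin_constant_p (p) see the constant in the
     inlined body.  */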
3209   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3210       && (optimize
3211           || (TREE_READONLY (p)
3212 	      && is_gimple_min_invariant (rhs)))
3213       && (TREE_CODE (rhs) == SSA_NAME
3214 	  || is_gimple_min_invariant (rhs))
3215       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3216     {
3217       insert_decl_map (id, def, rhs);
3218       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3219     }
3220 
3221   /* If the value of the argument is never used, don't bother initializing
3222      it.  */
3223   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3224     {
3225       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3226       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3227     }
3228 
3229   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3230      the argument to the proper type in case it was promoted.  */
3231   if (value)
3232     {
3233       if (rhs == error_mark_node)
3234 	{
3235 	  insert_decl_map (id, p, var);
3236 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3237 	}
3238 
3239       STRIP_USELESS_TYPE_CONVERSION (rhs);
3240 
3241       /* If we are in SSA form, properly remap the default definition,
3242          or assign to a dummy SSA name if the parameter is unused and
3243 	 we are not optimizing.  */
3244       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3245 	{
3246 	  if (def)
3247 	    {
3248 	      def = remap_ssa_name (def, id);
3249 	      init_stmt = gimple_build_assign (def, rhs);
3250 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3251 	      set_ssa_default_def (cfun, var, NULL);
3252 	    }
3253 	  else if (!optimize)
3254 	    {
3255 	      def = make_ssa_name (var);
3256 	      init_stmt = gimple_build_assign (def, rhs);
3257 	    }
3258 	}
3259       else
3260         init_stmt = gimple_build_assign (var, rhs);
3261 
3262       if (bb && init_stmt)
3263         insert_init_stmt (id, bb, init_stmt);
3264     }
3265   return init_stmt;
3266 }
3267 
3268 /* Generate code to initialize the parameters of the function at the
3269    top of the stack in ID from the GIMPLE_CALL STMT.  */
3270 
3271 static void
3272 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3273 			       tree fn, basic_block bb)
3274 {
3275   tree parms;
3276   size_t i;
3277   tree p;
3278   tree vars = NULL_TREE;
3279   tree static_chain = gimple_call_chain (stmt);
3280 
3281   /* Figure out what the parameters are.  */
3282   parms = DECL_ARGUMENTS (fn);
3283 
3284   /* Loop through the parameter declarations, replacing each with an
3285      equivalent VAR_DECL, appropriately initialized.  */
3286   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3287     {
3288       tree val;
3289       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3290       setup_one_parameter (id, p, val, fn, bb, &vars);
3291     }
3292   /* After remapping parameters remap their types.  This has to be done
3293      in a second loop over all parameters to appropriately remap
3294      variable sized arrays when the size is specified in a
3295      parameter following the array.  */
3296   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3297     {
3298       tree *varp = id->decl_map->get (p);
3299       if (varp
3300 	  && TREE_CODE (*varp) == VAR_DECL)
3301 	{
3302 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3303 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3304 	  tree var = *varp;
3305 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3306 	  /* Also remap the default definition if it was remapped
3307 	     to the default definition of the parameter replacement
3308 	     by the parameter setup.  */
3309 	  if (def)
3310 	    {
3311 	      tree *defp = id->decl_map->get (def);
3312 	      if (defp
3313 		  && TREE_CODE (*defp) == SSA_NAME
3314 		  && SSA_NAME_VAR (*defp) == var)
3315 		TREE_TYPE (*defp) = TREE_TYPE (var);
3316 	    }
3317 	}
3318     }
3319 
3320   /* Initialize the static chain.  */
3321   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3322   gcc_assert (fn != current_function_decl);
3323   if (p)
3324     {
3325       /* No static chain?  Seems like a bug in tree-nested.c.  */
3326       gcc_assert (static_chain);
3327 
3328       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3329     }
3330 
3331   declare_inline_vars (id->block, vars);
3332 }
3333 
3334 
3335 /* Declare a return variable to replace the RESULT_DECL for the
3336    function we are calling.  The new variable is registered as the
3337    replacement for the RESULT_DECL, and an expression the caller can
3338    use for the return value of the function is returned.
3339 
3340    RETURN_SLOT, if non-null, is the place where to store the result.  It
3341    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3342    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3343 
3344    RETURN_BOUNDS holds a destination for returned bounds.
3345 
3346    The return value is a (possibly null) value that holds the result
3347    as seen by the caller.  */
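
/* For instance (an illustrative sketch): for a call "x = foo ()",
   MODIFY_DEST is "x"; if the inlined body can safely store into "x"
   directly, "x" itself is reused, otherwise a fresh temporary is
   declared and an expression referring to it is returned for the
   caller to use.  */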
3348 
3349 static tree
3350 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3351 			 tree return_bounds, basic_block entry_bb)
3352 {
3353   tree callee = id->src_fn;
3354   tree result = DECL_RESULT (callee);
3355   tree callee_type = TREE_TYPE (result);
3356   tree caller_type;
3357   tree var, use;
3358 
3359   /* Handle type-mismatches in the function declaration return type
3360      vs. the call expression.  */
3361   if (modify_dest)
3362     caller_type = TREE_TYPE (modify_dest);
3363   else
3364     caller_type = TREE_TYPE (TREE_TYPE (callee));
3365 
3366   /* We don't need to do anything for functions that don't return anything.  */
3367   if (VOID_TYPE_P (callee_type))
3368     return NULL_TREE;
3369 
3370   /* If there was a return slot, then the return value is the
3371      dereferenced address of that object.  */
3372   if (return_slot)
3373     {
3374       /* The front end shouldn't have used both return_slot and
3375 	 a modify expression.  */
3376       gcc_assert (!modify_dest);
3377       if (DECL_BY_REFERENCE (result))
3378 	{
3379 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3380 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3381 
3382 	  /* We are going to construct *&return_slot and we can't do that
3383 	     for variables believed not to be addressable.
3384 
3385 	     FIXME: This check can possibly trigger, because values returned
3386 	     via the return slot optimization are not believed to have their
3387 	     address taken by alias analysis.  */
3388 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3389 	  var = return_slot_addr;
3390 	}
3391       else
3392 	{
3393 	  var = return_slot;
3394 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3395 	  if (TREE_ADDRESSABLE (result))
3396 	    mark_addressable (var);
3397 	}
3398       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3399            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3400 	  && !DECL_GIMPLE_REG_P (result)
3401 	  && DECL_P (var))
3402 	DECL_GIMPLE_REG_P (var) = 0;
3403       use = NULL;
3404       goto done;
3405     }
3406 
3407   /* All types requiring non-trivial constructors should have been handled.  */
3408   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3409 
3410   /* Attempt to avoid creating a new temporary variable.  */
3411   if (modify_dest
3412       && TREE_CODE (modify_dest) != SSA_NAME)
3413     {
3414       bool use_it = false;
3415 
3416       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3417       if (!useless_type_conversion_p (callee_type, caller_type))
3418 	use_it = false;
3419 
3420       /* ??? If we're assigning to a variable sized type, then we must
3421 	 reuse the destination variable, because we've no good way to
3422 	 create variable sized temporaries at this point.  */
3423       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3424 	use_it = true;
3425 
3426       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3427 	 reuse it as the result of the call directly.  Don't do this if
3428 	 it would promote MODIFY_DEST to addressable.  */
3429       else if (TREE_ADDRESSABLE (result))
3430 	use_it = false;
3431       else
3432 	{
3433 	  tree base_m = get_base_address (modify_dest);
3434 
3435 	  /* If the base isn't a decl, then it's a pointer, and we don't
3436 	     know where that's going to go.  */
3437 	  if (!DECL_P (base_m))
3438 	    use_it = false;
3439 	  else if (is_global_var (base_m))
3440 	    use_it = false;
3441 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3442 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3443 		   && !DECL_GIMPLE_REG_P (result)
3444 		   && DECL_GIMPLE_REG_P (base_m))
3445 	    use_it = false;
3446 	  else if (!TREE_ADDRESSABLE (base_m))
3447 	    use_it = true;
3448 	}
3449 
3450       if (use_it)
3451 	{
3452 	  var = modify_dest;
3453 	  use = NULL;
3454 	  goto done;
3455 	}
3456     }
3457 
3458   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3459 
3460   var = copy_result_decl_to_var (result, id);
3461   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3462 
3463   /* Do not have the rest of GCC warn about this variable as it should
3464      not be visible to the user.  */
3465   TREE_NO_WARNING (var) = 1;
3466 
3467   declare_inline_vars (id->block, var);
3468 
3469   /* Build the use expr.  If the return type of the function was
3470      promoted, convert it back to the expected type.  */
3471   use = var;
3472   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3473     {
3474       /* If we can match up types by promotion/demotion do so.  */
3475       if (fold_convertible_p (caller_type, var))
3476 	use = fold_convert (caller_type, var);
3477       else
3478 	{
3479 	  /* ???  For valid programs we should not end up here.
3480 	     Still if we end up with truly mismatched types here, fall back
3481 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3482 	     passes.  */
3483 	  /* Prevent var from being written into SSA form.  */
3484 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3485 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3486 	    DECL_GIMPLE_REG_P (var) = false;
3487 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3488 	    TREE_ADDRESSABLE (var) = true;
3489 	  use = fold_build2 (MEM_REF, caller_type,
3490 			     build_fold_addr_expr (var),
3491 			     build_int_cst (ptr_type_node, 0));
3492 	}
3493     }
3494 
3495   STRIP_USELESS_TYPE_CONVERSION (use);
3496 
3497   if (DECL_BY_REFERENCE (result))
3498     {
3499       TREE_ADDRESSABLE (var) = 1;
3500       var = build_fold_addr_expr (var);
3501     }
3502 
3503  done:
3504   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3505      way, when the RESULT_DECL is encountered, it will be
3506      automatically replaced by the VAR_DECL.
3507 
3508      When returning by reference, ensure that RESULT_DECL remaps to
3509      gimple_val.  */
3510   if (DECL_BY_REFERENCE (result)
3511       && !is_gimple_val (var))
3512     {
3513       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3514       insert_decl_map (id, result, temp);
3515       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3516 	 its default_def SSA_NAME.  */
3517       if (gimple_in_ssa_p (id->src_cfun)
3518 	  && is_gimple_reg (result))
3519 	{
3520 	  temp = make_ssa_name (temp);
3521 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3522 	}
3523       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3524     }
3525   else
3526     insert_decl_map (id, result, var);
3527 
3528   /* Remember this so we can ignore it in remap_decls.  */
3529   id->retvar = var;
3530 
3531   /* If returned bounds are used, then make a var for them.  */
3532   if (return_bounds)
3533   {
3534     tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3535     DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3536     TREE_NO_WARNING (bndtemp) = 1;
3537     declare_inline_vars (id->block, bndtemp);
3538 
3539     id->retbnd = bndtemp;
3540     insert_init_stmt (id, entry_bb,
3541 		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3542   }
3543 
3544   return use;
3545 }
3546 
3547 /* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
3548    to a local label.  */
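
/* For example (illustrative), such a reference typically arises from
   GNU C label-address initializers inside the function, e.g.

     static void *labels[] = { &&lab1, &&lab2 };

   copy_forbidden below refuses to copy functions whose static locals
   capture label addresses this way.  */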
3549 
3550 static tree
3551 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3552 {
3553   tree node = *nodep;
3554   tree fn = (tree) fnp;
3555 
3556   if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3557     return node;
3558 
3559   if (TYPE_P (node))
3560     *walk_subtrees = 0;
3561 
3562   return NULL_TREE;
3563 }
3564 
3565 /* Determine if the function can be copied.  If so, return NULL.  If
3566    not, return a string describing the reason for failure.  */
3567 
3568 const char *
3569 copy_forbidden (struct function *fun, tree fndecl)
3570 {
3571   const char *reason = fun->cannot_be_copied_reason;
3572   tree decl;
3573   unsigned ix;
3574 
3575   /* Only examine the function once.  */
3576   if (fun->cannot_be_copied_set)
3577     return reason;
3578 
3579   /* We cannot copy a function that receives a non-local goto
3580      because we cannot remap the destination label used in the
3581      function that is performing the non-local goto.  */
3582   /* ??? Actually, this should be possible, if we work at it.
3583      No doubt there's just a handful of places that simply
3584      assume it doesn't happen and don't substitute properly.  */
3585   if (fun->has_nonlocal_label)
3586     {
3587       reason = G_("function %q+F can never be copied "
3588 		  "because it receives a non-local goto");
3589       goto fail;
3590     }
3591 
3592   FOR_EACH_LOCAL_DECL (fun, ix, decl)
3593     if (TREE_CODE (decl) == VAR_DECL
3594 	&& TREE_STATIC (decl)
3595 	&& !DECL_EXTERNAL (decl)
3596 	&& DECL_INITIAL (decl)
3597 	&& walk_tree_without_duplicates (&DECL_INITIAL (decl),
3598 					 has_label_address_in_static_1,
3599 					 fndecl))
3600       {
3601 	reason = G_("function %q+F can never be copied because it saves "
3602 		    "address of local label in a static variable");
3603 	goto fail;
3604       }
3605 
3606  fail:
3607   fun->cannot_be_copied_reason = reason;
3608   fun->cannot_be_copied_set = true;
3609   return reason;
3610 }
3611 
3612 
3613 static const char *inline_forbidden_reason;
3614 
3615 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3616    iff a function cannot be inlined.  Also sets the reason why.  */
3617 
3618 static tree
3619 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3620 			 struct walk_stmt_info *wip)
3621 {
3622   tree fn = (tree) wip->info;
3623   tree t;
3624   gimple stmt = gsi_stmt (*gsi);
3625 
3626   switch (gimple_code (stmt))
3627     {
3628     case GIMPLE_CALL:
3629       /* Refuse to inline an alloca call unless the user explicitly forced
3630 	 it, as this may change the program's memory overhead drastically
3631 	 when the function using alloca is called in a loop.  In the GCC
3632 	 present in SPEC2000, inlining into schedule_block caused it to
3633 	 require 2GB of RAM instead of 256MB.  Don't do so for alloca calls
3634 	 emitted for VLA objects, as those can't cause unbounded growth
3635 	 (they're always wrapped inside stack_save/stack_restore regions).  */
3636       if (gimple_alloca_call_p (stmt)
3637 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3638 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3639 	{
3640 	  inline_forbidden_reason
3641 	    = G_("function %q+F can never be inlined because it uses "
3642 		 "alloca (override using the always_inline attribute)");
3643 	  *handled_ops_p = true;
3644 	  return fn;
3645 	}
3646 
3647       t = gimple_call_fndecl (stmt);
3648       if (t == NULL_TREE)
3649 	break;
3650 
3651       /* We cannot inline functions that call setjmp.  */
3652       if (setjmp_call_p (t))
3653 	{
3654 	  inline_forbidden_reason
3655 	    = G_("function %q+F can never be inlined because it uses setjmp");
3656 	  *handled_ops_p = true;
3657 	  return t;
3658 	}
3659 
3660       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3661 	switch (DECL_FUNCTION_CODE (t))
3662 	  {
3663 	    /* We cannot inline functions that take a variable number of
3664 	       arguments.  */
3665 	  case BUILT_IN_VA_START:
3666 	  case BUILT_IN_NEXT_ARG:
3667 	  case BUILT_IN_VA_END:
3668 	    inline_forbidden_reason
3669 	      = G_("function %q+F can never be inlined because it "
3670 		   "uses variable argument lists");
3671 	    *handled_ops_p = true;
3672 	    return t;
3673 
3674 	  case BUILT_IN_LONGJMP:
3675 	    /* We can't inline functions that call __builtin_longjmp at
3676 	       all.  The non-local goto machinery really requires the
3677 	       destination be in a different function.  If we allow the
3678 	       function calling __builtin_longjmp to be inlined into the
3679 	       function calling __builtin_setjmp, Things will Go Awry.  */
3680 	    inline_forbidden_reason
3681 	      = G_("function %q+F can never be inlined because "
3682 		   "it uses setjmp-longjmp exception handling");
3683 	    *handled_ops_p = true;
3684 	    return t;
3685 
3686 	  case BUILT_IN_NONLOCAL_GOTO:
3687 	    /* Similarly.  */
3688 	    inline_forbidden_reason
3689 	      = G_("function %q+F can never be inlined because "
3690 		   "it uses non-local goto");
3691 	    *handled_ops_p = true;
3692 	    return t;
3693 
3694 	  case BUILT_IN_RETURN:
3695 	  case BUILT_IN_APPLY_ARGS:
3696 	    /* If a __builtin_apply_args caller would be inlined,
3697 	       it would be saving arguments of the function it has
3698 	       been inlined into.  Similarly, __builtin_return would
3699 	       return from the function it has been inlined into.  */
3700 	    inline_forbidden_reason
3701 	      = G_("function %q+F can never be inlined because "
3702 		   "it uses __builtin_return or __builtin_apply_args");
3703 	    *handled_ops_p = true;
3704 	    return t;
3705 
3706 	  default:
3707 	    break;
3708 	  }
3709       break;
3710 
3711     case GIMPLE_GOTO:
3712       t = gimple_goto_dest (stmt);
3713 
3714       /* We will not inline a function which uses computed goto.  The
3715 	 addresses of its local labels, which may be tucked into
3716 	 global storage, are of course not constant across
3717 	 instantiations, which causes unexpected behavior.  */
3718       if (TREE_CODE (t) != LABEL_DECL)
3719 	{
3720 	  inline_forbidden_reason
3721 	    = G_("function %q+F can never be inlined "
3722 		 "because it contains a computed goto");
3723 	  *handled_ops_p = true;
3724 	  return t;
3725 	}
3726       break;
3727 
3728     default:
3729       break;
3730     }
3731 
3732   *handled_ops_p = false;
3733   return NULL_TREE;
3734 }
3735 
3736 /* Return true if FNDECL is a function that cannot be inlined into
3737    another one.  */
3738 
3739 static bool
3740 inline_forbidden_p (tree fndecl)
3741 {
3742   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3743   struct walk_stmt_info wi;
3744   basic_block bb;
3745   bool forbidden_p = false;
3746 
3747   /* First check for shared reasons not to copy the code.  */
3748   inline_forbidden_reason = copy_forbidden (fun, fndecl);
3749   if (inline_forbidden_reason != NULL)
3750     return true;
3751 
3752   /* Next, walk the statements of the function looking for
3753      constructs we can't handle, or that are non-optimal for inlining.  */
3754   hash_set<tree> visited_nodes;
3755   memset (&wi, 0, sizeof (wi));
3756   wi.info = (void *) fndecl;
3757   wi.pset = &visited_nodes;
3758 
3759   FOR_EACH_BB_FN (bb, fun)
3760     {
3761       gimple ret;
3762       gimple_seq seq = bb_seq (bb);
3763       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3764       forbidden_p = (ret != NULL);
3765       if (forbidden_p)
3766 	break;
3767     }
3768 
3769   return forbidden_p;
3770 }
3771 
3772 /* Return false if the function FNDECL cannot be inlined on account of its
3773    attributes, true otherwise.  */
3774 static bool
3775 function_attribute_inlinable_p (const_tree fndecl)
3776 {
3777   if (targetm.attribute_table)
3778     {
3779       const_tree a;
3780 
3781       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3782 	{
3783 	  const_tree name = TREE_PURPOSE (a);
3784 	  int i;
3785 
3786 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3787 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3788 	      return targetm.function_attribute_inlinable_p (fndecl);
3789 	}
3790     }
3791 
3792   return true;
3793 }
3794 
3795 /* Returns nonzero if FN is a function that does not have any
3796    fundamental inline blocking properties.  */
3797 
3798 bool
3799 tree_inlinable_function_p (tree fn)
3800 {
3801   bool inlinable = true;
3802   bool do_warning;
3803   tree always_inline;
3804 
3805   /* If we've already decided this function shouldn't be inlined,
3806      there's no need to check again.  */
3807   if (DECL_UNINLINABLE (fn))
3808     return false;
3809 
3810   /* We only warn for functions declared `inline' by the user.  */
3811   do_warning = (warn_inline
3812 		&& DECL_DECLARED_INLINE_P (fn)
3813 		&& !DECL_NO_INLINE_WARNING_P (fn)
3814 		&& !DECL_IN_SYSTEM_HEADER (fn));
3815 
3816   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3817 
3818   if (flag_no_inline
3819       && always_inline == NULL)
3820     {
3821       if (do_warning)
3822         warning (OPT_Winline, "function %q+F can never be inlined because it "
3823                  "is suppressed using -fno-inline", fn);
3824       inlinable = false;
3825     }
3826 
3827   else if (!function_attribute_inlinable_p (fn))
3828     {
3829       if (do_warning)
3830         warning (OPT_Winline, "function %q+F can never be inlined because it "
3831                  "uses attributes conflicting with inlining", fn);
3832       inlinable = false;
3833     }
3834 
3835   else if (inline_forbidden_p (fn))
3836     {
3837       /* See if we should warn about uninlinable functions.  Previously,
3838 	 some of these warnings would be issued while trying to expand
3839 	 the function inline, but that would cause multiple warnings
3840 	 about functions that would for example call alloca.  But since
3841 	 this is a property of the function, just one warning is enough.
3842 	 As a bonus we can now give more details about the reason why a
3843 	 function is not inlinable.  */
3844       if (always_inline)
3845 	error (inline_forbidden_reason, fn);
3846       else if (do_warning)
3847 	warning (OPT_Winline, inline_forbidden_reason, fn);
3848 
3849       inlinable = false;
3850     }
3851 
3852   /* Squirrel away the result so that we don't have to check again.  */
3853   DECL_UNINLINABLE (fn) = !inlinable;
3854 
3855   return inlinable;
3856 }
3857 
3858 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3859    word size, take a possible memcpy call into account, and return the
3860    cost based on whether we optimize for size or speed according to SPEED_P.  */
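
/* For example, assuming a target where MOVE_MAX_PIECES is 8 and the
   limit below is not exceeded, a 24-byte aggregate is charged
   (24 + 8 - 1) / 8 = 3 units, while anything larger than
   MOVE_MAX_PIECES * MOVE_RATIO falls back to the flat memcpy cost of 4.  */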
3861 
3862 int
3863 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3864 {
3865   HOST_WIDE_INT size;
3866 
3867   gcc_assert (!VOID_TYPE_P (type));
3868 
3869   if (TREE_CODE (type) == VECTOR_TYPE)
3870     {
3871       machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3872       machine_mode simd
3873 	= targetm.vectorize.preferred_simd_mode (inner);
3874       int simd_mode_size = GET_MODE_SIZE (simd);
3875       return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3876 	      / simd_mode_size);
3877     }
3878 
3879   size = int_size_in_bytes (type);
3880 
3881   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3882     /* Cost of a memcpy call, 3 arguments and the call.  */
3883     return 4;
3884   else
3885     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3886 }
3887 
3888 /* Returns the cost of operation CODE, according to WEIGHTS.  */
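
/* For instance, with the eni_time_weights set up in init_inline_once
   below, "x / y" with a non-constant divisor is charged div_mod_cost
   (10), whereas a plain "x + y" is charged 1.  */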
3889 
3890 static int
3891 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3892 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3893 {
3894   switch (code)
3895     {
3896     /* These are "free" conversions, or their presumed cost
3897        is folded into other operations.  */
3898     case RANGE_EXPR:
3899     CASE_CONVERT:
3900     case COMPLEX_EXPR:
3901     case PAREN_EXPR:
3902     case VIEW_CONVERT_EXPR:
3903       return 0;
3904 
3905     /* Assign cost of 1 to usual operations.
3906        ??? We may consider mapping RTL costs to this.  */
3907     case COND_EXPR:
3908     case VEC_COND_EXPR:
3909     case VEC_PERM_EXPR:
3910 
3911     case PLUS_EXPR:
3912     case POINTER_PLUS_EXPR:
3913     case MINUS_EXPR:
3914     case MULT_EXPR:
3915     case MULT_HIGHPART_EXPR:
3916     case FMA_EXPR:
3917 
3918     case ADDR_SPACE_CONVERT_EXPR:
3919     case FIXED_CONVERT_EXPR:
3920     case FIX_TRUNC_EXPR:
3921 
3922     case NEGATE_EXPR:
3923     case FLOAT_EXPR:
3924     case MIN_EXPR:
3925     case MAX_EXPR:
3926     case ABS_EXPR:
3927 
3928     case LSHIFT_EXPR:
3929     case RSHIFT_EXPR:
3930     case LROTATE_EXPR:
3931     case RROTATE_EXPR:
3932 
3933     case BIT_IOR_EXPR:
3934     case BIT_XOR_EXPR:
3935     case BIT_AND_EXPR:
3936     case BIT_NOT_EXPR:
3937 
3938     case TRUTH_ANDIF_EXPR:
3939     case TRUTH_ORIF_EXPR:
3940     case TRUTH_AND_EXPR:
3941     case TRUTH_OR_EXPR:
3942     case TRUTH_XOR_EXPR:
3943     case TRUTH_NOT_EXPR:
3944 
3945     case LT_EXPR:
3946     case LE_EXPR:
3947     case GT_EXPR:
3948     case GE_EXPR:
3949     case EQ_EXPR:
3950     case NE_EXPR:
3951     case ORDERED_EXPR:
3952     case UNORDERED_EXPR:
3953 
3954     case UNLT_EXPR:
3955     case UNLE_EXPR:
3956     case UNGT_EXPR:
3957     case UNGE_EXPR:
3958     case UNEQ_EXPR:
3959     case LTGT_EXPR:
3960 
3961     case CONJ_EXPR:
3962 
3963     case PREDECREMENT_EXPR:
3964     case PREINCREMENT_EXPR:
3965     case POSTDECREMENT_EXPR:
3966     case POSTINCREMENT_EXPR:
3967 
3968     case REALIGN_LOAD_EXPR:
3969 
3970     case REDUC_MAX_EXPR:
3971     case REDUC_MIN_EXPR:
3972     case REDUC_PLUS_EXPR:
3973     case WIDEN_SUM_EXPR:
3974     case WIDEN_MULT_EXPR:
3975     case DOT_PROD_EXPR:
3976     case SAD_EXPR:
3977     case WIDEN_MULT_PLUS_EXPR:
3978     case WIDEN_MULT_MINUS_EXPR:
3979     case WIDEN_LSHIFT_EXPR:
3980 
3981     case VEC_WIDEN_MULT_HI_EXPR:
3982     case VEC_WIDEN_MULT_LO_EXPR:
3983     case VEC_WIDEN_MULT_EVEN_EXPR:
3984     case VEC_WIDEN_MULT_ODD_EXPR:
3985     case VEC_UNPACK_HI_EXPR:
3986     case VEC_UNPACK_LO_EXPR:
3987     case VEC_UNPACK_FLOAT_HI_EXPR:
3988     case VEC_UNPACK_FLOAT_LO_EXPR:
3989     case VEC_PACK_TRUNC_EXPR:
3990     case VEC_PACK_SAT_EXPR:
3991     case VEC_PACK_FIX_TRUNC_EXPR:
3992     case VEC_WIDEN_LSHIFT_HI_EXPR:
3993     case VEC_WIDEN_LSHIFT_LO_EXPR:
3994 
3995       return 1;
3996 
3997     /* A few special cases of expensive operations.  This is useful
3998        to avoid inlining functions having too many of these.  */
3999     case TRUNC_DIV_EXPR:
4000     case CEIL_DIV_EXPR:
4001     case FLOOR_DIV_EXPR:
4002     case ROUND_DIV_EXPR:
4003     case EXACT_DIV_EXPR:
4004     case TRUNC_MOD_EXPR:
4005     case CEIL_MOD_EXPR:
4006     case FLOOR_MOD_EXPR:
4007     case ROUND_MOD_EXPR:
4008     case RDIV_EXPR:
4009       if (TREE_CODE (op2) != INTEGER_CST)
4010         return weights->div_mod_cost;
4011       return 1;
4012 
4013     default:
4014       /* We expect a copy assignment with no operator.  */
4015       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4016       return 0;
4017     }
4018 }
4019 
4020 
4021 /* Estimate number of instructions that will be created by expanding
4022    the statements in the statement sequence STMTS.
4023    WEIGHTS contains weights attributed to various constructs.  */
4024 
4025 static
4026 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4027 {
4028   int cost;
4029   gimple_stmt_iterator gsi;
4030 
4031   cost = 0;
4032   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4033     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4034 
4035   return cost;
4036 }
4037 
4038 
4039 /* Estimate number of instructions that will be created by expanding STMT.
4040    WEIGHTS contains weights attributed to various constructs.  */
4041 
4042 int
4043 estimate_num_insns (gimple stmt, eni_weights *weights)
4044 {
4045   unsigned cost, i;
4046   enum gimple_code code = gimple_code (stmt);
4047   tree lhs;
4048   tree rhs;
4049 
4050   switch (code)
4051     {
4052     case GIMPLE_ASSIGN:
4053       /* Try to estimate the cost of assignments.  We have two cases to
4054 	 deal with:
4055 	 1) Simple assignments to registers;
4056 	 2) Stores to things that must live in memory.  This includes
4057 	    "normal" stores to scalars, but also assignments of large
4058 	    structures, or constructors of big arrays;
4059 
4060 	 Let us look at these two cases, assuming we have "a = b + C":
4061 	 <GIMPLE_ASSIGN <var_decl "a">
4062 	        <plus_expr <var_decl "b"> <constant C>>
4063 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4064 	 any target, because "a" usually ends up in a real register.  Hence
4065 	 the only cost of this expression comes from the PLUS_EXPR, and we
4066 	 can ignore the GIMPLE_ASSIGN.
4067 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4068 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4069 	 of moving something into "a", which we compute using the function
4070 	 estimate_move_cost.  */
4071       if (gimple_clobber_p (stmt))
4072 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4073 
4074       lhs = gimple_assign_lhs (stmt);
4075       rhs = gimple_assign_rhs1 (stmt);
4076 
4077       cost = 0;
4078 
4079       /* Account for the cost of moving to / from memory.  */
4080       if (gimple_store_p (stmt))
4081 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4082       if (gimple_assign_load_p (stmt))
4083 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4084 
4085       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4086       				      gimple_assign_rhs1 (stmt),
4087 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4088 				      == GIMPLE_BINARY_RHS
4089 				      ? gimple_assign_rhs2 (stmt) : NULL);
4090       break;
4091 
4092     case GIMPLE_COND:
4093       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4094       				         gimple_op (stmt, 0),
4095 				         gimple_op (stmt, 1));
4096       break;
4097 
4098     case GIMPLE_SWITCH:
4099       {
4100 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4101 	/* Take into account the cost of the switch + guess 2 conditional jumps
4102 	   for each case label.
4103 
4104 	   TODO: once the switch expansion logic is sufficiently separated, we can
4105 	   do a better job of estimating the cost of the switch.  */
4106 	if (weights->time_based)
4107 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4108 	else
4109 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4110       }
4111       break;
4112 
4113     case GIMPLE_CALL:
4114       {
4115 	tree decl;
4116 
4117 	if (gimple_call_internal_p (stmt))
4118 	  return 0;
4119 	else if ((decl = gimple_call_fndecl (stmt))
4120 		 && DECL_BUILT_IN (decl))
4121 	  {
4122 	    /* Do not special-case builtins where we see the body.
4123 	       This just confuses the inliner.  */
4124 	    struct cgraph_node *node;
4125 	    if (!(node = cgraph_node::get (decl))
4126 		|| node->definition)
4127 	      ;
4128 	    /* For builtins that are likely expanded to nothing or
4129 	       inlined, do not account for operand costs.  */
4130 	    else if (is_simple_builtin (decl))
4131 	      return 0;
4132 	    else if (is_inexpensive_builtin (decl))
4133 	      return weights->target_builtin_call_cost;
4134 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4135 	      {
4136 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4137 		   specialize the cheap expansion we do here.
4138 		   ???  This asks for a more general solution.  */
4139 		switch (DECL_FUNCTION_CODE (decl))
4140 		  {
4141 		    case BUILT_IN_POW:
4142 		    case BUILT_IN_POWF:
4143 		    case BUILT_IN_POWL:
4144 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4145 			  && REAL_VALUES_EQUAL
4146 			  (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4147 			return estimate_operator_cost
4148 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4149 			     gimple_call_arg (stmt, 0));
4150 		      break;
4151 
4152 		    default:
4153 		      break;
4154 		  }
4155 	      }
4156 	  }
4157 
4158 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4159 	if (gimple_call_lhs (stmt))
4160 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4161 				      weights->time_based);
4162 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4163 	  {
4164 	    tree arg = gimple_call_arg (stmt, i);
4165 	    cost += estimate_move_cost (TREE_TYPE (arg),
4166 					weights->time_based);
4167 	  }
4168 	break;
4169       }
4170 
4171     case GIMPLE_RETURN:
4172       return weights->return_cost;
4173 
4174     case GIMPLE_GOTO:
4175     case GIMPLE_LABEL:
4176     case GIMPLE_NOP:
4177     case GIMPLE_PHI:
4178     case GIMPLE_PREDICT:
4179     case GIMPLE_DEBUG:
4180       return 0;
4181 
4182     case GIMPLE_ASM:
4183       {
4184 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4185 	/* 1000 means infinity. This avoids overflows later
4186 	   with very long asm statements.  */
4187 	if (count > 1000)
4188 	  count = 1000;
4189 	return count;
4190       }
4191 
4192     case GIMPLE_RESX:
4193       /* This is either going to be an external function call with one
4194 	 argument, or two register copy statements plus a goto.  */
4195       return 2;
4196 
4197     case GIMPLE_EH_DISPATCH:
4198       /* ??? This is going to turn into a switch statement.  Ideally
4199 	 we'd have a look at the eh region and estimate the number of
4200 	 edges involved.  */
4201       return 10;
4202 
4203     case GIMPLE_BIND:
4204       return estimate_num_insns_seq (
4205 	       gimple_bind_body (as_a <gbind *> (stmt)),
4206 	       weights);
4207 
4208     case GIMPLE_EH_FILTER:
4209       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4210 
4211     case GIMPLE_CATCH:
4212       return estimate_num_insns_seq (gimple_catch_handler (
4213 				       as_a <gcatch *> (stmt)),
4214 				     weights);
4215 
4216     case GIMPLE_TRY:
4217       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4218               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4219 
4220     /* OMP directives are generally very expensive.  */
4221 
4222     case GIMPLE_OMP_RETURN:
4223     case GIMPLE_OMP_SECTIONS_SWITCH:
4224     case GIMPLE_OMP_ATOMIC_STORE:
4225     case GIMPLE_OMP_CONTINUE:
4226       /* ...except these, which are cheap.  */
4227       return 0;
4228 
4229     case GIMPLE_OMP_ATOMIC_LOAD:
4230       return weights->omp_cost;
4231 
4232     case GIMPLE_OMP_FOR:
4233       return (weights->omp_cost
4234               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4235               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4236 
4237     case GIMPLE_OMP_PARALLEL:
4238     case GIMPLE_OMP_TASK:
4239     case GIMPLE_OMP_CRITICAL:
4240     case GIMPLE_OMP_MASTER:
4241     case GIMPLE_OMP_TASKGROUP:
4242     case GIMPLE_OMP_ORDERED:
4243     case GIMPLE_OMP_SECTION:
4244     case GIMPLE_OMP_SECTIONS:
4245     case GIMPLE_OMP_SINGLE:
4246     case GIMPLE_OMP_TARGET:
4247     case GIMPLE_OMP_TEAMS:
4248       return (weights->omp_cost
4249               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4250 
4251     case GIMPLE_TRANSACTION:
4252       return (weights->tm_cost
4253 	      + estimate_num_insns_seq (gimple_transaction_body (
4254 					  as_a <gtransaction *> (stmt)),
4255 					weights));
4256 
4257     default:
4258       gcc_unreachable ();
4259     }
4260 
4261   return cost;
4262 }
4263 
4264 /* Estimate number of instructions that will be created by expanding
4265    function FNDECL.  WEIGHTS contains weights attributed to various
4266    constructs.  */
4267 
4268 int
4269 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4270 {
4271   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4272   gimple_stmt_iterator bsi;
4273   basic_block bb;
4274   int n = 0;
4275 
4276   gcc_assert (my_function && my_function->cfg);
4277   FOR_EACH_BB_FN (bb, my_function)
4278     {
4279       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4280 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4281     }
4282 
4283   return n;
4284 }
4285 
4286 
4287 /* Initializes weights used by estimate_num_insns.  */
4288 
4289 void
4290 init_inline_once (void)
4291 {
4292   eni_size_weights.call_cost = 1;
4293   eni_size_weights.indirect_call_cost = 3;
4294   eni_size_weights.target_builtin_call_cost = 1;
4295   eni_size_weights.div_mod_cost = 1;
4296   eni_size_weights.omp_cost = 40;
4297   eni_size_weights.tm_cost = 10;
4298   eni_size_weights.time_based = false;
4299   eni_size_weights.return_cost = 1;
4300 
4301   /* Estimating the time for a call is difficult, since we have no idea what
4302      called function does.  In the current uses of eni_time_weights,
4303      underestimating the cost does less harm than overestimating it, so
4304      we choose a rather small value here.  */
4305   eni_time_weights.call_cost = 10;
4306   eni_time_weights.indirect_call_cost = 15;
4307   eni_time_weights.target_builtin_call_cost = 1;
4308   eni_time_weights.div_mod_cost = 10;
4309   eni_time_weights.omp_cost = 40;
4310   eni_time_weights.tm_cost = 40;
4311   eni_time_weights.time_based = true;
4312   eni_time_weights.return_cost = 2;
4313 }
4314 
4315 /* Estimate the number of instructions in a gimple_seq. */
4316 
4317 int
4318 count_insns_seq (gimple_seq seq, eni_weights *weights)
4319 {
4320   gimple_stmt_iterator gsi;
4321   int n = 0;
4322   for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4323     n += estimate_num_insns (gsi_stmt (gsi), weights);
4324 
4325   return n;
4326 }
4327 
4328 
4329 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4330 
4331 static void
4332 prepend_lexical_block (tree current_block, tree new_block)
4333 {
4334   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4335   BLOCK_SUBBLOCKS (current_block) = new_block;
4336   BLOCK_SUPERCONTEXT (new_block) = current_block;
4337 }
4338 
4339 /* Add local variables from CALLEE to CALLER.  */
4340 
4341 static inline void
4342 add_local_variables (struct function *callee, struct function *caller,
4343 		     copy_body_data *id)
4344 {
4345   tree var;
4346   unsigned ix;
4347 
4348   FOR_EACH_LOCAL_DECL (callee, ix, var)
4349     if (!can_be_nonlocal (var, id))
4350       {
4351         tree new_var = remap_decl (var, id);
4352 
4353         /* Remap debug-expressions.  */
4354 	if (TREE_CODE (new_var) == VAR_DECL
4355 	    && DECL_HAS_DEBUG_EXPR_P (var)
4356 	    && new_var != var)
4357 	  {
4358 	    tree tem = DECL_DEBUG_EXPR (var);
4359 	    bool old_regimplify = id->regimplify;
4360 	    id->remapping_type_depth++;
4361 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4362 	    id->remapping_type_depth--;
4363 	    id->regimplify = old_regimplify;
4364 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4365 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4366 	  }
4367 	add_local_decl (caller, new_var);
4368       }
4369 }
4370 
4371 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4372    have brought in or introduced any debug stmts for SRCVAR.  */
4373 
4374 static inline void
4375 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4376 {
4377   tree *remappedvarp = id->decl_map->get (srcvar);
4378 
4379   if (!remappedvarp)
4380     return;
4381 
4382   if (TREE_CODE (*remappedvarp) != VAR_DECL)
4383     return;
4384 
4385   if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4386     return;
4387 
4388   tree tvar = target_for_debug_bind (*remappedvarp);
4389   if (!tvar)
4390     return;
4391 
4392   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4393 					  id->call_stmt);
4394   gimple_seq_add_stmt (bindings, stmt);
4395 }
4396 
4397 /* For each inlined variable for which we may have debug bind stmts,
4398    add before GSI a final debug stmt resetting it, marking the end of
4399    its life, so that var-tracking knows it doesn't have to compute
4400    further locations for it.  */
4401 
4402 static inline void
4403 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4404 {
4405   tree var;
4406   unsigned ix;
4407   gimple_seq bindings = NULL;
4408 
4409   if (!gimple_in_ssa_p (id->src_cfun))
4410     return;
4411 
4412   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4413     return;
4414 
4415   for (var = DECL_ARGUMENTS (id->src_fn);
4416        var; var = DECL_CHAIN (var))
4417     reset_debug_binding (id, var, &bindings);
4418 
4419   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4420     reset_debug_binding (id, var, &bindings);
4421 
4422   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4423 }
4424 
4425 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
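
/* Roughly (an illustrative sketch): for a statement "a = foo (x);" the
   parameters of foo are initialized from the actual arguments, foo's body
   is copied in place of the call, and the call statement itself is finally
   replaced by an assignment "a = USE_RETVAR" of the declared return
   variable (or simply removed when the result is unused).  */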
4426 
4427 static bool
4428 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4429 {
4430   tree use_retvar;
4431   tree fn;
4432   hash_map<tree, tree> *dst;
4433   hash_map<tree, tree> *st = NULL;
4434   tree return_slot;
4435   tree modify_dest;
4436   tree return_bounds = NULL;
4437   location_t saved_location;
4438   struct cgraph_edge *cg_edge;
4439   cgraph_inline_failed_t reason;
4440   basic_block return_block;
4441   edge e;
4442   gimple_stmt_iterator gsi, stmt_gsi;
4443   bool successfully_inlined = FALSE;
4444   bool purge_dead_abnormal_edges;
4445   gcall *call_stmt;
4446   unsigned int i;
4447 
4448   /* Set input_location here so we get the right instantiation context
4449      if we call instantiate_decl from inlinable_function_p.  */
4450   /* FIXME: instantiate_decl isn't called by inlinable_function_p.  */
4451   saved_location = input_location;
4452   input_location = gimple_location (stmt);
4453 
4454   /* From here on, we're only interested in CALL_EXPRs.  */
4455   call_stmt = dyn_cast <gcall *> (stmt);
4456   if (!call_stmt)
4457     goto egress;
4458 
4459   cg_edge = id->dst_node->get_edge (stmt);
4460   gcc_checking_assert (cg_edge);
4461   /* First, see if we can figure out what function is being called.
4462      If we cannot, then there is no hope of inlining the function.  */
4463   if (cg_edge->indirect_unknown_callee)
4464     goto egress;
4465   fn = cg_edge->callee->decl;
4466   gcc_checking_assert (fn);
4467 
4468   /* If FN is a declaration of a function in a nested scope that was
4469      globally declared inline, we don't set its DECL_INITIAL.
4470      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4471      C++ front-end uses it for cdtors to refer to their internal
4472      declarations, which are not real functions.  Fortunately those
4473      don't have trees to be saved, so we can tell by checking their
4474      gimple_body.  */
4475   if (!DECL_INITIAL (fn)
4476       && DECL_ABSTRACT_ORIGIN (fn)
4477       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4478     fn = DECL_ABSTRACT_ORIGIN (fn);
4479 
4480   /* Don't try to inline functions that are not well-suited to inlining.  */
4481   if (cg_edge->inline_failed)
4482     {
4483       reason = cg_edge->inline_failed;
4484       /* If this call was originally indirect, we do not want to emit any
4485 	 inlining related warnings or sorry messages because there are no
4486 	 guarantees regarding those.  */
4487       if (cg_edge->indirect_inlining_edge)
4488 	goto egress;
4489 
4490       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4491           /* For extern inline functions that get redefined we always
4492 	     silently ignore the always_inline flag.  Better behaviour would
4493 	     be to be able to keep both bodies and use the extern inline body
4494 	     for inlining, but we can't do that because frontends overwrite
4495 	     the body.  */
4496 	  && !cg_edge->callee->local.redefined_extern_inline
4497 	  /* During early inline pass, report only when optimization is
4498 	     not turned on.  */
4499 	  && (symtab->global_info_ready
4500 	      || !optimize
4501 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4502 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4503 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4504 	{
4505 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4506 		 cgraph_inline_failed_string (reason));
4507 	  error ("called from here");
4508 	}
4509       else if (warn_inline
4510 	       && DECL_DECLARED_INLINE_P (fn)
4511 	       && !DECL_NO_INLINE_WARNING_P (fn)
4512 	       && !DECL_IN_SYSTEM_HEADER (fn)
4513 	       && reason != CIF_UNSPECIFIED
4514 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4515 	       /* Do not warn about not inlined recursive calls.  */
4516 	       && !cg_edge->recursive_p ()
4517 	       /* Avoid warnings during early inline pass. */
4518 	       && symtab->global_info_ready)
4519 	{
4520 	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4521 		   fn, _(cgraph_inline_failed_string (reason)));
4522 	  warning (OPT_Winline, "called from here");
4523 	}
4524       goto egress;
4525     }
4526   fn = cg_edge->callee->decl;
4527   cg_edge->callee->get_untransformed_body ();
4528 
4529 #ifdef ENABLE_CHECKING
4530   if (cg_edge->callee->decl != id->dst_node->decl)
4531     cg_edge->callee->verify ();
4532 #endif
4533 
4534   /* We will be inlining this callee.  */
4535   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4536   id->assign_stmts.create (0);
4537 
4538   /* Update the callers EH personality.  */
4539   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4540     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4541       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4542 
4543   /* Split the block holding the GIMPLE_CALL.  */
4544   e = split_block (bb, stmt);
4545   bb = e->src;
4546   return_block = e->dest;
4547   remove_edge (e);
4548 
4549   /* split_block splits after the statement; work around this by
4550      moving the call into the second block manually.  Not pretty,
4551      but seems easier than doing the CFG manipulation by hand
4552      when the GIMPLE_CALL is in the last statement of BB.  */
4553   stmt_gsi = gsi_last_bb (bb);
4554   gsi_remove (&stmt_gsi, false);
4555 
4556   /* If the GIMPLE_CALL was in the last statement of BB, it may have
4557      been the source of abnormal edges.  In this case, schedule
4558      the removal of dead abnormal edges.  */
4559   gsi = gsi_start_bb (return_block);
4560   if (gsi_end_p (gsi))
4561     {
4562       gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4563       purge_dead_abnormal_edges = true;
4564     }
4565   else
4566     {
4567       gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4568       purge_dead_abnormal_edges = false;
4569     }
4570 
4571   stmt_gsi = gsi_start_bb (return_block);
4572 
4573   /* Build a block containing code to initialize the arguments, the
4574      actual inline expansion of the body, and a label for the return
4575      statements within the function to jump to.  The type of the
4576      statement expression is the return type of the function call.
4577      ???  If the call does not have an associated block then we will
4578      remap all callee blocks to NULL, effectively dropping most of
4579      its debug information.  This should only happen for calls to
4580      artificial decls inserted by the compiler itself.  We need to
4581      either link the inlined blocks into the caller block tree or
4582      not refer to them in any way to not break GC for locations.  */
4583   if (gimple_block (stmt))
4584     {
4585       id->block = make_node (BLOCK);
4586       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4587       BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4588       prepend_lexical_block (gimple_block (stmt), id->block);
4589     }
4590 
4591   /* Local declarations will be replaced by their equivalents in this
4592      map.  */
4593   st = id->decl_map;
4594   id->decl_map = new hash_map<tree, tree>;
4595   dst = id->debug_map;
4596   id->debug_map = NULL;
4597 
4598   /* Record the function we are about to inline.  */
4599   id->src_fn = fn;
4600   id->src_node = cg_edge->callee;
4601   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4602   id->call_stmt = call_stmt;
4603 
4604   gcc_assert (!id->src_cfun->after_inlining);
4605 
4606   id->entry_bb = bb;
4607   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4608     {
4609       gimple_stmt_iterator si = gsi_last_bb (bb);
4610       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4611       						   NOT_TAKEN),
4612 			GSI_NEW_STMT);
4613     }
4614   initialize_inlined_parameters (id, stmt, fn, bb);
4615 
4616   if (DECL_INITIAL (fn))
4617     {
4618       if (gimple_block (stmt))
4619 	{
4620 	  tree *var;
4621 
4622 	  prepend_lexical_block (id->block,
4623 				 remap_blocks (DECL_INITIAL (fn), id));
4624 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4625 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4626 				   == NULL_TREE));
4627 	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
4628 	     otherwise for DWARF the DW_TAG_formal_parameter entries will not be
4629 	     children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4630 	     under it.  The parameters can then be evaluated in the debugger,
4631 	     but don't show up in backtraces.  */
4632 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4633 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4634 	      {
4635 		tree v = *var;
4636 		*var = TREE_CHAIN (v);
4637 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4638 		BLOCK_VARS (id->block) = v;
4639 	      }
4640 	    else
4641 	      var = &TREE_CHAIN (*var);
4642 	}
4643       else
4644 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4645     }
4646 
4647   /* Return statements in the function body will be replaced by jumps
4648      to the RET_LABEL.  */
4649   gcc_assert (DECL_INITIAL (fn));
4650   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4651 
4652   /* Find the LHS to which the result of this call is assigned.  */
4653   return_slot = NULL;
4654   if (gimple_call_lhs (stmt))
4655     {
4656       modify_dest = gimple_call_lhs (stmt);
4657 
4658       /* Remember where to copy returned bounds.  */
4659       if (gimple_call_with_bounds_p (stmt)
4660 	  && TREE_CODE (modify_dest) == SSA_NAME)
4661 	{
4662 	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4663 	  if (retbnd)
4664 	    {
4665 	      return_bounds = gimple_call_lhs (retbnd);
4666 	      /* If returned bounds are not used then just
4667 		 remove unused call.  */
4668 	      if (!return_bounds)
4669 		{
4670 		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4671 		  gsi_remove (&iter, true);
4672 		}
4673 	    }
4674 	}
4675 
4676       /* The function which we are inlining might not return a value,
4677 	 in which case we should issue a warning that the function
4678 	 does not return a value.  In that case the optimizers will
4679 	 see that the variable to which the value is assigned was not
4680 	 initialized.  We do not want to issue a warning about that
4681 	 uninitialized variable.  */
4682       if (DECL_P (modify_dest))
4683 	TREE_NO_WARNING (modify_dest) = 1;
4684 
4685       if (gimple_call_return_slot_opt_p (call_stmt))
4686 	{
4687 	  return_slot = modify_dest;
4688 	  modify_dest = NULL;
4689 	}
4690     }
4691   else
4692     modify_dest = NULL;
4693 
4694   /* If we are inlining a call to the C++ operator new, we don't want
4695      to use type based alias analysis on the return value.  Otherwise
4696      we may get confused if the compiler sees that the inlined new
4697      function returns a pointer which was just deleted.  See bug
4698      33407.  */
4699   if (DECL_IS_OPERATOR_NEW (fn))
4700     {
4701       return_slot = NULL;
4702       modify_dest = NULL;
4703     }
4704 
4705   /* Declare the return variable for the function.  */
4706   use_retvar = declare_return_variable (id, return_slot, modify_dest,
4707 					return_bounds, bb);
4708 
4709   /* Add local vars in this inlined callee to caller.  */
4710   add_local_variables (id->src_cfun, cfun, id);
4711 
4712   if (dump_file && (dump_flags & TDF_DETAILS))
4713     {
4714       fprintf (dump_file, "Inlining ");
4715       print_generic_expr (dump_file, id->src_fn, 0);
4716       fprintf (dump_file, " to ");
4717       print_generic_expr (dump_file, id->dst_fn, 0);
4718       fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4719     }
4720 
4721   /* This is it.  Duplicate the callee body.  Assume callee is
4722      pre-gimplified.  Note that we must not alter the caller
4723      function in any way before this point, as this CALL_EXPR may be
4724      a self-referential call; if we're calling ourselves, we need to
4725      duplicate our body before altering anything.  */
4726   copy_body (id, cg_edge->callee->count,
4727   	     GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4728 	     bb, return_block, NULL);
4729 
4730   reset_debug_bindings (id, stmt_gsi);
4731 
4732   /* Reset the escaped solution.  */
4733   if (cfun->gimple_df)
4734     pt_solution_reset (&cfun->gimple_df->escaped);
4735 
4736   /* Clean up.  */
4737   if (id->debug_map)
4738     {
4739       delete id->debug_map;
4740       id->debug_map = dst;
4741     }
4742   delete id->decl_map;
4743   id->decl_map = st;
4744 
4745   /* Unlink the call's virtual operands before replacing the statement.  */
4746   unlink_stmt_vdef (stmt);
4747   if (gimple_vdef (stmt)
4748       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4749     release_ssa_name (gimple_vdef (stmt));
4750 
4751   /* If the inlined function returns a result that we care about,
4752      substitute the GIMPLE_CALL with an assignment of the return
4753      variable to the LHS of the call.  That is, if STMT was
4754      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4755   if (use_retvar && gimple_call_lhs (stmt))
4756     {
4757       gimple old_stmt = stmt;
4758       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4759       gsi_replace (&stmt_gsi, stmt, false);
4760       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4761 
4762       /* Copy bounds if we copy structure with bounds.  */
4763       if (chkp_function_instrumented_p (id->dst_fn)
4764 	  && !BOUNDED_P (use_retvar)
4765 	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4766 	id->assign_stmts.safe_push (stmt);
4767     }
4768   else
4769     {
4770       /* Handle the case of inlining a function with no return
4771 	 statement, which causes the return value to become undefined.  */
4772       if (gimple_call_lhs (stmt)
4773 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4774 	{
4775 	  tree name = gimple_call_lhs (stmt);
4776 	  tree var = SSA_NAME_VAR (name);
4777 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4778 
4779 	  if (def)
4780 	    {
4781 	      /* If the variable is used undefined, make this name
4782 		 undefined via a move.  */
4783 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4784 	      gsi_replace (&stmt_gsi, stmt, true);
4785 	    }
4786 	  else
4787 	    {
4788 	      if (!var)
4789 		{
4790 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4791 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4792 		}
4793 	      /* Otherwise make this variable undefined.  */
4794 	      gsi_remove (&stmt_gsi, true);
4795 	      set_ssa_default_def (cfun, var, name);
4796 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4797 	    }
4798 	}
4799       else
4800         gsi_remove (&stmt_gsi, true);
4801     }
4802 
4803   /* Put returned bounds into the correct place if required.  */
4804   if (return_bounds)
4805     {
4806       gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4807       gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4808       gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4809       unlink_stmt_vdef (old_stmt);
4810       gsi_replace (&bnd_gsi, new_stmt, false);
4811       maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4812       cgraph_update_edges_for_call_stmt (old_stmt,
4813 					 gimple_call_fndecl (old_stmt),
4814 					 new_stmt);
4815     }
4816 
4817   if (purge_dead_abnormal_edges)
4818     {
4819       gimple_purge_dead_eh_edges (return_block);
4820       gimple_purge_dead_abnormal_call_edges (return_block);
4821     }
4822 
4823   /* If the value of the new expression is ignored, that's OK.  We
4824      don't warn about this for CALL_EXPRs, so we shouldn't warn about
4825      the equivalent inlined version either.  */
4826   if (is_gimple_assign (stmt))
4827     {
4828       gcc_assert (gimple_assign_single_p (stmt)
4829 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4830       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4831     }
4832 
4833   /* Copy bounds for all generated assigns that need it.  */
4834   for (i = 0; i < id->assign_stmts.length (); i++)
4835     chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4836   id->assign_stmts.release ();
4837 
4838   /* Output the inlining info for this abstract function, since it has been
4839      inlined.  If we don't do this now, we can lose the information about the
4840      variables in the function when the blocks get blown away as soon as we
4841      remove the cgraph node.  */
4842   if (gimple_block (stmt))
4843     (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4844 
4845   /* Update callgraph if needed.  */
4846   cg_edge->callee->remove ();
4847 
4848   id->block = NULL_TREE;
4849   successfully_inlined = TRUE;
4850 
4851  egress:
4852   input_location = saved_location;
4853   return successfully_inlined;
4854 }
4855 
4856 /* Expand call statements reachable in basic block BB.  In GIMPLE, calls
4857    appear either as standalone statements or as the right-hand side of an
4858    assignment.  Return true if anything was inlined.  */
4859 
4860 static bool
4861 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4862 {
4863   gimple_stmt_iterator gsi;
4864   bool inlined = false;
4865 
4866   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4867     {
4868       gimple stmt = gsi_stmt (gsi);
4869       gsi_prev (&gsi);
4870 
4871       if (is_gimple_call (stmt)
4872 	  && !gimple_call_internal_p (stmt))
4873 	inlined |= expand_call_inline (bb, stmt, id);
4874     }
4875 
4876   return inlined;
4877 }
4878 
4879 
4880 /* Walk all basic blocks created after FIRST and try to fold every statement
4881    in the STATEMENTS pointer set.  */
4882 
4883 static void
4884 fold_marked_statements (int first, hash_set<gimple> *statements)
4885 {
4886   for (; first < n_basic_blocks_for_fn (cfun); first++)
4887     if (BASIC_BLOCK_FOR_FN (cfun, first))
4888       {
4889         gimple_stmt_iterator gsi;
4890 
4891 	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4892 	     !gsi_end_p (gsi);
4893 	     gsi_next (&gsi))
4894 	  if (statements->contains (gsi_stmt (gsi)))
4895 	    {
4896 	      gimple old_stmt = gsi_stmt (gsi);
4897 	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4898 
4899 	      if (old_decl && DECL_BUILT_IN (old_decl))
4900 		{
4901 		  /* Folding builtins can create multiple instructions;
4902 		     we need to look at all of them.  */
4903 		  gimple_stmt_iterator i2 = gsi;
4904 		  gsi_prev (&i2);
4905 		  if (fold_stmt (&gsi))
4906 		    {
4907 		      gimple new_stmt;
4908 		      /* If a builtin at the end of a bb folded into nothing,
4909 			 the following loop won't work.  */
4910 		      if (gsi_end_p (gsi))
4911 			{
4912 			  cgraph_update_edges_for_call_stmt (old_stmt,
4913 							     old_decl, NULL);
4914 			  break;
4915 			}
4916 		      if (gsi_end_p (i2))
4917 			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4918 		      else
4919 			gsi_next (&i2);
4920 		      while (1)
4921 			{
4922 			  new_stmt = gsi_stmt (i2);
4923 			  update_stmt (new_stmt);
4924 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4925 							     new_stmt);
4926 
4927 			  if (new_stmt == gsi_stmt (gsi))
4928 			    {
4929 			      /* It is okay to check only for the very last
4930 				 of these statements.  If it is a throwing
4931 				 statement nothing will change.  If it isn't,
4932 				 this can remove EH edges.  That would only
4933 				 be wrong if some intermediate statement
4934 				 could throw while the last one cannot; then
4935 				 we would have to split the block, which we
4936 				 cannot do here and would lose anyway.  And
4937 				 as builtins probably never throw, this all
4938 				 is moot anyway.  */
4939 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
4940 								  new_stmt))
4941 				gimple_purge_dead_eh_edges (
4942 				  BASIC_BLOCK_FOR_FN (cfun, first));
4943 			      break;
4944 			    }
4945 			  gsi_next (&i2);
4946 			}
4947 		    }
4948 		}
4949 	      else if (fold_stmt (&gsi))
4950 		{
4951 		  /* Re-read the statement from GSI as fold_stmt() may
4952 		     have changed it.  */
4953 		  gimple new_stmt = gsi_stmt (gsi);
4954 		  update_stmt (new_stmt);
4955 
4956 		  if (is_gimple_call (old_stmt)
4957 		      || is_gimple_call (new_stmt))
4958 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4959 						       new_stmt);
4960 
4961 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4962 		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4963 								    first));
4964 		}
4965 	    }
4966       }
4967 }
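
/* An illustrative sketch (hypothetical, not taken from the original sources)
   of why the I2 iterator above must walk every statement produced by folding:
   a marked call such as

	 __builtin_memcpy (&d, &s, 4);

   may fold into a single assignment, or into more than one statement, and
   each new statement needs update_stmt and a cgraph edge update before the
   old call's EH information can be cleaned up.  */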
4968 
4969 /* Expand calls to inline functions in the body of FN.  */
4970 
4971 unsigned int
4972 optimize_inline_calls (tree fn)
4973 {
4974   copy_body_data id;
4975   basic_block bb;
4976   int last = n_basic_blocks_for_fn (cfun);
4977   bool inlined_p = false;
4978 
4979   /* Clear out ID.  */
4980   memset (&id, 0, sizeof (id));
4981 
4982   id.src_node = id.dst_node = cgraph_node::get (fn);
4983   gcc_assert (id.dst_node->definition);
4984   id.dst_fn = fn;
4985   /* Or any functions that aren't finished yet.  */
4986   if (current_function_decl)
4987     id.dst_fn = current_function_decl;
4988 
4989   id.copy_decl = copy_decl_maybe_to_var;
4990   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4991   id.transform_new_cfg = false;
4992   id.transform_return_to_modify = true;
4993   id.transform_parameter = true;
4994   id.transform_lang_insert_block = NULL;
4995   id.statements_to_fold = new hash_set<gimple>;
4996 
4997   push_gimplify_context ();
4998 
4999   /* We make no attempts to keep dominance info up-to-date.  */
5000   free_dominance_info (CDI_DOMINATORS);
5001   free_dominance_info (CDI_POST_DOMINATORS);
5002 
5003   /* Register specific gimple functions.  */
5004   gimple_register_cfg_hooks ();
5005 
5006   /* Reach the trees by walking over the CFG, and note the
5007      enclosing basic-blocks in the call edges.  */
5008   /* We walk the blocks going forward, because inlined function bodies
5009      will split id->current_basic_block, and the new blocks will
5010      follow it; we'll trudge through them, processing their CALL_EXPRs
5011      along the way.  */
5012   FOR_EACH_BB_FN (bb, cfun)
5013     inlined_p |= gimple_expand_calls_inline (bb, &id);
5014 
5015   pop_gimplify_context (NULL);
5016 
5017 #ifdef ENABLE_CHECKING
5018     {
5019       struct cgraph_edge *e;
5020 
5021       id.dst_node->verify ();
5022 
5023       /* Double check that we inlined everything we are supposed to inline.  */
5024       for (e = id.dst_node->callees; e; e = e->next_callee)
5025 	gcc_assert (e->inline_failed);
5026     }
5027 #endif
5028 
5029   /* Fold queued statements.  */
5030   fold_marked_statements (last, id.statements_to_fold);
5031   delete id.statements_to_fold;
5032 
5033   gcc_assert (!id.debug_stmts.exists ());
5034 
5035   /* If we didn't inline into the function there is nothing to do.  */
5036   if (!inlined_p)
5037     return 0;
5038 
5039   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5040   number_blocks (fn);
5041 
5042   delete_unreachable_blocks_update_callgraph (&id);
5043 #ifdef ENABLE_CHECKING
5044   id.dst_node->verify ();
5045 #endif
5046 
5047   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5048      not possible yet - the IPA passes might make various functions no longer
5049      throw, and they do not proactively update local EH info.  This is done
5050      later in the fixup_cfg pass, which also executes the verification.  */
5051   return (TODO_update_ssa
5052 	  | TODO_cleanup_cfg
5053 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5054 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5055 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5056 	     ? TODO_rebuild_frequencies : 0));
5057 }
5058 
5059 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5060 
5061 tree
5062 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5063 {
5064   enum tree_code code = TREE_CODE (*tp);
5065   enum tree_code_class cl = TREE_CODE_CLASS (code);
5066 
5067   /* We make copies of most nodes.  */
5068   if (IS_EXPR_CODE_CLASS (cl)
5069       || code == TREE_LIST
5070       || code == TREE_VEC
5071       || code == TYPE_DECL
5072       || code == OMP_CLAUSE)
5073     {
5074       /* Because the chain gets clobbered when we make a copy, we save it
5075 	 here.  */
5076       tree chain = NULL_TREE, new_tree;
5077 
5078       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5079 	chain = TREE_CHAIN (*tp);
5080 
5081       /* Copy the node.  */
5082       new_tree = copy_node (*tp);
5083 
5084       *tp = new_tree;
5085 
5086       /* Now, restore the chain, if appropriate.  That will cause
5087 	 walk_tree to walk into the chain as well.  */
5088       if (code == PARM_DECL
5089 	  || code == TREE_LIST
5090 	  || code == OMP_CLAUSE)
5091 	TREE_CHAIN (*tp) = chain;
5092 
5093       /* For now, we don't update BLOCKs when we make copies.  So, we
5094 	 have to nullify all BIND_EXPRs.  */
5095       if (TREE_CODE (*tp) == BIND_EXPR)
5096 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5097     }
5098   else if (code == CONSTRUCTOR)
5099     {
5100       /* CONSTRUCTOR nodes need special handling because
5101          we need to duplicate the vector of elements.  */
5102       tree new_tree;
5103 
5104       new_tree = copy_node (*tp);
5105       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5106       *tp = new_tree;
5107     }
5108   else if (code == STATEMENT_LIST)
5109     /* We used to just abort on STATEMENT_LIST, but we can run into them
5110        with statement-expressions (c++/40975).  */
5111     copy_statement_list (tp);
5112   else if (TREE_CODE_CLASS (code) == tcc_type)
5113     *walk_subtrees = 0;
5114   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5115     *walk_subtrees = 0;
5116   else if (TREE_CODE_CLASS (code) == tcc_constant)
5117     *walk_subtrees = 0;
5118   return NULL_TREE;
5119 }
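
/* A minimal usage sketch (hypothetical, not taken from the original sources):
   one way to deep-copy the expression nodes of a tree is to hand copy_tree_r
   to walk_tree, e.g.

	 tree t = ...;
	 walk_tree (&t, copy_tree_r, NULL, NULL);

   after which T refers to freshly allocated expression nodes, while types,
   declarations and constants remain shared with the original.  */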
5120 
5121 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5122    information indicating to what new SAVE_EXPR this one should be mapped,
5123    use that one.  Otherwise, create a new node and enter it in ST, so that
5124    the copy can be reused later.  */
5125 
5126 static void
5127 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5128 {
5129   tree *n;
5130   tree t;
5131 
5132   /* See if we already encountered this SAVE_EXPR.  */
5133   n = st->get (*tp);
5134 
5135   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5136   if (!n)
5137     {
5138       t = copy_node (*tp);
5139 
5140       /* Remember this SAVE_EXPR.  */
5141       st->put (*tp, t);
5142       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5143       st->put (t, t);
5144     }
5145   else
5146     {
5147       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5148       *walk_subtrees = 0;
5149       t = *n;
5150     }
5151 
5152   /* Replace this SAVE_EXPR with the copy.  */
5153   *tp = t;
5154 }
5155 
5156 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5157    label, copies the declaration and enters it in the declaration map of the
5158    'copy_body_data *' carried in WI->info.  */
5159 
5160 static tree
5161 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5162 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5163 		        struct walk_stmt_info *wi)
5164 {
5165   copy_body_data *id = (copy_body_data *) wi->info;
5166   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5167 
5168   if (stmt)
5169     {
5170       tree decl = gimple_label_label (stmt);
5171 
5172       /* Copy the decl and remember the copy.  */
5173       insert_decl_map (id, decl, id->copy_decl (decl, id));
5174     }
5175 
5176   return NULL_TREE;
5177 }
5178 
5179 
5180 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5181    Using the declaration map in ID->decl_map (a tree-to-tree hash map),
5182    remaps all local declarations to appropriate replacements in gimple
5183    operands.  */
5184 
5185 static tree
5186 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5187 {
5188   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5189   copy_body_data *id = (copy_body_data *) wi->info;
5190   hash_map<tree, tree> *st = id->decl_map;
5191   tree *n;
5192   tree expr = *tp;
5193 
5194   /* Only a local declaration (variable or label).  */
5195   if ((TREE_CODE (expr) == VAR_DECL
5196        && !TREE_STATIC (expr))
5197       || TREE_CODE (expr) == LABEL_DECL)
5198     {
5199       /* Lookup the declaration.  */
5200       n = st->get (expr);
5201 
5202       /* If it's there, remap it.  */
5203       if (n)
5204 	*tp = *n;
5205       *walk_subtrees = 0;
5206     }
5207   else if (TREE_CODE (expr) == STATEMENT_LIST
5208 	   || TREE_CODE (expr) == BIND_EXPR
5209 	   || TREE_CODE (expr) == SAVE_EXPR)
5210     gcc_unreachable ();
5211   else if (TREE_CODE (expr) == TARGET_EXPR)
5212     {
5213       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5214          It's OK for this to happen if it was part of a subtree that
5215          isn't immediately expanded, such as operand 2 of another
5216          TARGET_EXPR.  */
5217       if (!TREE_OPERAND (expr, 1))
5218 	{
5219 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5220 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5221 	}
5222     }
5223 
5224   /* Keep iterating.  */
5225   return NULL_TREE;
5226 }
5227 
5228 
5229 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5230    Using the declaration map in ID->decl_map (a tree-to-tree hash map),
5231    remaps all local declarations to appropriate replacements in gimple
5232    statements.  */
5233 
5234 static tree
5235 replace_locals_stmt (gimple_stmt_iterator *gsip,
5236 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5237 		     struct walk_stmt_info *wi)
5238 {
5239   copy_body_data *id = (copy_body_data *) wi->info;
5240   gimple gs = gsi_stmt (*gsip);
5241 
5242   if (gbind *stmt = dyn_cast <gbind *> (gs))
5243     {
5244       tree block = gimple_bind_block (stmt);
5245 
5246       if (block)
5247 	{
5248 	  remap_block (&block, id);
5249 	  gimple_bind_set_block (stmt, block);
5250 	}
5251 
5252       /* This will remap a lot of the same decls again, but this should be
5253 	 harmless.  */
5254       if (gimple_bind_vars (stmt))
5255 	gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5256 						 NULL, id));
5257     }
5258 
5259   /* Keep iterating.  */
5260   return NULL_TREE;
5261 }
5262 
5263 
5264 /* Copies everything in SEQ and replaces variables and labels local to
5265    current_function_decl.  */
5266 
5267 gimple_seq
5268 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5269 {
5270   copy_body_data id;
5271   struct walk_stmt_info wi;
5272   gimple_seq copy;
5273 
5274   /* There's nothing to do for NULL_TREE.  */
5275   if (seq == NULL)
5276     return seq;
5277 
5278   /* Set up ID.  */
5279   memset (&id, 0, sizeof (id));
5280   id.src_fn = current_function_decl;
5281   id.dst_fn = current_function_decl;
5282   id.decl_map = new hash_map<tree, tree>;
5283   id.debug_map = NULL;
5284 
5285   id.copy_decl = copy_decl_no_change;
5286   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5287   id.transform_new_cfg = false;
5288   id.transform_return_to_modify = false;
5289   id.transform_parameter = false;
5290   id.transform_lang_insert_block = NULL;
5291 
5292   /* Walk the tree once to find local labels.  */
5293   memset (&wi, 0, sizeof (wi));
5294   hash_set<tree> visited;
5295   wi.info = &id;
5296   wi.pset = &visited;
5297   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5298 
5299   copy = gimple_seq_copy (seq);
5300 
5301   /* Walk the copy, remapping decls.  */
5302   memset (&wi, 0, sizeof (wi));
5303   wi.info = &id;
5304   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5305 
5306   /* Clean up.  */
5307   delete id.decl_map;
5308   if (id.debug_map)
5309     delete id.debug_map;
5310   if (id.dependence_map)
5311     {
5312       delete id.dependence_map;
5313       id.dependence_map = NULL;
5314     }
5315 
5316   return copy;
5317 }
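
/* A usage sketch (hypothetical, not taken from the original sources): a pass
   that needs a private copy of a statement sequence, with block-local
   variables and labels duplicated rather than shared, can do

	 gimple_seq body = gimple_bind_body (bind_stmt);
	 gimple_seq copy = copy_gimple_seq_and_replace_locals (body);

   and then insert COPY elsewhere in the current function.  */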
5318 
5319 
5320 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5321 
5322 static tree
5323 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5324 {
5325   if (*tp == data)
5326     return (tree) data;
5327   else
5328     return NULL;
5329 }
5330 
5331 DEBUG_FUNCTION bool
5332 debug_find_tree (tree top, tree search)
5333 {
5334   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5335 }
5336 
5337 
5338 /* Declare the variables created by the inliner.  Add all the variables in
5339    VARS to BLOCK.  */
5340 
5341 static void
5342 declare_inline_vars (tree block, tree vars)
5343 {
5344   tree t;
5345   for (t = vars; t; t = DECL_CHAIN (t))
5346     {
5347       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5348       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5349       add_local_decl (cfun, t);
5350     }
5351 
5352   if (block)
5353     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5354 }
5355 
5356 /* Finish copying DECL (which originally was in ID->src_fn) into COPY, which
5357    will be placed in ID->dst_fn.  Shared by the various copy_decl_* helpers
5358    below.  */
5359 
5360 static tree
5361 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5362 {
5363   /* Don't generate debug information for the copy if we wouldn't have
5364      generated it for the original either.  */
5365   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5366   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5367 
5368   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5369      declaration inspired this copy.  */
5370   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5371 
5372   /* The new variable/label has no RTL, yet.  */
5373   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5374       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5375     SET_DECL_RTL (copy, 0);
5376 
5377   /* These args would always appear unused, if not for this.  */
5378   TREE_USED (copy) = 1;
5379 
5380   /* Set the context for the new declaration.  */
5381   if (!DECL_CONTEXT (decl))
5382     /* Globals stay global.  */
5383     ;
5384   else if (DECL_CONTEXT (decl) != id->src_fn)
5385     /* Things that weren't in the scope of the function we're inlining
5386        from aren't in the scope we're inlining to, either.  */
5387     ;
5388   else if (TREE_STATIC (decl))
5389     /* Function-scoped static variables should stay in the original
5390        function.  */
5391     ;
5392   else
5393     /* Ordinary automatic local variables are now in the scope of the
5394        new function.  */
5395     DECL_CONTEXT (copy) = id->dst_fn;
5396 
5397   return copy;
5398 }
5399 
5400 static tree
5401 copy_decl_to_var (tree decl, copy_body_data *id)
5402 {
5403   tree copy, type;
5404 
5405   gcc_assert (TREE_CODE (decl) == PARM_DECL
5406 	      || TREE_CODE (decl) == RESULT_DECL);
5407 
5408   type = TREE_TYPE (decl);
5409 
5410   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5411 		     VAR_DECL, DECL_NAME (decl), type);
5412   if (DECL_PT_UID_SET_P (decl))
5413     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5414   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5415   TREE_READONLY (copy) = TREE_READONLY (decl);
5416   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5417   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5418 
5419   return copy_decl_for_dup_finish (id, decl, copy);
5420 }
5421 
5422 /* Like copy_decl_to_var, but create a return slot object instead of a
5423    pointer variable for return by invisible reference.  */
5424 
5425 static tree
5426 copy_result_decl_to_var (tree decl, copy_body_data *id)
5427 {
5428   tree copy, type;
5429 
5430   gcc_assert (TREE_CODE (decl) == PARM_DECL
5431 	      || TREE_CODE (decl) == RESULT_DECL);
5432 
5433   type = TREE_TYPE (decl);
5434   if (DECL_BY_REFERENCE (decl))
5435     type = TREE_TYPE (type);
5436 
5437   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5438 		     VAR_DECL, DECL_NAME (decl), type);
5439   if (DECL_PT_UID_SET_P (decl))
5440     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5441   TREE_READONLY (copy) = TREE_READONLY (decl);
5442   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5443   if (!DECL_BY_REFERENCE (decl))
5444     {
5445       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5446       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5447     }
5448 
5449   return copy_decl_for_dup_finish (id, decl, copy);
5450 }
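
/* An illustrative note (hypothetical, not taken from the original sources):
   for a callee such as

	 struct big f (void);

   returned by invisible reference, DECL_RESULT (f) has DECL_BY_REFERENCE set
   and type 'struct big *'.  copy_result_decl_to_var above therefore strips
   one level of pointers and creates a 'struct big' return slot, whereas
   copy_decl_to_var would have produced a pointer variable.  */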
5451 
5452 tree
5453 copy_decl_no_change (tree decl, copy_body_data *id)
5454 {
5455   tree copy;
5456 
5457   copy = copy_node (decl);
5458 
5459   /* The COPY is not abstract; it will be generated in DST_FN.  */
5460   DECL_ABSTRACT_P (copy) = false;
5461   lang_hooks.dup_lang_specific_decl (copy);
5462 
5463   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5464      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5465   if (TREE_CODE (copy) == LABEL_DECL)
5466     {
5467       TREE_ADDRESSABLE (copy) = 0;
5468       LABEL_DECL_UID (copy) = -1;
5469     }
5470 
5471   return copy_decl_for_dup_finish (id, decl, copy);
5472 }
5473 
5474 static tree
5475 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5476 {
5477   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5478     return copy_decl_to_var (decl, id);
5479   else
5480     return copy_decl_no_change (decl, id);
5481 }
5482 
5483 /* Return a copy of the function's argument tree.  */
5484 static tree
5485 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5486 			       bitmap args_to_skip, tree *vars)
5487 {
5488   tree arg, *parg;
5489   tree new_parm = NULL;
5490   int i = 0;
5491 
5492   parg = &new_parm;
5493 
5494   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5495     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5496       {
5497         tree new_tree = remap_decl (arg, id);
5498 	if (TREE_CODE (new_tree) != PARM_DECL)
5499 	  new_tree = id->copy_decl (arg, id);
5500         lang_hooks.dup_lang_specific_decl (new_tree);
5501         *parg = new_tree;
5502 	parg = &DECL_CHAIN (new_tree);
5503       }
5504     else if (!id->decl_map->get (arg))
5505       {
5506 	/* Make an equivalent VAR_DECL.  If the argument was used
5507 	   as a temporary variable later in the function, its uses will be
5508 	   replaced by this local variable.  */
5509 	tree var = copy_decl_to_var (arg, id);
5510 	insert_decl_map (id, arg, var);
5511         /* Declare this new variable.  */
5512         DECL_CHAIN (var) = *vars;
5513         *vars = var;
5514       }
5515   return new_parm;
5516 }
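
/* For illustration (a hypothetical sketch, not taken from the original
   sources): with ARGS_TO_SKIP having bit 1 set and a source signature

	 int f (int a, int b, int c);

   the loop above keeps remapped PARM_DECLs only for A and C, while B is
   mapped to a fresh local VAR_DECL so that any remaining uses of it in the
   copied body still resolve to something.  */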
5517 
5518 /* Return a copy of the function's static chain.  */
5519 static tree
5520 copy_static_chain (tree static_chain, copy_body_data * id)
5521 {
5522   tree *chain_copy, *pvar;
5523 
5524   chain_copy = &static_chain;
5525   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5526     {
5527       tree new_tree = remap_decl (*pvar, id);
5528       lang_hooks.dup_lang_specific_decl (new_tree);
5529       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5530       *pvar = new_tree;
5531     }
5532   return static_chain;
5533 }
5534 
5535 /* Return true if the function is allowed to be versioned.
5536    This is a guard for the versioning functionality.  */
5537 
5538 bool
5539 tree_versionable_function_p (tree fndecl)
5540 {
5541   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5542 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5543 }
5544 
5545 /* Delete all unreachable basic blocks and update callgraph.
5546    Doing so is somewhat nontrivial because we need to update all clones and
5547    remove inline functions that become unreachable.  */
5548 
5549 static bool
5550 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5551 {
5552   bool changed = false;
5553   basic_block b, next_bb;
5554 
5555   find_unreachable_blocks ();
5556 
5557   /* Delete all unreachable basic blocks.  */
5558 
5559   for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5560        != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5561     {
5562       next_bb = b->next_bb;
5563 
5564       if (!(b->flags & BB_REACHABLE))
5565 	{
5566           gimple_stmt_iterator bsi;
5567 
5568           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5569 	    {
5570 	      struct cgraph_edge *e;
5571 	      struct cgraph_node *node;
5572 
5573 	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5574 
5575 	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5576 		  &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5577 		{
5578 		  if (!e->inline_failed)
5579 		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
5580 		  else
5581 		    e->remove ();
5582 		}
5583 	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5584 		  && id->dst_node->clones)
5585 		for (node = id->dst_node->clones; node != id->dst_node;)
5586 		  {
5587 		    node->remove_stmt_references (gsi_stmt (bsi));
5588 		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5589 			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5590 		      {
5591 			if (!e->inline_failed)
5592 			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
5593 			else
5594 			  e->remove ();
5595 		      }
5596 
5597 		    if (node->clones)
5598 		      node = node->clones;
5599 		    else if (node->next_sibling_clone)
5600 		      node = node->next_sibling_clone;
5601 		    else
5602 		      {
5603 			while (node != id->dst_node && !node->next_sibling_clone)
5604 			  node = node->clone_of;
5605 			if (node != id->dst_node)
5606 			  node = node->next_sibling_clone;
5607 		      }
5608 		  }
5609 	    }
5610 	  delete_basic_block (b);
5611 	  changed = true;
5612 	}
5613     }
5614 
5615   return changed;
5616 }
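
/* A brief explanatory note (not taken from the original sources): the loop
   above walks the clone tree of ID->dst_node in preorder.  It descends into
   NODE->clones first, then advances to NODE->next_sibling_clone, and climbs
   back through NODE->clone_of once a subtree is exhausted, so every
   transitive clone sees the same statement-level updates exactly once.
   update_clone_info below uses the same traversal.  */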
5617 
5618 /* Update clone info after duplication.  */
5619 
5620 static void
5621 update_clone_info (copy_body_data * id)
5622 {
5623   struct cgraph_node *node;
5624   if (!id->dst_node->clones)
5625     return;
5626   for (node = id->dst_node->clones; node != id->dst_node;)
5627     {
5628       /* First update replace maps to match the new body.  */
5629       if (node->clone.tree_map)
5630         {
5631 	  unsigned int i;
5632           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5633 	    {
5634 	      struct ipa_replace_map *replace_info;
5635 	      replace_info = (*node->clone.tree_map)[i];
5636 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5637 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5638 	    }
5639 	}
5640       if (node->clones)
5641 	node = node->clones;
5642       else if (node->next_sibling_clone)
5643 	node = node->next_sibling_clone;
5644       else
5645 	{
5646 	  while (node != id->dst_node && !node->next_sibling_clone)
5647 	    node = node->clone_of;
5648 	  if (node != id->dst_node)
5649 	    node = node->next_sibling_clone;
5650 	}
5651     }
5652 }
5653 
5654 /* Create a copy of a function's tree.
5655    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5656    of the original function and the new copied function
5657    respectively.  In case we want to replace a DECL
5658    tree with another tree while duplicating the function's
5659    body, TREE_MAP represents the mapping between these
5660    trees. If UPDATE_CLONES is set, the call_stmt fields
5661    of edges of clones of the function will be updated.
5662 
5663    If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5664    from the new version.
5665    If SKIP_RETURN is true, the new version will return void.
5666    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5667    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5668 */
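
/* A usage sketch (hypothetical, not taken from the original sources): IPA
   cloning code typically creates NEW_DECL and its cgraph node first and then
   calls, e.g.,

	 tree_function_versioning (old_decl, new_decl, tree_map,
				   false, args_to_skip, false,
				   NULL, NULL);

   with update_clones and skip_return false, no block restriction and the
   default entry block, leaving further processing of the new body to the
   caller.  */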
5669 void
5670 tree_function_versioning (tree old_decl, tree new_decl,
5671 			  vec<ipa_replace_map *, va_gc> *tree_map,
5672 			  bool update_clones, bitmap args_to_skip,
5673 			  bool skip_return, bitmap blocks_to_copy,
5674 			  basic_block new_entry)
5675 {
5676   struct cgraph_node *old_version_node;
5677   struct cgraph_node *new_version_node;
5678   copy_body_data id;
5679   tree p;
5680   unsigned i;
5681   struct ipa_replace_map *replace_info;
5682   basic_block old_entry_block, bb;
5683   auto_vec<gimple, 10> init_stmts;
5684   tree vars = NULL_TREE;
5685 
5686   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5687 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5688   DECL_POSSIBLY_INLINED (old_decl) = 1;
5689 
5690   old_version_node = cgraph_node::get (old_decl);
5691   gcc_checking_assert (old_version_node);
5692   new_version_node = cgraph_node::get (new_decl);
5693   gcc_checking_assert (new_version_node);
5694 
5695   /* Copy over debug args.  */
5696   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5697     {
5698       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5699       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5700       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5701       old_debug_args = decl_debug_args_lookup (old_decl);
5702       if (old_debug_args)
5703 	{
5704 	  new_debug_args = decl_debug_args_insert (new_decl);
5705 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5706 	}
5707     }
5708 
5709   /* Output the inlining info for this abstract function, since it has been
5710      inlined.  If we don't do this now, we can lose the information about the
5711      variables in the function when the blocks get blown away as soon as we
5712      remove the cgraph node.  */
5713   (*debug_hooks->outlining_inline_function) (old_decl);
5714 
5715   DECL_ARTIFICIAL (new_decl) = 1;
5716   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5717   if (DECL_ORIGIN (old_decl) == old_decl)
5718     old_version_node->used_as_abstract_origin = true;
5719   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5720 
5721   /* Prepare the data structures for the tree copy.  */
5722   memset (&id, 0, sizeof (id));
5723 
5724   /* Prepare the set of statements that will need folding after the copy.  */
5725   id.statements_to_fold = new hash_set<gimple>;
5726 
5727   id.decl_map = new hash_map<tree, tree>;
5728   id.debug_map = NULL;
5729   id.src_fn = old_decl;
5730   id.dst_fn = new_decl;
5731   id.src_node = old_version_node;
5732   id.dst_node = new_version_node;
5733   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5734   id.blocks_to_copy = blocks_to_copy;
5735 
5736   id.copy_decl = copy_decl_no_change;
5737   id.transform_call_graph_edges
5738     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5739   id.transform_new_cfg = true;
5740   id.transform_return_to_modify = false;
5741   id.transform_parameter = false;
5742   id.transform_lang_insert_block = NULL;
5743 
5744   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5745     (DECL_STRUCT_FUNCTION (old_decl));
5746   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5747   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5748   initialize_cfun (new_decl, old_decl,
5749 		   old_entry_block->count);
5750   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5751     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5752       = id.src_cfun->gimple_df->ipa_pta;
5753 
5754   /* Copy the function's static chain.  */
5755   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5756   if (p)
5757     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5758       copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5759 			 &id);
5760 
5761   /* If there's a tree_map, prepare for substitution.  */
5762   if (tree_map)
5763     for (i = 0; i < tree_map->length (); i++)
5764       {
5765 	gimple init;
5766 	replace_info = (*tree_map)[i];
5767 	if (replace_info->replace_p)
5768 	  {
5769 	    if (!replace_info->old_tree)
5770 	      {
5771 		int i = replace_info->parm_num;
5772 		tree parm;
5773 		tree req_type;
5774 
5775 		for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5776 		  i --;
5777 		replace_info->old_tree = parm;
5778 		req_type = TREE_TYPE (parm);
5779 		if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5780 		  {
5781 		    if (fold_convertible_p (req_type, replace_info->new_tree))
5782 		      replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5783 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5784 		      replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5785 		    else
5786 		      {
5787 			if (dump_file)
5788 			  {
5789 			    fprintf (dump_file, "    const ");
5790 			    print_generic_expr (dump_file, replace_info->new_tree, 0);
5791 			    fprintf (dump_file, "  can't be converted to param ");
5792 			    print_generic_expr (dump_file, parm, 0);
5793 			    fprintf (dump_file, "\n");
5794 			  }
5795 			replace_info->old_tree = NULL;
5796 		      }
5797 		  }
5798 	      }
5799 	    else
5800 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5801 	    if (replace_info->old_tree)
5802 	      {
5803 		init = setup_one_parameter (&id, replace_info->old_tree,
5804 					    replace_info->new_tree, id.src_fn,
5805 					    NULL,
5806 					    &vars);
5807 		if (init)
5808 		  init_stmts.safe_push (init);
5809 	      }
5810 	  }
5811       }
5812   /* Copy the function's arguments.  */
5813   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5814     DECL_ARGUMENTS (new_decl) =
5815       copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5816       				     args_to_skip, &vars);
5817 
5818   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5819   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5820 
5821   declare_inline_vars (DECL_INITIAL (new_decl), vars);
5822 
5823   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5824     /* Add local vars.  */
5825     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5826 
5827   if (DECL_RESULT (old_decl) == NULL_TREE)
5828     ;
5829   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5830     {
5831       DECL_RESULT (new_decl)
5832 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5833 		      RESULT_DECL, NULL_TREE, void_type_node);
5834       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5835       cfun->returns_struct = 0;
5836       cfun->returns_pcc_struct = 0;
5837     }
5838   else
5839     {
5840       tree old_name;
5841       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5842       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5843       if (gimple_in_ssa_p (id.src_cfun)
5844 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5845 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5846 	{
5847 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5848 	  insert_decl_map (&id, old_name, new_name);
5849 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5850 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5851 	}
5852     }
5853 
5854   /* Set up the destination function's loop tree.  */
5855   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5856     {
5857       cfun->curr_properties &= ~PROP_loops;
5858       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5859       cfun->curr_properties |= PROP_loops;
5860     }
5861 
5862   /* Copy the Function's body.  */
5863   copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5864 	     ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5865 	     new_entry);
5866 
5867   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5868   number_blocks (new_decl);
5869 
5870   /* We want to create the BB unconditionally, so that the addition of
5871      debug stmts doesn't affect BB count, which may in the end cause
5872      codegen differences.  */
5873   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5874   while (init_stmts.length ())
5875     insert_init_stmt (&id, bb, init_stmts.pop ());
5876   update_clone_info (&id);
5877 
5878   /* Remap the nonlocal_goto_save_area, if any.  */
5879   if (cfun->nonlocal_goto_save_area)
5880     {
5881       struct walk_stmt_info wi;
5882 
5883       memset (&wi, 0, sizeof (wi));
5884       wi.info = &id;
5885       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5886     }
5887 
5888   /* Clean up.  */
5889   delete id.decl_map;
5890   if (id.debug_map)
5891     delete id.debug_map;
5892   free_dominance_info (CDI_DOMINATORS);
5893   free_dominance_info (CDI_POST_DOMINATORS);
5894 
5895   fold_marked_statements (0, id.statements_to_fold);
5896   delete id.statements_to_fold;
5897   fold_cond_expr_cond ();
5898   delete_unreachable_blocks_update_callgraph (&id);
5899   if (id.dst_node->definition)
5900     cgraph_edge::rebuild_references ();
5901   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5902     {
5903       calculate_dominance_info (CDI_DOMINATORS);
5904       fix_loop_structure (NULL);
5905     }
5906   update_ssa (TODO_update_ssa);
5907 
5908   /* After partial cloning we need to rescale frequencies, so that they are
5909      within the proper range in the cloned function.  */
5910   if (new_entry)
5911     {
5912       struct cgraph_edge *e;
5913       rebuild_frequencies ();
5914 
5915       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5916       for (e = new_version_node->callees; e; e = e->next_callee)
5917 	{
5918 	  basic_block bb = gimple_bb (e->call_stmt);
5919 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5920 							 bb);
5921 	  e->count = bb->count;
5922 	}
5923       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5924 	{
5925 	  basic_block bb = gimple_bb (e->call_stmt);
5926 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5927 							 bb);
5928 	  e->count = bb->count;
5929 	}
5930     }
5931 
5932   free_dominance_info (CDI_DOMINATORS);
5933   free_dominance_info (CDI_POST_DOMINATORS);
5934 
5935   gcc_assert (!id.debug_stmts.exists ());
5936   pop_cfun ();
5937   return;
5938 }
5939 
5940 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5941    the callee and return the inlined body on success.  */
5942 
5943 tree
5944 maybe_inline_call_in_expr (tree exp)
5945 {
5946   tree fn = get_callee_fndecl (exp);
5947 
5948   /* We can only try to inline "const" functions.  */
5949   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5950     {
5951       call_expr_arg_iterator iter;
5952       copy_body_data id;
5953       tree param, arg, t;
5954       hash_map<tree, tree> decl_map;
5955 
5956       /* Remap the parameters.  */
5957       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5958 	   param;
5959 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5960 	decl_map.put (param, arg);
5961 
5962       memset (&id, 0, sizeof (id));
5963       id.src_fn = fn;
5964       id.dst_fn = current_function_decl;
5965       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5966       id.decl_map = &decl_map;
5967 
5968       id.copy_decl = copy_decl_no_change;
5969       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5970       id.transform_new_cfg = false;
5971       id.transform_return_to_modify = true;
5972       id.transform_parameter = true;
5973       id.transform_lang_insert_block = NULL;
5974 
5975       /* Make sure not to unshare trees behind the front-end's back
5976 	 since front-end specific mechanisms may rely on sharing.  */
5977       id.regimplify = false;
5978       id.do_not_unshare = true;
5979 
5980       /* We're not inside any EH region.  */
5981       id.eh_lp_nr = 0;
5982 
5983       t = copy_tree_body (&id);
5984 
5985       /* We can only return something suitable for use in a GENERIC
5986 	 expression tree.  */
5987       if (TREE_CODE (t) == MODIFY_EXPR)
5988 	return TREE_OPERAND (t, 1);
5989     }
5990 
5991    return NULL_TREE;
5992 }
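
/* An illustrative sketch (hypothetical, not taken from the original sources):
   given a GENERIC call such as

	 y = small_const_fn (x);

   where SMALL_CONST_FN is "const" and still has DECL_SAVED_TREE, the routine
   above copies its body with X substituted for the parameter and hands back
   the right-hand side of the resulting MODIFY_EXPR, if any.  */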
5993 
5994 /* Duplicate a type, fields and all.  */
5995 
5996 tree
5997 build_duplicate_type (tree type)
5998 {
5999   struct copy_body_data id;
6000 
6001   memset (&id, 0, sizeof (id));
6002   id.src_fn = current_function_decl;
6003   id.dst_fn = current_function_decl;
6004   id.src_cfun = cfun;
6005   id.decl_map = new hash_map<tree, tree>;
6006   id.debug_map = NULL;
6007   id.copy_decl = copy_decl_no_change;
6008 
6009   type = remap_type_1 (type, &id);
6010 
6011   delete id.decl_map;
6012   if (id.debug_map)
6013     delete id.debug_map;
6014 
6015   TYPE_CANONICAL (type) = type;
6016 
6017   return type;
6018 }
6019 
6020 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6021    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6022    evaluation.  */
6023 
6024 tree
6025 copy_fn (tree fn, tree& parms, tree& result)
6026 {
6027   copy_body_data id;
6028   tree param;
6029   hash_map<tree, tree> decl_map;
6030 
6031   tree *p = &parms;
6032   *p = NULL_TREE;
6033 
6034   memset (&id, 0, sizeof (id));
6035   id.src_fn = fn;
6036   id.dst_fn = current_function_decl;
6037   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6038   id.decl_map = &decl_map;
6039 
6040   id.copy_decl = copy_decl_no_change;
6041   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6042   id.transform_new_cfg = false;
6043   id.transform_return_to_modify = false;
6044   id.transform_parameter = true;
6045   id.transform_lang_insert_block = NULL;
6046 
6047   /* Make sure not to unshare trees behind the front-end's back
6048      since front-end specific mechanisms may rely on sharing.  */
6049   id.regimplify = false;
6050   id.do_not_unshare = true;
6051 
6052   /* We're not inside any EH region.  */
6053   id.eh_lp_nr = 0;
6054 
6055   /* Remap the parameters and result and return them to the caller.  */
6056   for (param = DECL_ARGUMENTS (fn);
6057        param;
6058        param = DECL_CHAIN (param))
6059     {
6060       *p = remap_decl (param, &id);
6061       p = &DECL_CHAIN (*p);
6062     }
6063 
6064   if (DECL_RESULT (fn))
6065     result = remap_decl (DECL_RESULT (fn), &id);
6066   else
6067     result = NULL_TREE;
6068 
6069   return copy_tree_body (&id);
6070 }
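
/* A usage sketch (hypothetical, not taken from the original sources): the C++
   constexpr evaluator can obtain a private copy of a function body via

	 tree parms, result;
	 tree body = copy_fn (fndecl, parms, result);

   and then evaluate BODY with PARMS bound to the actual argument values and
   RESULT receiving the function's return value.  */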
6071