xref: /dflybsd-src/contrib/gcc-8.0/gcc/tree-inline.c (revision 38fd149817dfbff97799f62fcb70be98c4e32523)
1*38fd1498Szrj /* Tree inlining.
2*38fd1498Szrj    Copyright (C) 2001-2018 Free Software Foundation, Inc.
3*38fd1498Szrj    Contributed by Alexandre Oliva <aoliva@redhat.com>
4*38fd1498Szrj 
5*38fd1498Szrj This file is part of GCC.
6*38fd1498Szrj 
7*38fd1498Szrj GCC is free software; you can redistribute it and/or modify
8*38fd1498Szrj it under the terms of the GNU General Public License as published by
9*38fd1498Szrj the Free Software Foundation; either version 3, or (at your option)
10*38fd1498Szrj any later version.
11*38fd1498Szrj 
12*38fd1498Szrj GCC is distributed in the hope that it will be useful,
13*38fd1498Szrj but WITHOUT ANY WARRANTY; without even the implied warranty of
14*38fd1498Szrj MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15*38fd1498Szrj GNU General Public License for more details.
16*38fd1498Szrj 
17*38fd1498Szrj You should have received a copy of the GNU General Public License
18*38fd1498Szrj along with GCC; see the file COPYING3.  If not see
19*38fd1498Szrj <http://www.gnu.org/licenses/>.  */
20*38fd1498Szrj 
21*38fd1498Szrj #include "config.h"
22*38fd1498Szrj #include "system.h"
23*38fd1498Szrj #include "coretypes.h"
24*38fd1498Szrj #include "backend.h"
25*38fd1498Szrj #include "target.h"
26*38fd1498Szrj #include "rtl.h"
27*38fd1498Szrj #include "tree.h"
28*38fd1498Szrj #include "gimple.h"
29*38fd1498Szrj #include "cfghooks.h"
30*38fd1498Szrj #include "tree-pass.h"
31*38fd1498Szrj #include "ssa.h"
32*38fd1498Szrj #include "cgraph.h"
33*38fd1498Szrj #include "tree-pretty-print.h"
34*38fd1498Szrj #include "diagnostic-core.h"
35*38fd1498Szrj #include "gimple-predict.h"
36*38fd1498Szrj #include "fold-const.h"
37*38fd1498Szrj #include "stor-layout.h"
38*38fd1498Szrj #include "calls.h"
39*38fd1498Szrj #include "tree-inline.h"
40*38fd1498Szrj #include "langhooks.h"
41*38fd1498Szrj #include "cfganal.h"
42*38fd1498Szrj #include "tree-iterator.h"
43*38fd1498Szrj #include "intl.h"
44*38fd1498Szrj #include "gimple-fold.h"
45*38fd1498Szrj #include "tree-eh.h"
46*38fd1498Szrj #include "gimplify.h"
47*38fd1498Szrj #include "gimple-iterator.h"
48*38fd1498Szrj #include "gimplify-me.h"
49*38fd1498Szrj #include "gimple-walk.h"
50*38fd1498Szrj #include "tree-cfg.h"
51*38fd1498Szrj #include "tree-into-ssa.h"
52*38fd1498Szrj #include "tree-dfa.h"
53*38fd1498Szrj #include "tree-ssa.h"
54*38fd1498Szrj #include "except.h"
55*38fd1498Szrj #include "debug.h"
56*38fd1498Szrj #include "params.h"
57*38fd1498Szrj #include "value-prof.h"
58*38fd1498Szrj #include "cfgloop.h"
59*38fd1498Szrj #include "builtins.h"
60*38fd1498Szrj #include "tree-chkp.h"
61*38fd1498Szrj #include "stringpool.h"
62*38fd1498Szrj #include "attribs.h"
63*38fd1498Szrj #include "sreal.h"
64*38fd1498Szrj 
65*38fd1498Szrj /* I'm not real happy about this, but we need to handle gimple and
66*38fd1498Szrj    non-gimple trees.  */
67*38fd1498Szrj 
68*38fd1498Szrj /* Inlining, Cloning, Versioning, Parallelization
69*38fd1498Szrj 
70*38fd1498Szrj    Inlining: a function body is duplicated, but the PARM_DECLs are
71*38fd1498Szrj    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72*38fd1498Szrj    MODIFY_EXPRs that store to a dedicated returned-value variable.
73*38fd1498Szrj    The duplicated eh_region info of the copy will later be appended
74*38fd1498Szrj    to the info for the caller; the eh_region info in copied throwing
75*38fd1498Szrj    statements and RESX statements are adjusted accordingly.
76*38fd1498Szrj 
77*38fd1498Szrj    Cloning: (only in C++) We have one body for a con/de/structor, and
78*38fd1498Szrj    multiple function decls, each with a unique parameter list.
79*38fd1498Szrj    Duplicate the body, using the given splay tree; some parameters
80*38fd1498Szrj    will become constants (like 0 or 1).
81*38fd1498Szrj 
82*38fd1498Szrj    Versioning: a function body is duplicated and the result is a new
83*38fd1498Szrj    function rather than into blocks of an existing function as with
84*38fd1498Szrj    inlining.  Some parameters will become constants.
85*38fd1498Szrj 
86*38fd1498Szrj    Parallelization: a region of a function is duplicated resulting in
87*38fd1498Szrj    a new function.  Variables may be replaced with complex expressions
88*38fd1498Szrj    to enable shared variable semantics.
89*38fd1498Szrj 
90*38fd1498Szrj    All of these will simultaneously look up any callgraph edges.  If
91*38fd1498Szrj    we're going to inline the duplicated function body, and the given
92*38fd1498Szrj    function has some cloned callgraph nodes (one for each place this
93*38fd1498Szrj    function will be inlined) those callgraph edges will be duplicated.
94*38fd1498Szrj    If we're cloning the body, those callgraph edges will be
95*38fd1498Szrj    updated to point into the new body.  (Note that the original
96*38fd1498Szrj    callgraph node and edge list will not be altered.)
97*38fd1498Szrj 
98*38fd1498Szrj    See the CALL_EXPR handling case in copy_tree_body_r ().  */
99*38fd1498Szrj 
100*38fd1498Szrj /* To Do:
101*38fd1498Szrj 
102*38fd1498Szrj    o In order to make inlining-on-trees work, we pessimized
103*38fd1498Szrj      function-local static constants.  In particular, they are now
104*38fd1498Szrj      always output, even when not addressed.  Fix this by treating
105*38fd1498Szrj      function-local static constants just like global static
106*38fd1498Szrj      constants; the back-end already knows not to output them if they
107*38fd1498Szrj      are not needed.
108*38fd1498Szrj 
109*38fd1498Szrj    o Provide heuristics to clamp inlining of recursive template
110*38fd1498Szrj      calls?  */
111*38fd1498Szrj 
112*38fd1498Szrj 
113*38fd1498Szrj /* Weights that estimate_num_insns uses to estimate the size of the
114*38fd1498Szrj    produced code.  */
115*38fd1498Szrj 
116*38fd1498Szrj eni_weights eni_size_weights;
117*38fd1498Szrj 
118*38fd1498Szrj /* Weights that estimate_num_insns uses to estimate the time necessary
119*38fd1498Szrj    to execute the produced code.  */
120*38fd1498Szrj 
121*38fd1498Szrj eni_weights eni_time_weights;
122*38fd1498Szrj 
123*38fd1498Szrj /* Prototypes.  */
124*38fd1498Szrj 
125*38fd1498Szrj static tree declare_return_variable (copy_body_data *, tree, tree, tree,
126*38fd1498Szrj 				     basic_block);
127*38fd1498Szrj static void remap_block (tree *, copy_body_data *);
128*38fd1498Szrj static void copy_bind_expr (tree *, int *, copy_body_data *);
129*38fd1498Szrj static void declare_inline_vars (tree, tree);
130*38fd1498Szrj static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131*38fd1498Szrj static void prepend_lexical_block (tree current_block, tree new_block);
132*38fd1498Szrj static tree copy_decl_to_var (tree, copy_body_data *);
133*38fd1498Szrj static tree copy_result_decl_to_var (tree, copy_body_data *);
134*38fd1498Szrj static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135*38fd1498Szrj static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136*38fd1498Szrj static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
137*38fd1498Szrj static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138*38fd1498Szrj 
139*38fd1498Szrj /* Insert a tree->tree mapping for ID.  Although the name suggests
140*38fd1498Szrj    that the trees should be variables, it is used for more than that.  */
141*38fd1498Szrj 
142*38fd1498Szrj void
143*38fd1498Szrj insert_decl_map (copy_body_data *id, tree key, tree value)
144*38fd1498Szrj {
  /* Record the primary KEY -> VALUE replacement in ID's decl map.  */
145*38fd1498Szrj   id->decl_map->put (key, value);
146*38fd1498Szrj 
147*38fd1498Szrj   /* Always insert an identity map as well.  If we see this same new
148*38fd1498Szrj      node again, we won't want to duplicate it a second time.  */
149*38fd1498Szrj   if (key != value)
150*38fd1498Szrj     id->decl_map->put (value, value);
151*38fd1498Szrj }
152*38fd1498Szrj 
153*38fd1498Szrj /* Insert a tree->tree mapping for ID.  This is only used for
154*38fd1498Szrj    variables.  */
155*38fd1498Szrj 
156*38fd1498Szrj static void
157*38fd1498Szrj insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158*38fd1498Szrj {
  /* Debug decl maps only make sense once the source function is in
     SSA form.  */
159*38fd1498Szrj   if (!gimple_in_ssa_p (id->src_cfun))
160*38fd1498Szrj     return;
161*38fd1498Szrj 
  /* No point in recording debug bindings when var-tracking assignments
     are disabled for the destination function.  */
162*38fd1498Szrj   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
163*38fd1498Szrj     return;
164*38fd1498Szrj 
  /* Only record KEY if it is something a debug bind can refer to.  */
165*38fd1498Szrj   if (!target_for_debug_bind (key))
166*38fd1498Szrj     return;
167*38fd1498Szrj 
168*38fd1498Szrj   gcc_assert (TREE_CODE (key) == PARM_DECL);
169*38fd1498Szrj   gcc_assert (VAR_P (value));
170*38fd1498Szrj 
  /* The debug map is allocated lazily, on first use.  */
171*38fd1498Szrj   if (!id->debug_map)
172*38fd1498Szrj     id->debug_map = new hash_map<tree, tree>;
173*38fd1498Szrj 
174*38fd1498Szrj   id->debug_map->put (key, value);
175*38fd1498Szrj }
176*38fd1498Szrj 
177*38fd1498Szrj /* If nonzero, we're remapping the contents of inlined debug
178*38fd1498Szrj    statements.  If negative, an error has occurred, such as a
179*38fd1498Szrj    reference to a variable that isn't available in the inlined
180*38fd1498Szrj    context.  */
181*38fd1498Szrj static int processing_debug_stmt = 0;
182*38fd1498Szrj 
183*38fd1498Szrj /* Construct new SSA name for old NAME. ID is the inline context.  */
184*38fd1498Szrj 
185*38fd1498Szrj static tree
186*38fd1498Szrj remap_ssa_name (tree name, copy_body_data *id)
187*38fd1498Szrj {
188*38fd1498Szrj   tree new_tree, var;
189*38fd1498Szrj   tree *n;
190*38fd1498Szrj 
191*38fd1498Szrj   gcc_assert (TREE_CODE (name) == SSA_NAME);
192*38fd1498Szrj 
  /* Reuse an existing mapping if NAME was already remapped.  */
193*38fd1498Szrj   n = id->decl_map->get (name);
194*38fd1498Szrj   if (n)
195*38fd1498Szrj     return unshare_expr (*n);
196*38fd1498Szrj 
  /* Inside a debug statement we must not create new SSA names.  For a
     default definition of a parameter we can instead emit a source bind
     of a DEBUG_EXPR_DECL to the remapped parameter; anything else makes
     the debug statement unrepresentable and we flag the failure by
     setting processing_debug_stmt to -1.  */
197*38fd1498Szrj   if (processing_debug_stmt)
198*38fd1498Szrj     {
199*38fd1498Szrj       if (SSA_NAME_IS_DEFAULT_DEF (name)
200*38fd1498Szrj 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
201*38fd1498Szrj 	  && id->entry_bb == NULL
202*38fd1498Szrj 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
203*38fd1498Szrj 	{
204*38fd1498Szrj 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
205*38fd1498Szrj 	  gimple *def_temp;
206*38fd1498Szrj 	  gimple_stmt_iterator gsi;
207*38fd1498Szrj 	  tree val = SSA_NAME_VAR (name);
208*38fd1498Szrj 
209*38fd1498Szrj 	  n = id->decl_map->get (val);
210*38fd1498Szrj 	  if (n != NULL)
211*38fd1498Szrj 	    val = *n;
	  /* The parameter must still map to a PARM_DECL to be usable in
	     a source bind; otherwise give up on this debug stmt.  */
212*38fd1498Szrj 	  if (TREE_CODE (val) != PARM_DECL)
213*38fd1498Szrj 	    {
214*38fd1498Szrj 	      processing_debug_stmt = -1;
215*38fd1498Szrj 	      return name;
216*38fd1498Szrj 	    }
217*38fd1498Szrj 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
218*38fd1498Szrj 	  DECL_ARTIFICIAL (vexpr) = 1;
219*38fd1498Szrj 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
220*38fd1498Szrj 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
	  /* Insert the bind right after the labels of the block that
	     follows the entry block.  */
221*38fd1498Szrj 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
222*38fd1498Szrj 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
223*38fd1498Szrj 	  return vexpr;
224*38fd1498Szrj 	}
225*38fd1498Szrj 
226*38fd1498Szrj       processing_debug_stmt = -1;
227*38fd1498Szrj       return name;
228*38fd1498Szrj     }
229*38fd1498Szrj 
230*38fd1498Szrj   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
231*38fd1498Szrj   var = SSA_NAME_VAR (name);
232*38fd1498Szrj   if (!var
233*38fd1498Szrj       || (!SSA_NAME_IS_DEFAULT_DEF (name)
234*38fd1498Szrj 	  && VAR_P (var)
235*38fd1498Szrj 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
236*38fd1498Szrj 	  && DECL_ARTIFICIAL (var)
237*38fd1498Szrj 	  && DECL_IGNORED_P (var)
238*38fd1498Szrj 	  && !DECL_NAME (var)))
239*38fd1498Szrj     {
240*38fd1498Szrj       struct ptr_info_def *pi;
241*38fd1498Szrj       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      /* Keep the identifier of an anonymous name for better dumps.  */
242*38fd1498Szrj       if (!var && SSA_NAME_IDENTIFIER (name))
243*38fd1498Szrj 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
244*38fd1498Szrj       insert_decl_map (id, name, new_tree);
245*38fd1498Szrj       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
246*38fd1498Szrj 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
247*38fd1498Szrj       /* At least IPA points-to info can be directly transferred.  */
248*38fd1498Szrj       if (id->src_cfun->gimple_df
249*38fd1498Szrj 	  && id->src_cfun->gimple_df->ipa_pta
250*38fd1498Szrj 	  && POINTER_TYPE_P (TREE_TYPE (name))
251*38fd1498Szrj 	  && (pi = SSA_NAME_PTR_INFO (name))
252*38fd1498Szrj 	  && !pi->pt.anything)
253*38fd1498Szrj 	{
254*38fd1498Szrj 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
255*38fd1498Szrj 	  new_pi->pt = pi->pt;
256*38fd1498Szrj 	}
257*38fd1498Szrj       return new_tree;
258*38fd1498Szrj     }
259*38fd1498Szrj 
260*38fd1498Szrj   /* Do not set DEF_STMT yet as statement is not copied yet. We do that
261*38fd1498Szrj      in copy_bb.  */
262*38fd1498Szrj   new_tree = remap_decl (var, id);
263*38fd1498Szrj 
264*38fd1498Szrj   /* We might've substituted constant or another SSA_NAME for
265*38fd1498Szrj      the variable.
266*38fd1498Szrj 
267*38fd1498Szrj      Replace the SSA name representing RESULT_DECL by variable during
268*38fd1498Szrj      inlining:  this saves us from need to introduce PHI node in a case
269*38fd1498Szrj      return value is just partly initialized.  */
270*38fd1498Szrj   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
271*38fd1498Szrj       && (!SSA_NAME_VAR (name)
272*38fd1498Szrj 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
273*38fd1498Szrj 	  || !id->transform_return_to_modify))
274*38fd1498Szrj     {
275*38fd1498Szrj       struct ptr_info_def *pi;
276*38fd1498Szrj       new_tree = make_ssa_name (new_tree);
277*38fd1498Szrj       insert_decl_map (id, name, new_tree);
278*38fd1498Szrj       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
279*38fd1498Szrj 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
280*38fd1498Szrj       /* At least IPA points-to info can be directly transferred.  */
281*38fd1498Szrj       if (id->src_cfun->gimple_df
282*38fd1498Szrj 	  && id->src_cfun->gimple_df->ipa_pta
283*38fd1498Szrj 	  && POINTER_TYPE_P (TREE_TYPE (name))
284*38fd1498Szrj 	  && (pi = SSA_NAME_PTR_INFO (name))
285*38fd1498Szrj 	  && !pi->pt.anything)
286*38fd1498Szrj 	{
287*38fd1498Szrj 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
288*38fd1498Szrj 	  new_pi->pt = pi->pt;
289*38fd1498Szrj 	}
290*38fd1498Szrj       if (SSA_NAME_IS_DEFAULT_DEF (name))
291*38fd1498Szrj 	{
292*38fd1498Szrj 	  /* By inlining function having uninitialized variable, we might
293*38fd1498Szrj 	     extend the lifetime (variable might get reused).  This cause
294*38fd1498Szrj 	     ICE in the case we end up extending lifetime of SSA name across
295*38fd1498Szrj 	     abnormal edge, but also increase register pressure.
296*38fd1498Szrj 
297*38fd1498Szrj 	     We simply initialize all uninitialized vars by 0 except
298*38fd1498Szrj 	     for case we are inlining to very first BB.  We can avoid
299*38fd1498Szrj 	     this for all BBs that are not inside strongly connected
300*38fd1498Szrj 	     regions of the CFG, but this is expensive to test.  */
301*38fd1498Szrj 	  if (id->entry_bb
302*38fd1498Szrj 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
303*38fd1498Szrj 	      && (!SSA_NAME_VAR (name)
304*38fd1498Szrj 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
305*38fd1498Szrj 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
306*38fd1498Szrj 					     0)->dest
307*38fd1498Szrj 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
308*38fd1498Szrj 	    {
	      /* Emit "new_tree = 0" at the end of the entry block.  */
309*38fd1498Szrj 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
310*38fd1498Szrj 	      gimple *init_stmt;
311*38fd1498Szrj 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
312*38fd1498Szrj 
313*38fd1498Szrj 	      init_stmt = gimple_build_assign (new_tree, zero);
314*38fd1498Szrj 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
315*38fd1498Szrj 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
316*38fd1498Szrj 	    }
317*38fd1498Szrj 	  else
318*38fd1498Szrj 	    {
	      /* Keep the copy a default definition of its new var.  */
319*38fd1498Szrj 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
320*38fd1498Szrj 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
321*38fd1498Szrj 	    }
322*38fd1498Szrj 	}
323*38fd1498Szrj     }
324*38fd1498Szrj   else
    /* NAME was replaced by a constant, another SSA name, or the return
       variable; record the replacement directly.  */
325*38fd1498Szrj     insert_decl_map (id, name, new_tree);
326*38fd1498Szrj   return new_tree;
327*38fd1498Szrj }
328*38fd1498Szrj 
329*38fd1498Szrj /* Remap DECL during the copying of the BLOCK tree for the function.  */
330*38fd1498Szrj 
331*38fd1498Szrj tree
332*38fd1498Szrj remap_decl (tree decl, copy_body_data *id)
333*38fd1498Szrj {
334*38fd1498Szrj   tree *n;
335*38fd1498Szrj 
336*38fd1498Szrj   /* We only remap local variables in the current function.  */
337*38fd1498Szrj 
338*38fd1498Szrj   /* See if we have remapped this declaration.  */
339*38fd1498Szrj 
340*38fd1498Szrj   n = id->decl_map->get (decl);
341*38fd1498Szrj 
  /* Debug statements must never cause new decls to be created; flag
     the failure and hand back the original decl instead.  */
342*38fd1498Szrj   if (!n && processing_debug_stmt)
343*38fd1498Szrj     {
344*38fd1498Szrj       processing_debug_stmt = -1;
345*38fd1498Szrj       return decl;
346*38fd1498Szrj     }
347*38fd1498Szrj 
348*38fd1498Szrj   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
349*38fd1498Szrj      necessary DECLs have already been remapped and we do not want to duplicate
350*38fd1498Szrj      a decl coming from outside of the sequence we are copying.  */
351*38fd1498Szrj   if (!n
352*38fd1498Szrj       && id->prevent_decl_creation_for_types
353*38fd1498Szrj       && id->remapping_type_depth > 0
354*38fd1498Szrj       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
355*38fd1498Szrj     return decl;
356*38fd1498Szrj 
357*38fd1498Szrj   /* If we didn't already have an equivalent for this declaration, create one
358*38fd1498Szrj      now.  */
359*38fd1498Szrj   if (!n)
360*38fd1498Szrj     {
361*38fd1498Szrj       /* Make a copy of the variable or label.  */
362*38fd1498Szrj       tree t = id->copy_decl (decl, id);
363*38fd1498Szrj 
364*38fd1498Szrj       /* Remember it, so that if we encounter this local entity again
365*38fd1498Szrj 	 we can reuse this copy.  Do this early because remap_type may
366*38fd1498Szrj 	 need this decl for TYPE_STUB_DECL.  */
367*38fd1498Szrj       insert_decl_map (id, decl, t);
368*38fd1498Szrj 
      /* The copy_decl callback may return something that is not a decl;
	 in that case there is nothing further to remap.  */
369*38fd1498Szrj       if (!DECL_P (t))
370*38fd1498Szrj 	return t;
371*38fd1498Szrj 
372*38fd1498Szrj       /* Remap types, if necessary.  */
373*38fd1498Szrj       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
374*38fd1498Szrj       if (TREE_CODE (t) == TYPE_DECL)
375*38fd1498Szrj 	{
376*38fd1498Szrj 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
377*38fd1498Szrj 
378*38fd1498Szrj 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
379*38fd1498Szrj 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
380*38fd1498Szrj 	     is not set on the TYPE_DECL, for example in LTO mode.  */
381*38fd1498Szrj 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
382*38fd1498Szrj 	    {
383*38fd1498Szrj 	      tree x = build_variant_type_copy (TREE_TYPE (t));
384*38fd1498Szrj 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
385*38fd1498Szrj 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
386*38fd1498Szrj 	      DECL_ORIGINAL_TYPE (t) = x;
387*38fd1498Szrj 	    }
388*38fd1498Szrj 	}
389*38fd1498Szrj 
390*38fd1498Szrj       /* Remap sizes as necessary.  */
391*38fd1498Szrj       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
392*38fd1498Szrj       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
393*38fd1498Szrj 
394*38fd1498Szrj       /* If fields, do likewise for offset and qualifier.  */
395*38fd1498Szrj       if (TREE_CODE (t) == FIELD_DECL)
396*38fd1498Szrj 	{
397*38fd1498Szrj 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
398*38fd1498Szrj 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
399*38fd1498Szrj 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
400*38fd1498Szrj 	}
401*38fd1498Szrj 
402*38fd1498Szrj       return t;
403*38fd1498Szrj     }
404*38fd1498Szrj 
  /* DECL was remapped before; return the mapping, unsharing it unless
     the caller asked us not to.  */
405*38fd1498Szrj   if (id->do_not_unshare)
406*38fd1498Szrj     return *n;
407*38fd1498Szrj   else
408*38fd1498Szrj     return unshare_expr (*n);
409*38fd1498Szrj }
410*38fd1498Szrj 
/* Worker for remap_type.  Build and register a remapped copy of TYPE
   in the context of ID; called only for types remap_type has found to
   need remapping.  */

411*38fd1498Szrj static tree
412*38fd1498Szrj remap_type_1 (tree type, copy_body_data *id)
413*38fd1498Szrj {
414*38fd1498Szrj   tree new_tree, t;
415*38fd1498Szrj 
416*38fd1498Szrj   /* We do need a copy.  build and register it now.  If this is a pointer or
417*38fd1498Szrj      reference type, remap the designated type and make a new pointer or
418*38fd1498Szrj      reference type.  */
419*38fd1498Szrj   if (TREE_CODE (type) == POINTER_TYPE)
420*38fd1498Szrj     {
421*38fd1498Szrj       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
422*38fd1498Szrj 					 TYPE_MODE (type),
423*38fd1498Szrj 					 TYPE_REF_CAN_ALIAS_ALL (type));
424*38fd1498Szrj       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
425*38fd1498Szrj 	new_tree = build_type_attribute_qual_variant (new_tree,
426*38fd1498Szrj 						      TYPE_ATTRIBUTES (type),
427*38fd1498Szrj 						      TYPE_QUALS (type));
428*38fd1498Szrj       insert_decl_map (id, type, new_tree);
429*38fd1498Szrj       return new_tree;
430*38fd1498Szrj     }
431*38fd1498Szrj   else if (TREE_CODE (type) == REFERENCE_TYPE)
432*38fd1498Szrj     {
433*38fd1498Szrj       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
434*38fd1498Szrj 					    TYPE_MODE (type),
435*38fd1498Szrj 					    TYPE_REF_CAN_ALIAS_ALL (type));
436*38fd1498Szrj       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
437*38fd1498Szrj 	new_tree = build_type_attribute_qual_variant (new_tree,
438*38fd1498Szrj 						      TYPE_ATTRIBUTES (type),
439*38fd1498Szrj 						      TYPE_QUALS (type));
440*38fd1498Szrj       insert_decl_map (id, type, new_tree);
441*38fd1498Szrj       return new_tree;
442*38fd1498Szrj     }
443*38fd1498Szrj   else
444*38fd1498Szrj     new_tree = copy_node (type);
445*38fd1498Szrj 
  /* Register the mapping before recursing so cycles terminate.  */
446*38fd1498Szrj   insert_decl_map (id, type, new_tree);
447*38fd1498Szrj 
448*38fd1498Szrj   /* This is a new type, not a copy of an old type.  Need to reassociate
449*38fd1498Szrj      variants.  We can handle everything except the main variant lazily.  */
450*38fd1498Szrj   t = TYPE_MAIN_VARIANT (type);
451*38fd1498Szrj   if (type != t)
452*38fd1498Szrj     {
453*38fd1498Szrj       t = remap_type (t, id);
454*38fd1498Szrj       TYPE_MAIN_VARIANT (new_tree) = t;
455*38fd1498Szrj       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
456*38fd1498Szrj       TYPE_NEXT_VARIANT (t) = new_tree;
457*38fd1498Szrj     }
458*38fd1498Szrj   else
459*38fd1498Szrj     {
460*38fd1498Szrj       TYPE_MAIN_VARIANT (new_tree) = new_tree;
461*38fd1498Szrj       TYPE_NEXT_VARIANT (new_tree) = NULL;
462*38fd1498Szrj     }
463*38fd1498Szrj 
464*38fd1498Szrj   if (TYPE_STUB_DECL (type))
465*38fd1498Szrj     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
466*38fd1498Szrj 
467*38fd1498Szrj   /* Lazily create pointer and reference types.  */
468*38fd1498Szrj   TYPE_POINTER_TO (new_tree) = NULL;
469*38fd1498Szrj   TYPE_REFERENCE_TO (new_tree) = NULL;
470*38fd1498Szrj 
471*38fd1498Szrj   /* Copy all types that may contain references to local variables; be sure to
472*38fd1498Szrj      preserve sharing in between type and its main variant when possible.  */
473*38fd1498Szrj   switch (TREE_CODE (new_tree))
474*38fd1498Szrj     {
475*38fd1498Szrj     case INTEGER_TYPE:
476*38fd1498Szrj     case REAL_TYPE:
477*38fd1498Szrj     case FIXED_POINT_TYPE:
478*38fd1498Szrj     case ENUMERAL_TYPE:
479*38fd1498Szrj     case BOOLEAN_TYPE:
480*38fd1498Szrj       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
481*38fd1498Szrj 	{
482*38fd1498Szrj 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
483*38fd1498Szrj 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
484*38fd1498Szrj 
485*38fd1498Szrj 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
486*38fd1498Szrj 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
487*38fd1498Szrj 	}
488*38fd1498Szrj       else
489*38fd1498Szrj 	{
490*38fd1498Szrj 	  t = TYPE_MIN_VALUE (new_tree);
491*38fd1498Szrj 	  if (t && TREE_CODE (t) != INTEGER_CST)
492*38fd1498Szrj 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
493*38fd1498Szrj 
494*38fd1498Szrj 	  t = TYPE_MAX_VALUE (new_tree);
495*38fd1498Szrj 	  if (t && TREE_CODE (t) != INTEGER_CST)
496*38fd1498Szrj 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
497*38fd1498Szrj 	}
498*38fd1498Szrj       return new_tree;
499*38fd1498Szrj 
500*38fd1498Szrj     case FUNCTION_TYPE:
501*38fd1498Szrj       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
502*38fd1498Szrj 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
503*38fd1498Szrj 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
504*38fd1498Szrj       else
505*38fd1498Szrj         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
506*38fd1498Szrj       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507*38fd1498Szrj 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
508*38fd1498Szrj 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
509*38fd1498Szrj       else
510*38fd1498Szrj         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
511*38fd1498Szrj       return new_tree;
512*38fd1498Szrj 
513*38fd1498Szrj     case ARRAY_TYPE:
514*38fd1498Szrj       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
515*38fd1498Szrj 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
516*38fd1498Szrj 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
517*38fd1498Szrj       else
518*38fd1498Szrj 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
519*38fd1498Szrj 
520*38fd1498Szrj       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
521*38fd1498Szrj 	{
522*38fd1498Szrj 	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
523*38fd1498Szrj 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
524*38fd1498Szrj 	}
525*38fd1498Szrj       else
526*38fd1498Szrj 	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
527*38fd1498Szrj       break;
528*38fd1498Szrj 
529*38fd1498Szrj     case RECORD_TYPE:
530*38fd1498Szrj     case UNION_TYPE:
531*38fd1498Szrj     case QUAL_UNION_TYPE:
532*38fd1498Szrj       if (TYPE_MAIN_VARIANT (type) != type
533*38fd1498Szrj 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
534*38fd1498Szrj 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
535*38fd1498Szrj       else
536*38fd1498Szrj 	{
537*38fd1498Szrj 	  tree f, nf = NULL;
538*38fd1498Szrj 
	  /* Remap each field, rebuilding the chain (reversed, then
	     restored below with nreverse).  */
539*38fd1498Szrj 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
540*38fd1498Szrj 	    {
541*38fd1498Szrj 	      t = remap_decl (f, id);
542*38fd1498Szrj 	      DECL_CONTEXT (t) = new_tree;
543*38fd1498Szrj 	      DECL_CHAIN (t) = nf;
544*38fd1498Szrj 	      nf = t;
545*38fd1498Szrj 	    }
546*38fd1498Szrj 	  TYPE_FIELDS (new_tree) = nreverse (nf);
547*38fd1498Szrj 	}
548*38fd1498Szrj       break;
549*38fd1498Szrj 
550*38fd1498Szrj     case OFFSET_TYPE:
551*38fd1498Szrj     default:
552*38fd1498Szrj       /* Shouldn't have been thought variable sized.  */
553*38fd1498Szrj       gcc_unreachable ();
554*38fd1498Szrj     }
555*38fd1498Szrj 
556*38fd1498Szrj   /* All variants of type share the same size, so use the already remapped data.  */
557*38fd1498Szrj   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
558*38fd1498Szrj     {
559*38fd1498Szrj       tree s = TYPE_SIZE (type);
560*38fd1498Szrj       tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
561*38fd1498Szrj       tree su = TYPE_SIZE_UNIT (type);
562*38fd1498Szrj       tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
563*38fd1498Szrj       gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
564*38fd1498Szrj 			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
565*38fd1498Szrj 			   || s == mvs);
566*38fd1498Szrj       gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
567*38fd1498Szrj 			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
568*38fd1498Szrj 			   || su == mvsu);
569*38fd1498Szrj       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
570*38fd1498Szrj       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
571*38fd1498Szrj     }
572*38fd1498Szrj   else
573*38fd1498Szrj     {
574*38fd1498Szrj       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
575*38fd1498Szrj       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
576*38fd1498Szrj     }
577*38fd1498Szrj 
578*38fd1498Szrj   return new_tree;
579*38fd1498Szrj }
580*38fd1498Szrj 
/* Remap TYPE in the context of the copy described by ID, returning the
   remapped type.  Results are cached in ID's decl_map; only variably
   modified types actually get a new copy (via remap_type_1).  */

581*38fd1498Szrj tree
582*38fd1498Szrj remap_type (tree type, copy_body_data *id)
583*38fd1498Szrj {
584*38fd1498Szrj   tree *node;
585*38fd1498Szrj   tree tmp;
586*38fd1498Szrj 
587*38fd1498Szrj   if (type == NULL)
588*38fd1498Szrj     return type;
589*38fd1498Szrj 
590*38fd1498Szrj   /* See if we have remapped this type.  */
591*38fd1498Szrj   node = id->decl_map->get (type);
592*38fd1498Szrj   if (node)
593*38fd1498Szrj     return *node;
594*38fd1498Szrj 
595*38fd1498Szrj   /* The type only needs remapping if it's variably modified.  */
596*38fd1498Szrj   if (! variably_modified_type_p (type, id->src_fn))
597*38fd1498Szrj     {
      /* Cache the identity mapping so we don't re-test next time.  */
598*38fd1498Szrj       insert_decl_map (id, type, type);
599*38fd1498Szrj       return type;
600*38fd1498Szrj     }
601*38fd1498Szrj 
  /* Track recursion depth; remap_decl consults it to avoid creating
     decls while remapping types in some contexts.  */
602*38fd1498Szrj   id->remapping_type_depth++;
603*38fd1498Szrj   tmp = remap_type_1 (type, id);
604*38fd1498Szrj   id->remapping_type_depth--;
605*38fd1498Szrj 
606*38fd1498Szrj   return tmp;
607*38fd1498Szrj }
608*38fd1498Szrj 
609*38fd1498Szrj /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
610*38fd1498Szrj 
611*38fd1498Szrj static bool
612*38fd1498Szrj can_be_nonlocal (tree decl, copy_body_data *id)
613*38fd1498Szrj {
614*38fd1498Szrj   /* We cannot duplicate function decls.  */
615*38fd1498Szrj   if (TREE_CODE (decl) == FUNCTION_DECL)
616*38fd1498Szrj     return true;
617*38fd1498Szrj 
618*38fd1498Szrj   /* Local static vars must be non-local or we get multiple declaration
619*38fd1498Szrj      problems.  */
620*38fd1498Szrj   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
621*38fd1498Szrj     return true;
622*38fd1498Szrj 
623*38fd1498Szrj   return false;
624*38fd1498Szrj }
625*38fd1498Szrj 
/* Remap the variable chain DECLS in the context of ID and return the
   new, remapped chain.  Declarations that can stay non-local are not
   copied; when debug info wants them they are pushed onto
   *NONLOCALIZED_LIST instead.  */

626*38fd1498Szrj static tree
627*38fd1498Szrj remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
628*38fd1498Szrj 	     copy_body_data *id)
629*38fd1498Szrj {
630*38fd1498Szrj   tree old_var;
631*38fd1498Szrj   tree new_decls = NULL_TREE;
632*38fd1498Szrj 
633*38fd1498Szrj   /* Remap its variables.  */
634*38fd1498Szrj   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
635*38fd1498Szrj     {
636*38fd1498Szrj       tree new_var;
637*38fd1498Szrj 
638*38fd1498Szrj       if (can_be_nonlocal (old_var, id))
639*38fd1498Szrj 	{
640*38fd1498Szrj 	  /* We need to add this variable to the local decls as otherwise
641*38fd1498Szrj 	     nothing else will do so.  */
642*38fd1498Szrj 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
643*38fd1498Szrj 	    add_local_decl (cfun, old_var);
	  /* Record it for debug info unless it is ignored or debug
	     output is too terse to care.  */
644*38fd1498Szrj 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
645*38fd1498Szrj 	      && !DECL_IGNORED_P (old_var)
646*38fd1498Szrj 	      && nonlocalized_list)
647*38fd1498Szrj 	    vec_safe_push (*nonlocalized_list, old_var);
648*38fd1498Szrj 	  continue;
649*38fd1498Szrj 	}
650*38fd1498Szrj 
651*38fd1498Szrj       /* Remap the variable.  */
652*38fd1498Szrj       new_var = remap_decl (old_var, id);
653*38fd1498Szrj 
654*38fd1498Szrj       /* If we didn't remap this variable, we can't mess with its
655*38fd1498Szrj 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
656*38fd1498Szrj 	 already declared somewhere else, so don't declare it here.  */
657*38fd1498Szrj 
658*38fd1498Szrj       if (new_var == id->retvar)
659*38fd1498Szrj 	;
660*38fd1498Szrj       else if (!new_var)
661*38fd1498Szrj         {
662*38fd1498Szrj 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
663*38fd1498Szrj 	      && !DECL_IGNORED_P (old_var)
664*38fd1498Szrj 	      && nonlocalized_list)
665*38fd1498Szrj 	    vec_safe_push (*nonlocalized_list, old_var);
666*38fd1498Szrj 	}
667*38fd1498Szrj       else
668*38fd1498Szrj 	{
669*38fd1498Szrj 	  gcc_assert (DECL_P (new_var));
	  /* Prepend to the new chain; the whole chain is reversed at
	     the end to restore the original order.  */
670*38fd1498Szrj 	  DECL_CHAIN (new_var) = new_decls;
671*38fd1498Szrj 	  new_decls = new_var;
672*38fd1498Szrj 
673*38fd1498Szrj 	  /* Also copy value-expressions.  */
674*38fd1498Szrj 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
675*38fd1498Szrj 	    {
676*38fd1498Szrj 	      tree tem = DECL_VALUE_EXPR (new_var);
677*38fd1498Szrj 	      bool old_regimplify = id->regimplify;
678*38fd1498Szrj 	      id->remapping_type_depth++;
679*38fd1498Szrj 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
680*38fd1498Szrj 	      id->remapping_type_depth--;
681*38fd1498Szrj 	      id->regimplify = old_regimplify;
682*38fd1498Szrj 	      SET_DECL_VALUE_EXPR (new_var, tem);
683*38fd1498Szrj 	    }
684*38fd1498Szrj 	}
685*38fd1498Szrj     }
686*38fd1498Szrj 
687*38fd1498Szrj   return nreverse (new_decls);
688*38fd1498Szrj }
689*38fd1498Szrj 
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  /* Record the block the copy originates from.  */
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  /* Copy the nonlocalized-vars vector so remap_decls below can append
     to the new block's copy without touching the original.  */
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
  					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  /* Give the frontend a chance to register the new block, if it set
     up a hook for that.  */
  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
720*38fd1498Szrj 
721*38fd1498Szrj /* Copy the whole block tree and root it in id->block.  */
722*38fd1498Szrj static tree
723*38fd1498Szrj remap_blocks (tree block, copy_body_data *id)
724*38fd1498Szrj {
725*38fd1498Szrj   tree t;
726*38fd1498Szrj   tree new_tree = block;
727*38fd1498Szrj 
728*38fd1498Szrj   if (!block)
729*38fd1498Szrj     return NULL;
730*38fd1498Szrj 
731*38fd1498Szrj   remap_block (&new_tree, id);
732*38fd1498Szrj   gcc_assert (new_tree != block);
733*38fd1498Szrj   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
734*38fd1498Szrj     prepend_lexical_block (new_tree, remap_blocks (t, id));
735*38fd1498Szrj   /* Blocks are in arbitrary order, but make things slightly prettier and do
736*38fd1498Szrj      not swap order when producing a copy.  */
737*38fd1498Szrj   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
738*38fd1498Szrj   return new_tree;
739*38fd1498Szrj }
740*38fd1498Szrj 
741*38fd1498Szrj /* Remap the block tree rooted at BLOCK to nothing.  */
742*38fd1498Szrj static void
743*38fd1498Szrj remap_blocks_to_null (tree block, copy_body_data *id)
744*38fd1498Szrj {
745*38fd1498Szrj   tree t;
746*38fd1498Szrj   insert_decl_map (id, block, NULL_TREE);
747*38fd1498Szrj   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
748*38fd1498Szrj     remap_blocks_to_null (t, id);
749*38fd1498Szrj }
750*38fd1498Szrj 
751*38fd1498Szrj static void
752*38fd1498Szrj copy_statement_list (tree *tp)
753*38fd1498Szrj {
754*38fd1498Szrj   tree_stmt_iterator oi, ni;
755*38fd1498Szrj   tree new_tree;
756*38fd1498Szrj 
757*38fd1498Szrj   new_tree = alloc_stmt_list ();
758*38fd1498Szrj   ni = tsi_start (new_tree);
759*38fd1498Szrj   oi = tsi_start (*tp);
760*38fd1498Szrj   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
761*38fd1498Szrj   *tp = new_tree;
762*38fd1498Szrj 
763*38fd1498Szrj   for (; !tsi_end_p (oi); tsi_next (&oi))
764*38fd1498Szrj     {
765*38fd1498Szrj       tree stmt = tsi_stmt (oi);
766*38fd1498Szrj       if (TREE_CODE (stmt) == STATEMENT_LIST)
767*38fd1498Szrj 	/* This copy is not redundant; tsi_link_after will smash this
768*38fd1498Szrj 	   STATEMENT_LIST into the end of the one we're building, and we
769*38fd1498Szrj 	   don't want to do that with the original.  */
770*38fd1498Szrj 	copy_statement_list (&stmt);
771*38fd1498Szrj       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
772*38fd1498Szrj     }
773*38fd1498Szrj }
774*38fd1498Szrj 
775*38fd1498Szrj static void
776*38fd1498Szrj copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
777*38fd1498Szrj {
778*38fd1498Szrj   tree block = BIND_EXPR_BLOCK (*tp);
779*38fd1498Szrj   /* Copy (and replace) the statement.  */
780*38fd1498Szrj   copy_tree_r (tp, walk_subtrees, NULL);
781*38fd1498Szrj   if (block)
782*38fd1498Szrj     {
783*38fd1498Szrj       remap_block (&block, id);
784*38fd1498Szrj       BIND_EXPR_BLOCK (*tp) = block;
785*38fd1498Szrj     }
786*38fd1498Szrj 
787*38fd1498Szrj   if (BIND_EXPR_VARS (*tp))
788*38fd1498Szrj     /* This will remap a lot of the same decls again, but this should be
789*38fd1498Szrj        harmless.  */
790*38fd1498Szrj     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
791*38fd1498Szrj }
792*38fd1498Szrj 
793*38fd1498Szrj 
794*38fd1498Szrj /* Create a new gimple_seq by remapping all the statements in BODY
795*38fd1498Szrj    using the inlining information in ID.  */
796*38fd1498Szrj 
797*38fd1498Szrj static gimple_seq
798*38fd1498Szrj remap_gimple_seq (gimple_seq body, copy_body_data *id)
799*38fd1498Szrj {
800*38fd1498Szrj   gimple_stmt_iterator si;
801*38fd1498Szrj   gimple_seq new_body = NULL;
802*38fd1498Szrj 
803*38fd1498Szrj   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
804*38fd1498Szrj     {
805*38fd1498Szrj       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
806*38fd1498Szrj       gimple_seq_add_seq (&new_body, new_stmts);
807*38fd1498Szrj     }
808*38fd1498Szrj 
809*38fd1498Szrj   return new_body;
810*38fd1498Szrj }
811*38fd1498Szrj 
812*38fd1498Szrj 
813*38fd1498Szrj /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
814*38fd1498Szrj    block using the mapping information in ID.  */
815*38fd1498Szrj 
816*38fd1498Szrj static gimple *
817*38fd1498Szrj copy_gimple_bind (gbind *stmt, copy_body_data *id)
818*38fd1498Szrj {
819*38fd1498Szrj   gimple *new_bind;
820*38fd1498Szrj   tree new_block, new_vars;
821*38fd1498Szrj   gimple_seq body, new_body;
822*38fd1498Szrj 
823*38fd1498Szrj   /* Copy the statement.  Note that we purposely don't use copy_stmt
824*38fd1498Szrj      here because we need to remap statements as we copy.  */
825*38fd1498Szrj   body = gimple_bind_body (stmt);
826*38fd1498Szrj   new_body = remap_gimple_seq (body, id);
827*38fd1498Szrj 
828*38fd1498Szrj   new_block = gimple_bind_block (stmt);
829*38fd1498Szrj   if (new_block)
830*38fd1498Szrj     remap_block (&new_block, id);
831*38fd1498Szrj 
832*38fd1498Szrj   /* This will remap a lot of the same decls again, but this should be
833*38fd1498Szrj      harmless.  */
834*38fd1498Szrj   new_vars = gimple_bind_vars (stmt);
835*38fd1498Szrj   if (new_vars)
836*38fd1498Szrj     new_vars = remap_decls (new_vars, NULL, id);
837*38fd1498Szrj 
838*38fd1498Szrj   new_bind = gimple_build_bind (new_vars, new_body, new_block);
839*38fd1498Szrj 
840*38fd1498Szrj   return new_bind;
841*38fd1498Szrj }
842*38fd1498Szrj 
843*38fd1498Szrj /* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */
844*38fd1498Szrj 
845*38fd1498Szrj static bool
846*38fd1498Szrj is_parm (tree decl)
847*38fd1498Szrj {
848*38fd1498Szrj   if (TREE_CODE (decl) == SSA_NAME)
849*38fd1498Szrj     {
850*38fd1498Szrj       decl = SSA_NAME_VAR (decl);
851*38fd1498Szrj       if (!decl)
852*38fd1498Szrj 	return false;
853*38fd1498Szrj     }
854*38fd1498Szrj 
855*38fd1498Szrj   return (TREE_CODE (decl) == PARM_DECL);
856*38fd1498Szrj }
857*38fd1498Szrj 
858*38fd1498Szrj /* Remap the dependence CLIQUE from the source to the destination function
859*38fd1498Szrj    as specified in ID.  */
860*38fd1498Szrj 
861*38fd1498Szrj static unsigned short
862*38fd1498Szrj remap_dependence_clique (copy_body_data *id, unsigned short clique)
863*38fd1498Szrj {
864*38fd1498Szrj   if (clique == 0 || processing_debug_stmt)
865*38fd1498Szrj     return 0;
866*38fd1498Szrj   if (!id->dependence_map)
867*38fd1498Szrj     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
868*38fd1498Szrj   bool existed;
869*38fd1498Szrj   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
870*38fd1498Szrj   if (!existed)
871*38fd1498Szrj     newc = ++cfun->last_clique;
872*38fd1498Szrj   return newc;
873*38fd1498Szrj }
874*38fd1498Szrj 
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      /* SSA names are remapped wholesale; when the name was the LHS of
	 the statement being copied, the copy becomes its definition.  */
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  /* STATEMENT_LISTs and SAVE_EXPRs are GENERIC-only constructs; they
     must not appear as operands of a GIMPLE statement.  */
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  /* Carry over the access flags fold_build2 does not preserve.  */
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
1047*38fd1498Szrj 
1048*38fd1498Szrj 
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from out input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  /* The non-NULL return value makes walk_tree stop walking
	     this (now deleted) statement.  */
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  /* Replace the degenerate assignment by an empty
		     statement and re-process it.  */
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
	         it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
	         does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
	        {
		  type = remap_type (type, id);
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
	          else
		    {
		      /* Rebuild the dereference, carrying over the
			 access flags of the original reference.  */
	              *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
		        TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  /* Carry over the access flags fold_build2 does not preserve.  */
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
         When inlining we want EXPRs without block appear in the block
	 of function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    {
	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
	      *tp = t;
	    }
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
1319*38fd1498Szrj 
1320*38fd1498Szrj /* Helper for remap_gimple_stmt.  Given an EH region number for the
1321*38fd1498Szrj    source function, map that to the duplicate EH region number in
1322*38fd1498Szrj    the destination function.  */
1323*38fd1498Szrj 
1324*38fd1498Szrj static int
1325*38fd1498Szrj remap_eh_region_nr (int old_nr, copy_body_data *id)
1326*38fd1498Szrj {
1327*38fd1498Szrj   eh_region old_r, new_r;
1328*38fd1498Szrj 
1329*38fd1498Szrj   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1330*38fd1498Szrj   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1331*38fd1498Szrj 
1332*38fd1498Szrj   return new_r->index;
1333*38fd1498Szrj }
1334*38fd1498Szrj 
1335*38fd1498Szrj /* Similar, but operate on INTEGER_CSTs.  */
1336*38fd1498Szrj 
1337*38fd1498Szrj static tree
1338*38fd1498Szrj remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1339*38fd1498Szrj {
1340*38fd1498Szrj   int old_nr, new_nr;
1341*38fd1498Szrj 
1342*38fd1498Szrj   old_nr = tree_to_shwi (old_t_nr);
1343*38fd1498Szrj   new_nr = remap_eh_region_nr (old_nr, id);
1344*38fd1498Szrj 
1345*38fd1498Szrj   return build_int_cst (integer_type_node, new_nr);
1346*38fd1498Szrj }
1347*38fd1498Szrj 
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy (a sequence,
   since a single source statement can expand to several copies,
   or to none at all when it is dropped).  */

static gimple_seq
remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
  gimple *copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  /* Drop debug stmts the destination function does not support:
     nonbind markers when DST_FN doesn't track them, debug binds when
     var-tracking assignments are disabled for DST_FN.  Returning the
     (still empty) STMTS sequence discards the statement.  */
  if (is_gimple_debug (stmt)
      && (gimple_debug_nonbind_marker_p (stmt)
	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
    return stmts;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
      tree retbnd = gimple_return_retbnd (stmt);
      tree bndslot = id->retbnd;

      /* MPX pointer bounds: copy the returned bounds into the slot
	 the caller provided, remapping operands into the new body.  */
      if (retbnd && bndslot)
	{
	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
	  memset (&wi, 0, sizeof (wi));
	  wi.info = id;
	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
	  gimple_seq_add_stmt (&stmts, bndcopy);
	}

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
	  copy = gimple_build_assign (id->do_not_unshare
				      ? id->retvar : unshare_expr (id->retvar),
				      retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;

	  /* We need to copy bounds if return structure with pointers into
	     instrumented function.  */
	  if (chkp_function_instrumented_p (id->dst_fn)
	      && !bndslot
	      && !BOUNDED_P (id->retvar)
	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
	    id->assign_stmts.safe_push (copy);

	}
      else
	return stmts;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  Each case recursively remaps the embedded
	 sequence(s) and rebuilds a fresh statement of the same kind.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
	    copy = gimple_build_omp_parallel
	             (s1,
		      gimple_omp_parallel_clauses (omp_par_stmt),
		      gimple_omp_parallel_child_fn (omp_par_stmt),
		      gimple_omp_parallel_data_arg (omp_par_stmt));
	  }
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
	           (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
				       gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    /* Copy per-dimension loop control (index, bounds, step,
	       condition) for each collapsed loop level.  */
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_TASKGROUP:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_taskgroup (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered
		   (s1,
		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
	           (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
	           (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_TARGET:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_target
		   (s1, gimple_omp_target_kind (stmt),
		    gimple_omp_target_clauses (stmt));
	  break;

	case GIMPLE_OMP_TEAMS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_teams
		   (s1, gimple_omp_teams_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_critical (s1,
					    gimple_omp_critical_name
					      (as_a <gomp_critical *> (stmt)),
					    gimple_omp_critical_clauses
					      (as_a <gomp_critical *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
	    gtransaction *new_trans_stmt;
	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
				   id);
	    copy = new_trans_stmt = gimple_build_transaction (s1);
	    gimple_transaction_set_subcode (new_trans_stmt,
	      gimple_transaction_subcode (old_trans_stmt));
	    gimple_transaction_set_label_norm (new_trans_stmt,
	      gimple_transaction_label_norm (old_trans_stmt));
	    gimple_transaction_set_label_uninst (new_trans_stmt,
	      gimple_transaction_label_uninst (old_trans_stmt));
	    gimple_transaction_set_label_over (new_trans_stmt,
	      gimple_transaction_label_over (old_trans_stmt));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return NULL;
	    }
	}

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
	 in a block that we aren't copying during tree_function_versioning,
	 just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
	      if (gimple_bb (def_stmt)
		  && !bitmap_bit_p (id->blocks_to_copy,
				    gimple_bb (def_stmt)->index))
		return NULL;
	    }
	}

      /* Debug binds and source binds are copied directly, recorded in
	 ID->debug_stmts for later processing, and returned immediately
	 (their operands are not remapped here).  */
      if (gimple_debug_bind_p (stmt))
	{
	  gdebug *copy
	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
				       gimple_debug_bind_get_value (stmt),
				       stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  gdebug *copy = gimple_build_debug_source_bind
	                   (gimple_debug_source_bind_get_var (stmt),
			    gimple_debug_source_bind_get_value (stmt),
			    stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_nonbind_marker_p (stmt))
	{
	  /* If the inlined function has too many debug markers,
	     don't copy them.  */
	  if (id->src_cfun->debug_marker_count
	      > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
	    return stmts;

	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      gcc_checking_assert (!is_gimple_debug (stmt));

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  Tail calls and thunk calls
	 are no longer necessarily such in the inlined context.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
        {
	  if (gimple_call_tail_p (call_stmt))
	    gimple_call_set_tail (call_stmt, false);
	  if (gimple_call_from_thunk_p (call_stmt))
	    gimple_call_set_from_thunk (call_stmt, false);
	  if (gimple_call_internal_p (call_stmt))
	    switch (gimple_call_internal_fn (call_stmt))
	      {
	      case IFN_GOMP_SIMD_LANE:
	      case IFN_GOMP_SIMD_VF:
	      case IFN_GOMP_SIMD_LAST_LANE:
	      case IFN_GOMP_SIMD_ORDERED_START:
	      case IFN_GOMP_SIMD_ORDERED_END:
		/* Copying one of these into DST_FN means it now contains
		   simduid loops.  */
		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
	        break;
	      default:
		break;
	      }
	}

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (as_a <gcall *> (copy));
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      gresx *resx_stmt = as_a <gresx *> (copy);
	      int r = gimple_resx_region (resx_stmt);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (resx_stmt, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
	      int r = gimple_eh_dispatch_region (eh_dispatch);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (eh_dispatch, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  /* Debug stmt copies reach here only via gimple_copy above; their
     operands are not remapped.  */
  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy)
      || gimple_debug_nonbind_marker_p (copy))
    {
      gimple_seq_add_stmt (&stmts, copy);
      return stmts;
    }

  /* Remap all the operands in COPY.  When SKIP_FIRST, operand 0 is
     id->retvar, which was already substituted above.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  gimple_seq_add_stmt (&stmts, copy);
  return stmts;
}
1780*38fd1498Szrj 
1781*38fd1498Szrj 
1782*38fd1498Szrj /* Copy basic block, scale profile accordingly.  Edges will be taken care of
1783*38fd1498Szrj    later  */
1784*38fd1498Szrj 
1785*38fd1498Szrj static basic_block
1786*38fd1498Szrj copy_bb (copy_body_data *id, basic_block bb,
1787*38fd1498Szrj          profile_count num, profile_count den)
1788*38fd1498Szrj {
1789*38fd1498Szrj   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1790*38fd1498Szrj   basic_block copy_basic_block;
1791*38fd1498Szrj   tree decl;
1792*38fd1498Szrj   basic_block prev;
1793*38fd1498Szrj 
1794*38fd1498Szrj   profile_count::adjust_for_ipa_scaling (&num, &den);
1795*38fd1498Szrj 
1796*38fd1498Szrj   /* Search for previous copied basic block.  */
1797*38fd1498Szrj   prev = bb->prev_bb;
1798*38fd1498Szrj   while (!prev->aux)
1799*38fd1498Szrj     prev = prev->prev_bb;
1800*38fd1498Szrj 
1801*38fd1498Szrj   /* create_basic_block() will append every new block to
1802*38fd1498Szrj      basic_block_info automatically.  */
1803*38fd1498Szrj   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1804*38fd1498Szrj   copy_basic_block->count = bb->count.apply_scale (num, den);
1805*38fd1498Szrj 
1806*38fd1498Szrj   copy_gsi = gsi_start_bb (copy_basic_block);
1807*38fd1498Szrj 
1808*38fd1498Szrj   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1809*38fd1498Szrj     {
1810*38fd1498Szrj       gimple_seq stmts;
1811*38fd1498Szrj       gimple *stmt = gsi_stmt (gsi);
1812*38fd1498Szrj       gimple *orig_stmt = stmt;
1813*38fd1498Szrj       gimple_stmt_iterator stmts_gsi;
1814*38fd1498Szrj       bool stmt_added = false;
1815*38fd1498Szrj 
1816*38fd1498Szrj       id->regimplify = false;
1817*38fd1498Szrj       stmts = remap_gimple_stmt (stmt, id);
1818*38fd1498Szrj 
1819*38fd1498Szrj       if (gimple_seq_empty_p (stmts))
1820*38fd1498Szrj 	continue;
1821*38fd1498Szrj 
1822*38fd1498Szrj       seq_gsi = copy_gsi;
1823*38fd1498Szrj 
1824*38fd1498Szrj       for (stmts_gsi = gsi_start (stmts);
1825*38fd1498Szrj 	   !gsi_end_p (stmts_gsi); )
1826*38fd1498Szrj 	{
1827*38fd1498Szrj 	  stmt = gsi_stmt (stmts_gsi);
1828*38fd1498Szrj 
1829*38fd1498Szrj 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1830*38fd1498Szrj 	  gsi_next (&stmts_gsi);
1831*38fd1498Szrj 
1832*38fd1498Szrj 	  if (gimple_nop_p (stmt))
1833*38fd1498Szrj 	      continue;
1834*38fd1498Szrj 
1835*38fd1498Szrj 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1836*38fd1498Szrj 					    orig_stmt);
1837*38fd1498Szrj 
1838*38fd1498Szrj 	  /* With return slot optimization we can end up with
1839*38fd1498Szrj 	     non-gimple (foo *)&this->m, fix that here.  */
1840*38fd1498Szrj 	  if (is_gimple_assign (stmt)
1841*38fd1498Szrj 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1842*38fd1498Szrj 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1843*38fd1498Szrj 	    {
1844*38fd1498Szrj 	      tree new_rhs;
1845*38fd1498Szrj 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1846*38fd1498Szrj 						  gimple_assign_rhs1 (stmt),
1847*38fd1498Szrj 						  true, NULL, false,
1848*38fd1498Szrj 						  GSI_CONTINUE_LINKING);
1849*38fd1498Szrj 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1850*38fd1498Szrj 	      id->regimplify = false;
1851*38fd1498Szrj 	    }
1852*38fd1498Szrj 
1853*38fd1498Szrj 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1854*38fd1498Szrj 
1855*38fd1498Szrj 	  if (id->regimplify)
1856*38fd1498Szrj 	    gimple_regimplify_operands (stmt, &seq_gsi);
1857*38fd1498Szrj 
1858*38fd1498Szrj 	  stmt_added = true;
1859*38fd1498Szrj 	}
1860*38fd1498Szrj 
1861*38fd1498Szrj       if (!stmt_added)
1862*38fd1498Szrj 	continue;
1863*38fd1498Szrj 
1864*38fd1498Szrj       /* If copy_basic_block has been empty at the start of this iteration,
1865*38fd1498Szrj 	 call gsi_start_bb again to get at the newly added statements.  */
1866*38fd1498Szrj       if (gsi_end_p (copy_gsi))
1867*38fd1498Szrj 	copy_gsi = gsi_start_bb (copy_basic_block);
1868*38fd1498Szrj       else
1869*38fd1498Szrj 	gsi_next (&copy_gsi);
1870*38fd1498Szrj 
1871*38fd1498Szrj       /* Process the new statement.  The call to gimple_regimplify_operands
1872*38fd1498Szrj 	 possibly turned the statement into multiple statements, we
1873*38fd1498Szrj 	 need to process all of them.  */
1874*38fd1498Szrj       do
1875*38fd1498Szrj 	{
1876*38fd1498Szrj 	  tree fn;
1877*38fd1498Szrj 	  gcall *call_stmt;
1878*38fd1498Szrj 
1879*38fd1498Szrj 	  stmt = gsi_stmt (copy_gsi);
1880*38fd1498Szrj 	  call_stmt = dyn_cast <gcall *> (stmt);
1881*38fd1498Szrj 	  if (call_stmt
1882*38fd1498Szrj 	      && gimple_call_va_arg_pack_p (call_stmt)
1883*38fd1498Szrj 	      && id->call_stmt
1884*38fd1498Szrj 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
1885*38fd1498Szrj 	    {
1886*38fd1498Szrj 	      /* __builtin_va_arg_pack () should be replaced by
1887*38fd1498Szrj 		 all arguments corresponding to ... in the caller.  */
1888*38fd1498Szrj 	      tree p;
1889*38fd1498Szrj 	      gcall *new_call;
1890*38fd1498Szrj 	      vec<tree> argarray;
1891*38fd1498Szrj 	      size_t nargs = gimple_call_num_args (id->call_stmt);
1892*38fd1498Szrj 	      size_t n, i, nargs_to_copy;
1893*38fd1498Szrj 	      bool remove_bounds = false;
1894*38fd1498Szrj 
1895*38fd1498Szrj 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1896*38fd1498Szrj 		nargs--;
1897*38fd1498Szrj 
1898*38fd1498Szrj 	      /* Bounds should be removed from arg pack in case
1899*38fd1498Szrj 		 we handle not instrumented call in instrumented
1900*38fd1498Szrj 		 function.  */
1901*38fd1498Szrj 	      nargs_to_copy = nargs;
1902*38fd1498Szrj 	      if (gimple_call_with_bounds_p (id->call_stmt)
1903*38fd1498Szrj 		  && !gimple_call_with_bounds_p (stmt))
1904*38fd1498Szrj 		{
1905*38fd1498Szrj 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1906*38fd1498Szrj 		       i < gimple_call_num_args (id->call_stmt);
1907*38fd1498Szrj 		       i++)
1908*38fd1498Szrj 		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1909*38fd1498Szrj 		      nargs_to_copy--;
1910*38fd1498Szrj 		  remove_bounds = true;
1911*38fd1498Szrj 		}
1912*38fd1498Szrj 
1913*38fd1498Szrj 	      /* Create the new array of arguments.  */
1914*38fd1498Szrj 	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
1915*38fd1498Szrj 	      argarray.create (n);
1916*38fd1498Szrj 	      argarray.safe_grow_cleared (n);
1917*38fd1498Szrj 
1918*38fd1498Szrj 	      /* Copy all the arguments before '...'  */
1919*38fd1498Szrj 	      memcpy (argarray.address (),
1920*38fd1498Szrj 		      gimple_call_arg_ptr (call_stmt, 0),
1921*38fd1498Szrj 		      gimple_call_num_args (call_stmt) * sizeof (tree));
1922*38fd1498Szrj 
1923*38fd1498Szrj 	      if (remove_bounds)
1924*38fd1498Szrj 		{
1925*38fd1498Szrj 		  /* Append the rest of arguments removing bounds.  */
1926*38fd1498Szrj 		  unsigned cur = gimple_call_num_args (call_stmt);
1927*38fd1498Szrj 		  i = gimple_call_num_args (id->call_stmt) - nargs;
1928*38fd1498Szrj 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1929*38fd1498Szrj 		       i < gimple_call_num_args (id->call_stmt);
1930*38fd1498Szrj 		       i++)
1931*38fd1498Szrj 		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1932*38fd1498Szrj 		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1933*38fd1498Szrj 		  gcc_assert (cur == n);
1934*38fd1498Szrj 		}
1935*38fd1498Szrj 	      else
1936*38fd1498Szrj 		{
1937*38fd1498Szrj 		  /* Append the arguments passed in '...'  */
1938*38fd1498Szrj 		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1939*38fd1498Szrj 			  gimple_call_arg_ptr (id->call_stmt, 0)
1940*38fd1498Szrj 			  + (gimple_call_num_args (id->call_stmt) - nargs),
1941*38fd1498Szrj 			  nargs * sizeof (tree));
1942*38fd1498Szrj 		}
1943*38fd1498Szrj 
1944*38fd1498Szrj 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1945*38fd1498Szrj 						argarray);
1946*38fd1498Szrj 
1947*38fd1498Szrj 	      argarray.release ();
1948*38fd1498Szrj 
1949*38fd1498Szrj 	      /* Copy all GIMPLE_CALL flags, location and block, except
1950*38fd1498Szrj 		 GF_CALL_VA_ARG_PACK.  */
1951*38fd1498Szrj 	      gimple_call_copy_flags (new_call, call_stmt);
1952*38fd1498Szrj 	      gimple_call_set_va_arg_pack (new_call, false);
1953*38fd1498Szrj 	      gimple_set_location (new_call, gimple_location (stmt));
1954*38fd1498Szrj 	      gimple_set_block (new_call, gimple_block (stmt));
1955*38fd1498Szrj 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1956*38fd1498Szrj 
1957*38fd1498Szrj 	      gsi_replace (&copy_gsi, new_call, false);
1958*38fd1498Szrj 	      stmt = new_call;
1959*38fd1498Szrj 	    }
1960*38fd1498Szrj 	  else if (call_stmt
1961*38fd1498Szrj 		   && id->call_stmt
1962*38fd1498Szrj 		   && (decl = gimple_call_fndecl (stmt))
1963*38fd1498Szrj 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1964*38fd1498Szrj 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
1965*38fd1498Szrj 		   && ! gimple_call_va_arg_pack_p (id->call_stmt))
1966*38fd1498Szrj 	    {
1967*38fd1498Szrj 	      /* __builtin_va_arg_pack_len () should be replaced by
1968*38fd1498Szrj 		 the number of anonymous arguments.  */
1969*38fd1498Szrj 	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
1970*38fd1498Szrj 	      tree count, p;
1971*38fd1498Szrj 	      gimple *new_stmt;
1972*38fd1498Szrj 
1973*38fd1498Szrj 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1974*38fd1498Szrj 		nargs--;
1975*38fd1498Szrj 
1976*38fd1498Szrj 	      /* For instrumented calls we should ignore bounds.  */
1977*38fd1498Szrj 	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
1978*38fd1498Szrj 		   i < gimple_call_num_args (id->call_stmt);
1979*38fd1498Szrj 		   i++)
1980*38fd1498Szrj 		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1981*38fd1498Szrj 		  nargs--;
1982*38fd1498Szrj 
1983*38fd1498Szrj 	      count = build_int_cst (integer_type_node, nargs);
1984*38fd1498Szrj 	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1985*38fd1498Szrj 	      gsi_replace (&copy_gsi, new_stmt, false);
1986*38fd1498Szrj 	      stmt = new_stmt;
1987*38fd1498Szrj 	    }
1988*38fd1498Szrj 	  else if (call_stmt
1989*38fd1498Szrj 		   && id->call_stmt
1990*38fd1498Szrj 		   && gimple_call_internal_p (stmt)
1991*38fd1498Szrj 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1992*38fd1498Szrj 	    {
1993*38fd1498Szrj 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
1994*38fd1498Szrj 	      gsi_remove (&copy_gsi, false);
1995*38fd1498Szrj 	      continue;
1996*38fd1498Szrj 	    }
1997*38fd1498Szrj 
1998*38fd1498Szrj 	  /* Statements produced by inlining can be unfolded, especially
1999*38fd1498Szrj 	     when we constant propagated some operands.  We can't fold
2000*38fd1498Szrj 	     them right now for two reasons:
2001*38fd1498Szrj 	     1) folding require SSA_NAME_DEF_STMTs to be correct
2002*38fd1498Szrj 	     2) we can't change function calls to builtins.
2003*38fd1498Szrj 	     So we just mark statement for later folding.  We mark
2004*38fd1498Szrj 	     all new statements, instead just statements that has changed
2005*38fd1498Szrj 	     by some nontrivial substitution so even statements made
2006*38fd1498Szrj 	     foldable indirectly are updated.  If this turns out to be
2007*38fd1498Szrj 	     expensive, copy_body can be told to watch for nontrivial
2008*38fd1498Szrj 	     changes.  */
2009*38fd1498Szrj 	  if (id->statements_to_fold)
2010*38fd1498Szrj 	    id->statements_to_fold->add (stmt);
2011*38fd1498Szrj 
2012*38fd1498Szrj 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2013*38fd1498Szrj 	     callgraph edges and update or duplicate them.  */
2014*38fd1498Szrj 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2015*38fd1498Szrj 	    {
2016*38fd1498Szrj 	      struct cgraph_edge *edge;
2017*38fd1498Szrj 
2018*38fd1498Szrj 	      switch (id->transform_call_graph_edges)
2019*38fd1498Szrj 		{
2020*38fd1498Szrj 		case CB_CGE_DUPLICATE:
2021*38fd1498Szrj 		  edge = id->src_node->get_edge (orig_stmt);
2022*38fd1498Szrj 		  if (edge)
2023*38fd1498Szrj 		    {
2024*38fd1498Szrj 		      struct cgraph_edge *old_edge = edge;
2025*38fd1498Szrj 		      profile_count old_cnt = edge->count;
2026*38fd1498Szrj 		      edge = edge->clone (id->dst_node, call_stmt,
2027*38fd1498Szrj 					  gimple_uid (stmt),
2028*38fd1498Szrj 					  num, den,
2029*38fd1498Szrj 					  true);
2030*38fd1498Szrj 
2031*38fd1498Szrj 		      /* Speculative calls consist of two edges - direct and
2032*38fd1498Szrj 			 indirect.  Duplicate the whole thing and distribute
2033*38fd1498Szrj 			 frequencies accordingly.  */
2034*38fd1498Szrj 		      if (edge->speculative)
2035*38fd1498Szrj 			{
2036*38fd1498Szrj 			  struct cgraph_edge *direct, *indirect;
2037*38fd1498Szrj 			  struct ipa_ref *ref;
2038*38fd1498Szrj 
2039*38fd1498Szrj 			  gcc_assert (!edge->indirect_unknown_callee);
2040*38fd1498Szrj 			  old_edge->speculative_call_info (direct, indirect, ref);
2041*38fd1498Szrj 
2042*38fd1498Szrj 			  profile_count indir_cnt = indirect->count;
2043*38fd1498Szrj 			  indirect = indirect->clone (id->dst_node, call_stmt,
2044*38fd1498Szrj 						      gimple_uid (stmt),
2045*38fd1498Szrj 						      num, den,
2046*38fd1498Szrj 						      true);
2047*38fd1498Szrj 
2048*38fd1498Szrj 			  profile_probability prob
2049*38fd1498Szrj 			     = indir_cnt.probability_in (old_cnt + indir_cnt);
2050*38fd1498Szrj 			  indirect->count
2051*38fd1498Szrj 			     = copy_basic_block->count.apply_probability (prob);
2052*38fd1498Szrj 			  edge->count = copy_basic_block->count - indirect->count;
2053*38fd1498Szrj 			  id->dst_node->clone_reference (ref, stmt);
2054*38fd1498Szrj 			}
2055*38fd1498Szrj 		      else
2056*38fd1498Szrj 			edge->count = copy_basic_block->count;
2057*38fd1498Szrj 		    }
2058*38fd1498Szrj 		  break;
2059*38fd1498Szrj 
2060*38fd1498Szrj 		case CB_CGE_MOVE_CLONES:
2061*38fd1498Szrj 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2062*38fd1498Szrj 								call_stmt);
2063*38fd1498Szrj 		  edge = id->dst_node->get_edge (stmt);
2064*38fd1498Szrj 		  break;
2065*38fd1498Szrj 
2066*38fd1498Szrj 		case CB_CGE_MOVE:
2067*38fd1498Szrj 		  edge = id->dst_node->get_edge (orig_stmt);
2068*38fd1498Szrj 		  if (edge)
2069*38fd1498Szrj 		    edge->set_call_stmt (call_stmt);
2070*38fd1498Szrj 		  break;
2071*38fd1498Szrj 
2072*38fd1498Szrj 		default:
2073*38fd1498Szrj 		  gcc_unreachable ();
2074*38fd1498Szrj 		}
2075*38fd1498Szrj 
2076*38fd1498Szrj 	      /* Constant propagation on argument done during inlining
2077*38fd1498Szrj 		 may create new direct call.  Produce an edge for it.  */
2078*38fd1498Szrj 	      if ((!edge
2079*38fd1498Szrj 		   || (edge->indirect_inlining_edge
2080*38fd1498Szrj 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2081*38fd1498Szrj 		  && id->dst_node->definition
2082*38fd1498Szrj 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2083*38fd1498Szrj 		{
2084*38fd1498Szrj 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2085*38fd1498Szrj 
2086*38fd1498Szrj 		  /* We have missing edge in the callgraph.  This can happen
2087*38fd1498Szrj 		     when previous inlining turned an indirect call into a
2088*38fd1498Szrj 		     direct call by constant propagating arguments or we are
2089*38fd1498Szrj 		     producing dead clone (for further cloning).  In all
2090*38fd1498Szrj 		     other cases we hit a bug (incorrect node sharing is the
2091*38fd1498Szrj 		     most common reason for missing edges).  */
2092*38fd1498Szrj 		  gcc_assert (!dest->definition
2093*38fd1498Szrj 			      || dest->address_taken
2094*38fd1498Szrj 		  	      || !id->src_node->definition
2095*38fd1498Szrj 			      || !id->dst_node->definition);
2096*38fd1498Szrj 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2097*38fd1498Szrj 		    id->dst_node->create_edge_including_clones
2098*38fd1498Szrj 		      (dest, orig_stmt, call_stmt, bb->count,
2099*38fd1498Szrj 		       CIF_ORIGINALLY_INDIRECT_CALL);
2100*38fd1498Szrj 		  else
2101*38fd1498Szrj 		    id->dst_node->create_edge (dest, call_stmt,
2102*38fd1498Szrj 					bb->count)->inline_failed
2103*38fd1498Szrj 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2104*38fd1498Szrj 		  if (dump_file)
2105*38fd1498Szrj 		    {
2106*38fd1498Szrj 		      fprintf (dump_file, "Created new direct edge to %s\n",
2107*38fd1498Szrj 			       dest->name ());
2108*38fd1498Szrj 		    }
2109*38fd1498Szrj 		}
2110*38fd1498Szrj 
2111*38fd1498Szrj 	      notice_special_calls (as_a <gcall *> (stmt));
2112*38fd1498Szrj 	    }
2113*38fd1498Szrj 
2114*38fd1498Szrj 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2115*38fd1498Szrj 				      id->eh_map, id->eh_lp_nr);
2116*38fd1498Szrj 
2117*38fd1498Szrj 	  gsi_next (&copy_gsi);
2118*38fd1498Szrj 	}
2119*38fd1498Szrj       while (!gsi_end_p (copy_gsi));
2120*38fd1498Szrj 
2121*38fd1498Szrj       copy_gsi = gsi_last_bb (copy_basic_block);
2122*38fd1498Szrj     }
2123*38fd1498Szrj 
2124*38fd1498Szrj   return copy_basic_block;
2125*38fd1498Szrj }
2126*38fd1498Szrj 
2127*38fd1498Szrj /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2128*38fd1498Szrj    form is quite easy, since dominator relationship for old basic blocks does
2129*38fd1498Szrj    not change.
2130*38fd1498Szrj 
2131*38fd1498Szrj    There is however exception where inlining might change dominator relation
2132*38fd1498Szrj    across EH edges from basic block within inlined functions destinating
2133*38fd1498Szrj    to landing pads in function we inline into.
2134*38fd1498Szrj 
2135*38fd1498Szrj    The function fills in PHI_RESULTs of such PHI nodes if they refer
2136*38fd1498Szrj    to gimple regs.  Otherwise, the function mark PHI_RESULT of such
2137*38fd1498Szrj    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2138*38fd1498Szrj    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2139*38fd1498Szrj    set, and this means that there will be no overlapping live ranges
2140*38fd1498Szrj    for the underlying symbol.
2141*38fd1498Szrj 
2142*38fd1498Szrj    This might change in future if we allow redirecting of EH edges and
2143*38fd1498Szrj    we might want to change way build CFG pre-inlining to include
2144*38fd1498Szrj    all the possible edges then.  */
2145*38fd1498Szrj static void
2146*38fd1498Szrj update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2147*38fd1498Szrj 				  bool can_throw, bool nonlocal_goto)
2148*38fd1498Szrj {
2149*38fd1498Szrj   edge e;
2150*38fd1498Szrj   edge_iterator ei;
2151*38fd1498Szrj 
2152*38fd1498Szrj   FOR_EACH_EDGE (e, ei, bb->succs)
2153*38fd1498Szrj     if (!e->dest->aux
2154*38fd1498Szrj 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2155*38fd1498Szrj       {
2156*38fd1498Szrj 	gphi *phi;
2157*38fd1498Szrj 	gphi_iterator si;
2158*38fd1498Szrj 
2159*38fd1498Szrj 	if (!nonlocal_goto)
2160*38fd1498Szrj 	  gcc_assert (e->flags & EDGE_EH);
2161*38fd1498Szrj 
2162*38fd1498Szrj 	if (!can_throw)
2163*38fd1498Szrj 	  gcc_assert (!(e->flags & EDGE_EH));
2164*38fd1498Szrj 
2165*38fd1498Szrj 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2166*38fd1498Szrj 	  {
2167*38fd1498Szrj 	    edge re;
2168*38fd1498Szrj 
2169*38fd1498Szrj 	    phi = si.phi ();
2170*38fd1498Szrj 
2171*38fd1498Szrj 	    /* For abnormal goto/call edges the receiver can be the
2172*38fd1498Szrj 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2173*38fd1498Szrj 
2174*38fd1498Szrj 	    gcc_assert ((e->flags & EDGE_EH)
2175*38fd1498Szrj 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2176*38fd1498Szrj 
2177*38fd1498Szrj 	    re = find_edge (ret_bb, e->dest);
2178*38fd1498Szrj 	    gcc_checking_assert (re);
2179*38fd1498Szrj 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2180*38fd1498Szrj 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2181*38fd1498Szrj 
2182*38fd1498Szrj 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2183*38fd1498Szrj 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2184*38fd1498Szrj 	  }
2185*38fd1498Szrj       }
2186*38fd1498Szrj }
2187*38fd1498Szrj 
2188*38fd1498Szrj 
2189*38fd1498Szrj /* Copy edges from BB into its copy constructed earlier, scale profile
2190*38fd1498Szrj    accordingly.  Edges will be taken care of later.  Assume aux
2191*38fd1498Szrj    pointers to point to the copies of each BB.  Return true if any
2192*38fd1498Szrj    debug stmts are left after a statement that must end the basic block.  */
2193*38fd1498Szrj 
2194*38fd1498Szrj static bool
2195*38fd1498Szrj copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2196*38fd1498Szrj 		   basic_block ret_bb, basic_block abnormal_goto_dest)
2197*38fd1498Szrj {
2198*38fd1498Szrj   basic_block new_bb = (basic_block) bb->aux;
2199*38fd1498Szrj   edge_iterator ei;
2200*38fd1498Szrj   edge old_edge;
2201*38fd1498Szrj   gimple_stmt_iterator si;
2202*38fd1498Szrj   int flags;
2203*38fd1498Szrj   bool need_debug_cleanup = false;
2204*38fd1498Szrj 
2205*38fd1498Szrj   /* Use the indices from the original blocks to create edges for the
2206*38fd1498Szrj      new ones.  */
2207*38fd1498Szrj   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2208*38fd1498Szrj     if (!(old_edge->flags & EDGE_EH))
2209*38fd1498Szrj       {
2210*38fd1498Szrj 	edge new_edge;
2211*38fd1498Szrj 
2212*38fd1498Szrj 	flags = old_edge->flags;
2213*38fd1498Szrj 
2214*38fd1498Szrj 	/* Return edges do get a FALLTHRU flag when the get inlined.  */
2215*38fd1498Szrj 	if (old_edge->dest->index == EXIT_BLOCK
2216*38fd1498Szrj 	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2217*38fd1498Szrj 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2218*38fd1498Szrj 	  flags |= EDGE_FALLTHRU;
2219*38fd1498Szrj 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2220*38fd1498Szrj 	new_edge->probability = old_edge->probability;
2221*38fd1498Szrj       }
2222*38fd1498Szrj 
2223*38fd1498Szrj   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2224*38fd1498Szrj     return false;
2225*38fd1498Szrj 
2226*38fd1498Szrj   /* When doing function splitting, we must decreate count of the return block
2227*38fd1498Szrj      which was previously reachable by block we did not copy.  */
2228*38fd1498Szrj   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2229*38fd1498Szrj     FOR_EACH_EDGE (old_edge, ei, bb->preds)
2230*38fd1498Szrj       if (old_edge->src->index != ENTRY_BLOCK
2231*38fd1498Szrj 	  && !old_edge->src->aux)
2232*38fd1498Szrj 	new_bb->count -= old_edge->count ().apply_scale (num, den);
2233*38fd1498Szrj 
2234*38fd1498Szrj   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2235*38fd1498Szrj     {
2236*38fd1498Szrj       gimple *copy_stmt;
2237*38fd1498Szrj       bool can_throw, nonlocal_goto;
2238*38fd1498Szrj 
2239*38fd1498Szrj       copy_stmt = gsi_stmt (si);
2240*38fd1498Szrj       if (!is_gimple_debug (copy_stmt))
2241*38fd1498Szrj 	update_stmt (copy_stmt);
2242*38fd1498Szrj 
2243*38fd1498Szrj       /* Do this before the possible split_block.  */
2244*38fd1498Szrj       gsi_next (&si);
2245*38fd1498Szrj 
2246*38fd1498Szrj       /* If this tree could throw an exception, there are two
2247*38fd1498Szrj          cases where we need to add abnormal edge(s): the
2248*38fd1498Szrj          tree wasn't in a region and there is a "current
2249*38fd1498Szrj          region" in the caller; or the original tree had
2250*38fd1498Szrj          EH edges.  In both cases split the block after the tree,
2251*38fd1498Szrj          and add abnormal edge(s) as needed; we need both
2252*38fd1498Szrj          those from the callee and the caller.
2253*38fd1498Szrj          We check whether the copy can throw, because the const
2254*38fd1498Szrj          propagation can change an INDIRECT_REF which throws
2255*38fd1498Szrj          into a COMPONENT_REF which doesn't.  If the copy
2256*38fd1498Szrj          can throw, the original could also throw.  */
2257*38fd1498Szrj       can_throw = stmt_can_throw_internal (copy_stmt);
2258*38fd1498Szrj       nonlocal_goto
2259*38fd1498Szrj 	= (stmt_can_make_abnormal_goto (copy_stmt)
2260*38fd1498Szrj 	   && !computed_goto_p (copy_stmt));
2261*38fd1498Szrj 
2262*38fd1498Szrj       if (can_throw || nonlocal_goto)
2263*38fd1498Szrj 	{
2264*38fd1498Szrj 	  if (!gsi_end_p (si))
2265*38fd1498Szrj 	    {
2266*38fd1498Szrj 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2267*38fd1498Szrj 		gsi_next (&si);
2268*38fd1498Szrj 	      if (gsi_end_p (si))
2269*38fd1498Szrj 		need_debug_cleanup = true;
2270*38fd1498Szrj 	    }
2271*38fd1498Szrj 	  if (!gsi_end_p (si))
2272*38fd1498Szrj 	    /* Note that bb's predecessor edges aren't necessarily
2273*38fd1498Szrj 	       right at this point; split_block doesn't care.  */
2274*38fd1498Szrj 	    {
2275*38fd1498Szrj 	      edge e = split_block (new_bb, copy_stmt);
2276*38fd1498Szrj 
2277*38fd1498Szrj 	      new_bb = e->dest;
2278*38fd1498Szrj 	      new_bb->aux = e->src->aux;
2279*38fd1498Szrj 	      si = gsi_start_bb (new_bb);
2280*38fd1498Szrj 	    }
2281*38fd1498Szrj 	}
2282*38fd1498Szrj 
2283*38fd1498Szrj       bool update_probs = false;
2284*38fd1498Szrj 
2285*38fd1498Szrj       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2286*38fd1498Szrj 	{
2287*38fd1498Szrj 	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2288*38fd1498Szrj 	  update_probs = true;
2289*38fd1498Szrj 	}
2290*38fd1498Szrj       else if (can_throw)
2291*38fd1498Szrj 	{
2292*38fd1498Szrj 	  make_eh_edges (copy_stmt);
2293*38fd1498Szrj 	  update_probs = true;
2294*38fd1498Szrj 	}
2295*38fd1498Szrj 
2296*38fd1498Szrj       /* EH edges may not match old edges.  Copy as much as possible.  */
2297*38fd1498Szrj       if (update_probs)
2298*38fd1498Szrj 	{
2299*38fd1498Szrj           edge e;
2300*38fd1498Szrj           edge_iterator ei;
2301*38fd1498Szrj 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2302*38fd1498Szrj 
2303*38fd1498Szrj           FOR_EACH_EDGE (old_edge, ei, bb->succs)
2304*38fd1498Szrj             if ((old_edge->flags & EDGE_EH)
2305*38fd1498Szrj 		&& (e = find_edge (copy_stmt_bb,
2306*38fd1498Szrj 				   (basic_block) old_edge->dest->aux))
2307*38fd1498Szrj 		&& (e->flags & EDGE_EH))
2308*38fd1498Szrj 	      e->probability = old_edge->probability;
2309*38fd1498Szrj 
2310*38fd1498Szrj           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2311*38fd1498Szrj 	    if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
2312*38fd1498Szrj 	      e->probability = profile_probability::never ();
2313*38fd1498Szrj         }
2314*38fd1498Szrj 
2315*38fd1498Szrj 
2316*38fd1498Szrj       /* If the call we inline cannot make abnormal goto do not add
2317*38fd1498Szrj          additional abnormal edges but only retain those already present
2318*38fd1498Szrj 	 in the original function body.  */
2319*38fd1498Szrj       if (abnormal_goto_dest == NULL)
2320*38fd1498Szrj 	nonlocal_goto = false;
2321*38fd1498Szrj       if (nonlocal_goto)
2322*38fd1498Szrj 	{
2323*38fd1498Szrj 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2324*38fd1498Szrj 
2325*38fd1498Szrj 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2326*38fd1498Szrj 	    nonlocal_goto = false;
2327*38fd1498Szrj 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2328*38fd1498Szrj 	     in OpenMP regions which aren't allowed to be left abnormally.
2329*38fd1498Szrj 	     So, no need to add abnormal edge in that case.  */
2330*38fd1498Szrj 	  else if (is_gimple_call (copy_stmt)
2331*38fd1498Szrj 		   && gimple_call_internal_p (copy_stmt)
2332*38fd1498Szrj 		   && (gimple_call_internal_fn (copy_stmt)
2333*38fd1498Szrj 		       == IFN_ABNORMAL_DISPATCHER)
2334*38fd1498Szrj 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2335*38fd1498Szrj 	    nonlocal_goto = false;
2336*38fd1498Szrj 	  else
2337*38fd1498Szrj 	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2338*38fd1498Szrj 				   EDGE_ABNORMAL);
2339*38fd1498Szrj 	}
2340*38fd1498Szrj 
2341*38fd1498Szrj       if ((can_throw || nonlocal_goto)
2342*38fd1498Szrj 	  && gimple_in_ssa_p (cfun))
2343*38fd1498Szrj 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2344*38fd1498Szrj 					  can_throw, nonlocal_goto);
2345*38fd1498Szrj     }
2346*38fd1498Szrj   return need_debug_cleanup;
2347*38fd1498Szrj }
2348*38fd1498Szrj 
2349*38fd1498Szrj /* Copy the PHIs.  All blocks and edges are copied, some blocks
2350*38fd1498Szrj    was possibly split and new outgoing EH edges inserted.
2351*38fd1498Szrj    BB points to the block of original function and AUX pointers links
2352*38fd1498Szrj    the original and newly copied blocks.  */
2353*38fd1498Szrj 
2354*38fd1498Szrj static void
2355*38fd1498Szrj copy_phis_for_bb (basic_block bb, copy_body_data *id)
2356*38fd1498Szrj {
2357*38fd1498Szrj   basic_block const new_bb = (basic_block) bb->aux;
2358*38fd1498Szrj   edge_iterator ei;
2359*38fd1498Szrj   gphi *phi;
2360*38fd1498Szrj   gphi_iterator si;
2361*38fd1498Szrj   edge new_edge;
2362*38fd1498Szrj   bool inserted = false;
2363*38fd1498Szrj 
2364*38fd1498Szrj   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2365*38fd1498Szrj     {
2366*38fd1498Szrj       tree res, new_res;
2367*38fd1498Szrj       gphi *new_phi;
2368*38fd1498Szrj 
2369*38fd1498Szrj       phi = si.phi ();
2370*38fd1498Szrj       res = PHI_RESULT (phi);
2371*38fd1498Szrj       new_res = res;
2372*38fd1498Szrj       if (!virtual_operand_p (res))
2373*38fd1498Szrj 	{
2374*38fd1498Szrj 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2375*38fd1498Szrj 	  if (EDGE_COUNT (new_bb->preds) == 0)
2376*38fd1498Szrj 	    {
2377*38fd1498Szrj 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2378*38fd1498Szrj 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2379*38fd1498Szrj 	    }
2380*38fd1498Szrj 	  else
2381*38fd1498Szrj 	    {
2382*38fd1498Szrj 	      new_phi = create_phi_node (new_res, new_bb);
2383*38fd1498Szrj 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2384*38fd1498Szrj 		{
2385*38fd1498Szrj 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2386*38fd1498Szrj 					     bb);
2387*38fd1498Szrj 		  tree arg;
2388*38fd1498Szrj 		  tree new_arg;
2389*38fd1498Szrj 		  edge_iterator ei2;
2390*38fd1498Szrj 		  location_t locus;
2391*38fd1498Szrj 
2392*38fd1498Szrj 		  /* When doing partial cloning, we allow PHIs on the entry
2393*38fd1498Szrj 		     block as long as all the arguments are the same.
2394*38fd1498Szrj 		     Find any input edge to see argument to copy.  */
2395*38fd1498Szrj 		  if (!old_edge)
2396*38fd1498Szrj 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2397*38fd1498Szrj 		      if (!old_edge->src->aux)
2398*38fd1498Szrj 			break;
2399*38fd1498Szrj 
2400*38fd1498Szrj 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2401*38fd1498Szrj 		  new_arg = arg;
2402*38fd1498Szrj 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2403*38fd1498Szrj 		  gcc_assert (new_arg);
2404*38fd1498Szrj 		  /* With return slot optimization we can end up with
2405*38fd1498Szrj 		     non-gimple (foo *)&this->m, fix that here.  */
2406*38fd1498Szrj 		  if (TREE_CODE (new_arg) != SSA_NAME
2407*38fd1498Szrj 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2408*38fd1498Szrj 		      && !is_gimple_val (new_arg))
2409*38fd1498Szrj 		    {
2410*38fd1498Szrj 		      gimple_seq stmts = NULL;
2411*38fd1498Szrj 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2412*38fd1498Szrj 						      NULL);
2413*38fd1498Szrj 		      gsi_insert_seq_on_edge (new_edge, stmts);
2414*38fd1498Szrj 		      inserted = true;
2415*38fd1498Szrj 		    }
2416*38fd1498Szrj 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2417*38fd1498Szrj 		  if (LOCATION_BLOCK (locus))
2418*38fd1498Szrj 		    {
2419*38fd1498Szrj 		      tree *n;
2420*38fd1498Szrj 		      n = id->decl_map->get (LOCATION_BLOCK (locus));
2421*38fd1498Szrj 		      gcc_assert (n);
2422*38fd1498Szrj 		      locus = set_block (locus, *n);
2423*38fd1498Szrj 		    }
2424*38fd1498Szrj 		  else
2425*38fd1498Szrj 		    locus = LOCATION_LOCUS (locus);
2426*38fd1498Szrj 
2427*38fd1498Szrj 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2428*38fd1498Szrj 		}
2429*38fd1498Szrj 	    }
2430*38fd1498Szrj 	}
2431*38fd1498Szrj     }
2432*38fd1498Szrj 
2433*38fd1498Szrj   /* Commit the delayed edge insertions.  */
2434*38fd1498Szrj   if (inserted)
2435*38fd1498Szrj     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2436*38fd1498Szrj       gsi_commit_one_edge_insert (new_edge, NULL);
2437*38fd1498Szrj }
2438*38fd1498Szrj 
2439*38fd1498Szrj 
2440*38fd1498Szrj /* Wrapper for remap_decl so it can be used as a callback.  */
2441*38fd1498Szrj 
2442*38fd1498Szrj static tree
2443*38fd1498Szrj remap_decl_1 (tree decl, void *data)
2444*38fd1498Szrj {
2445*38fd1498Szrj   return remap_decl (decl, (copy_body_data *) data);
2446*38fd1498Szrj }
2447*38fd1498Szrj 
2448*38fd1498Szrj /* Build struct function and associated datastructures for the new clone
2449*38fd1498Szrj    NEW_FNDECL to be build.  CALLEE_FNDECL is the original.  Function changes
2450*38fd1498Szrj    the cfun to the function of new_fndecl (and current_function_decl too).  */
2451*38fd1498Szrj 
2452*38fd1498Szrj static void
2453*38fd1498Szrj initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2454*38fd1498Szrj {
2455*38fd1498Szrj   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2456*38fd1498Szrj 
2457*38fd1498Szrj   if (!DECL_ARGUMENTS (new_fndecl))
2458*38fd1498Szrj     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2459*38fd1498Szrj   if (!DECL_RESULT (new_fndecl))
2460*38fd1498Szrj     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2461*38fd1498Szrj 
2462*38fd1498Szrj   /* Register specific tree functions.  */
2463*38fd1498Szrj   gimple_register_cfg_hooks ();
2464*38fd1498Szrj 
2465*38fd1498Szrj   /* Get clean struct function.  */
2466*38fd1498Szrj   push_struct_function (new_fndecl);
2467*38fd1498Szrj 
2468*38fd1498Szrj   /* We will rebuild these, so just sanity check that they are empty.  */
2469*38fd1498Szrj   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2470*38fd1498Szrj   gcc_assert (cfun->local_decls == NULL);
2471*38fd1498Szrj   gcc_assert (cfun->cfg == NULL);
2472*38fd1498Szrj   gcc_assert (cfun->decl == new_fndecl);
2473*38fd1498Szrj 
2474*38fd1498Szrj   /* Copy items we preserve during cloning.  */
2475*38fd1498Szrj   cfun->static_chain_decl = src_cfun->static_chain_decl;
2476*38fd1498Szrj   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2477*38fd1498Szrj   cfun->function_end_locus = src_cfun->function_end_locus;
2478*38fd1498Szrj   cfun->curr_properties = src_cfun->curr_properties;
2479*38fd1498Szrj   cfun->last_verified = src_cfun->last_verified;
2480*38fd1498Szrj   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2481*38fd1498Szrj   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2482*38fd1498Szrj   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2483*38fd1498Szrj   cfun->stdarg = src_cfun->stdarg;
2484*38fd1498Szrj   cfun->after_inlining = src_cfun->after_inlining;
2485*38fd1498Szrj   cfun->can_throw_non_call_exceptions
2486*38fd1498Szrj     = src_cfun->can_throw_non_call_exceptions;
2487*38fd1498Szrj   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2488*38fd1498Szrj   cfun->returns_struct = src_cfun->returns_struct;
2489*38fd1498Szrj   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2490*38fd1498Szrj 
2491*38fd1498Szrj   init_empty_tree_cfg ();
2492*38fd1498Szrj 
2493*38fd1498Szrj   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2494*38fd1498Szrj 
2495*38fd1498Szrj   profile_count num = count;
2496*38fd1498Szrj   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2497*38fd1498Szrj   profile_count::adjust_for_ipa_scaling (&num, &den);
2498*38fd1498Szrj 
2499*38fd1498Szrj   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2500*38fd1498Szrj     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2501*38fd1498Szrj 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2502*38fd1498Szrj   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2503*38fd1498Szrj     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2504*38fd1498Szrj 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2505*38fd1498Szrj   if (src_cfun->eh)
2506*38fd1498Szrj     init_eh_for_function ();
2507*38fd1498Szrj 
2508*38fd1498Szrj   if (src_cfun->gimple_df)
2509*38fd1498Szrj     {
2510*38fd1498Szrj       init_tree_ssa (cfun);
2511*38fd1498Szrj       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2512*38fd1498Szrj       if (cfun->gimple_df->in_ssa_p)
2513*38fd1498Szrj 	init_ssa_operands (cfun);
2514*38fd1498Szrj     }
2515*38fd1498Szrj }
2516*38fd1498Szrj 
2517*38fd1498Szrj /* Helper function for copy_cfg_body.  Move debug stmts from the end
2518*38fd1498Szrj    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2519*38fd1498Szrj    successor has multiple predecessors, reset them, otherwise keep
2520*38fd1498Szrj    their value.  */
2521*38fd1498Szrj 
2522*38fd1498Szrj static void
2523*38fd1498Szrj maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2524*38fd1498Szrj {
2525*38fd1498Szrj   edge e;
2526*38fd1498Szrj   edge_iterator ei;
2527*38fd1498Szrj   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2528*38fd1498Szrj 
2529*38fd1498Szrj   if (gsi_end_p (si)
2530*38fd1498Szrj       || gsi_one_before_end_p (si)
2531*38fd1498Szrj       || !(stmt_can_throw_internal (gsi_stmt (si))
2532*38fd1498Szrj 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2533*38fd1498Szrj     return;
2534*38fd1498Szrj 
2535*38fd1498Szrj   FOR_EACH_EDGE (e, ei, new_bb->succs)
2536*38fd1498Szrj     {
2537*38fd1498Szrj       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2538*38fd1498Szrj       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2539*38fd1498Szrj       while (is_gimple_debug (gsi_stmt (ssi)))
2540*38fd1498Szrj 	{
2541*38fd1498Szrj 	  gimple *stmt = gsi_stmt (ssi);
2542*38fd1498Szrj 	  gdebug *new_stmt;
2543*38fd1498Szrj 	  tree var;
2544*38fd1498Szrj 	  tree value;
2545*38fd1498Szrj 
2546*38fd1498Szrj 	  /* For the last edge move the debug stmts instead of copying
2547*38fd1498Szrj 	     them.  */
2548*38fd1498Szrj 	  if (ei_one_before_end_p (ei))
2549*38fd1498Szrj 	    {
2550*38fd1498Szrj 	      si = ssi;
2551*38fd1498Szrj 	      gsi_prev (&ssi);
2552*38fd1498Szrj 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2553*38fd1498Szrj 		gimple_debug_bind_reset_value (stmt);
2554*38fd1498Szrj 	      gsi_remove (&si, false);
2555*38fd1498Szrj 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2556*38fd1498Szrj 	      continue;
2557*38fd1498Szrj 	    }
2558*38fd1498Szrj 
2559*38fd1498Szrj 	  if (gimple_debug_bind_p (stmt))
2560*38fd1498Szrj 	    {
2561*38fd1498Szrj 	      var = gimple_debug_bind_get_var (stmt);
2562*38fd1498Szrj 	      if (single_pred_p (e->dest))
2563*38fd1498Szrj 		{
2564*38fd1498Szrj 		  value = gimple_debug_bind_get_value (stmt);
2565*38fd1498Szrj 		  value = unshare_expr (value);
2566*38fd1498Szrj 		}
2567*38fd1498Szrj 	      else
2568*38fd1498Szrj 		value = NULL_TREE;
2569*38fd1498Szrj 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2570*38fd1498Szrj 	    }
2571*38fd1498Szrj 	  else if (gimple_debug_source_bind_p (stmt))
2572*38fd1498Szrj 	    {
2573*38fd1498Szrj 	      var = gimple_debug_source_bind_get_var (stmt);
2574*38fd1498Szrj 	      value = gimple_debug_source_bind_get_value (stmt);
2575*38fd1498Szrj 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2576*38fd1498Szrj 	    }
2577*38fd1498Szrj 	  else if (gimple_debug_nonbind_marker_p (stmt))
2578*38fd1498Szrj 	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2579*38fd1498Szrj 	  else
2580*38fd1498Szrj 	    gcc_unreachable ();
2581*38fd1498Szrj 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2582*38fd1498Szrj 	  id->debug_stmts.safe_push (new_stmt);
2583*38fd1498Szrj 	  gsi_prev (&ssi);
2584*38fd1498Szrj 	}
2585*38fd1498Szrj     }
2586*38fd1498Szrj }
2587*38fd1498Szrj 
2588*38fd1498Szrj /* Make a copy of the sub-loops of SRC_PARENT and place them
2589*38fd1498Szrj    as siblings of DEST_PARENT.  */
2590*38fd1498Szrj 
2591*38fd1498Szrj static void
2592*38fd1498Szrj copy_loops (copy_body_data *id,
2593*38fd1498Szrj 	    struct loop *dest_parent, struct loop *src_parent)
2594*38fd1498Szrj {
2595*38fd1498Szrj   struct loop *src_loop = src_parent->inner;
2596*38fd1498Szrj   while (src_loop)
2597*38fd1498Szrj     {
2598*38fd1498Szrj       if (!id->blocks_to_copy
2599*38fd1498Szrj 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2600*38fd1498Szrj 	{
2601*38fd1498Szrj 	  struct loop *dest_loop = alloc_loop ();
2602*38fd1498Szrj 
2603*38fd1498Szrj 	  /* Assign the new loop its header and latch and associate
2604*38fd1498Szrj 	     those with the new loop.  */
2605*38fd1498Szrj 	  dest_loop->header = (basic_block)src_loop->header->aux;
2606*38fd1498Szrj 	  dest_loop->header->loop_father = dest_loop;
2607*38fd1498Szrj 	  if (src_loop->latch != NULL)
2608*38fd1498Szrj 	    {
2609*38fd1498Szrj 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2610*38fd1498Szrj 	      dest_loop->latch->loop_father = dest_loop;
2611*38fd1498Szrj 	    }
2612*38fd1498Szrj 
2613*38fd1498Szrj 	  /* Copy loop meta-data.  */
2614*38fd1498Szrj 	  copy_loop_info (src_loop, dest_loop);
2615*38fd1498Szrj 
2616*38fd1498Szrj 	  /* Finally place it into the loop array and the loop tree.  */
2617*38fd1498Szrj 	  place_new_loop (cfun, dest_loop);
2618*38fd1498Szrj 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2619*38fd1498Szrj 
2620*38fd1498Szrj 	  dest_loop->safelen = src_loop->safelen;
2621*38fd1498Szrj 	  if (src_loop->unroll)
2622*38fd1498Szrj 	    {
2623*38fd1498Szrj 	      dest_loop->unroll = src_loop->unroll;
2624*38fd1498Szrj 	      cfun->has_unroll = true;
2625*38fd1498Szrj 	    }
2626*38fd1498Szrj 	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
2627*38fd1498Szrj 	  if (src_loop->force_vectorize)
2628*38fd1498Szrj 	    {
2629*38fd1498Szrj 	      dest_loop->force_vectorize = true;
2630*38fd1498Szrj 	      cfun->has_force_vectorize_loops = true;
2631*38fd1498Szrj 	    }
2632*38fd1498Szrj 	  if (src_loop->simduid)
2633*38fd1498Szrj 	    {
2634*38fd1498Szrj 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2635*38fd1498Szrj 	      cfun->has_simduid_loops = true;
2636*38fd1498Szrj 	    }
2637*38fd1498Szrj 
2638*38fd1498Szrj 	  /* Recurse.  */
2639*38fd1498Szrj 	  copy_loops (id, dest_loop, src_loop);
2640*38fd1498Szrj 	}
2641*38fd1498Szrj       src_loop = src_loop->next;
2642*38fd1498Szrj     }
2643*38fd1498Szrj }
2644*38fd1498Szrj 
2645*38fd1498Szrj /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2646*38fd1498Szrj 
2647*38fd1498Szrj void
2648*38fd1498Szrj redirect_all_calls (copy_body_data * id, basic_block bb)
2649*38fd1498Szrj {
2650*38fd1498Szrj   gimple_stmt_iterator si;
2651*38fd1498Szrj   gimple *last = last_stmt (bb);
2652*38fd1498Szrj   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2653*38fd1498Szrj     {
2654*38fd1498Szrj       gimple *stmt = gsi_stmt (si);
2655*38fd1498Szrj       if (is_gimple_call (stmt))
2656*38fd1498Szrj 	{
2657*38fd1498Szrj 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2658*38fd1498Szrj 	  if (edge)
2659*38fd1498Szrj 	    {
2660*38fd1498Szrj 	      edge->redirect_call_stmt_to_callee ();
2661*38fd1498Szrj 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2662*38fd1498Szrj 		gimple_purge_dead_eh_edges (bb);
2663*38fd1498Szrj 	    }
2664*38fd1498Szrj 	}
2665*38fd1498Szrj     }
2666*38fd1498Szrj }
2667*38fd1498Szrj 
2668*38fd1498Szrj /* Make a copy of the body of FN so that it can be inserted inline in
2669*38fd1498Szrj    another function.  Walks FN via CFG, returns new fndecl.  */
2670*38fd1498Szrj 
2671*38fd1498Szrj static tree
2672*38fd1498Szrj copy_cfg_body (copy_body_data * id,
2673*38fd1498Szrj 	       basic_block entry_block_map, basic_block exit_block_map,
2674*38fd1498Szrj 	       basic_block new_entry)
2675*38fd1498Szrj {
2676*38fd1498Szrj   tree callee_fndecl = id->src_fn;
2677*38fd1498Szrj   /* Original cfun for the callee, doesn't change.  */
2678*38fd1498Szrj   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2679*38fd1498Szrj   struct function *cfun_to_copy;
2680*38fd1498Szrj   basic_block bb;
2681*38fd1498Szrj   tree new_fndecl = NULL;
2682*38fd1498Szrj   bool need_debug_cleanup = false;
2683*38fd1498Szrj   int last;
2684*38fd1498Szrj   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2685*38fd1498Szrj   profile_count num = entry_block_map->count;
2686*38fd1498Szrj 
2687*38fd1498Szrj   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2688*38fd1498Szrj 
2689*38fd1498Szrj   /* Register specific tree functions.  */
2690*38fd1498Szrj   gimple_register_cfg_hooks ();
2691*38fd1498Szrj 
2692*38fd1498Szrj   /* If we are inlining just region of the function, make sure to connect
2693*38fd1498Szrj      new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since new entry can be
2694*38fd1498Szrj      part of loop, we must compute frequency and probability of
2695*38fd1498Szrj      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2696*38fd1498Szrj      probabilities of edges incoming from nonduplicated region.  */
2697*38fd1498Szrj   if (new_entry)
2698*38fd1498Szrj     {
2699*38fd1498Szrj       edge e;
2700*38fd1498Szrj       edge_iterator ei;
2701*38fd1498Szrj       den = profile_count::zero ();
2702*38fd1498Szrj 
2703*38fd1498Szrj       FOR_EACH_EDGE (e, ei, new_entry->preds)
2704*38fd1498Szrj 	if (!e->src->aux)
2705*38fd1498Szrj 	  den += e->count ();
2706*38fd1498Szrj       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2707*38fd1498Szrj     }
2708*38fd1498Szrj 
2709*38fd1498Szrj   profile_count::adjust_for_ipa_scaling (&num, &den);
2710*38fd1498Szrj 
2711*38fd1498Szrj   /* Must have a CFG here at this point.  */
2712*38fd1498Szrj   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2713*38fd1498Szrj 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2714*38fd1498Szrj 
2715*38fd1498Szrj 
2716*38fd1498Szrj   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2717*38fd1498Szrj   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2718*38fd1498Szrj   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2719*38fd1498Szrj   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2720*38fd1498Szrj 
2721*38fd1498Szrj   /* Duplicate any exception-handling regions.  */
2722*38fd1498Szrj   if (cfun->eh)
2723*38fd1498Szrj     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2724*38fd1498Szrj 				       remap_decl_1, id);
2725*38fd1498Szrj 
2726*38fd1498Szrj   /* Use aux pointers to map the original blocks to copy.  */
2727*38fd1498Szrj   FOR_EACH_BB_FN (bb, cfun_to_copy)
2728*38fd1498Szrj     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2729*38fd1498Szrj       {
2730*38fd1498Szrj 	basic_block new_bb = copy_bb (id, bb, num, den);
2731*38fd1498Szrj 	bb->aux = new_bb;
2732*38fd1498Szrj 	new_bb->aux = bb;
2733*38fd1498Szrj 	new_bb->loop_father = entry_block_map->loop_father;
2734*38fd1498Szrj       }
2735*38fd1498Szrj 
2736*38fd1498Szrj   last = last_basic_block_for_fn (cfun);
2737*38fd1498Szrj 
2738*38fd1498Szrj   /* Now that we've duplicated the blocks, duplicate their edges.  */
2739*38fd1498Szrj   basic_block abnormal_goto_dest = NULL;
2740*38fd1498Szrj   if (id->call_stmt
2741*38fd1498Szrj       && stmt_can_make_abnormal_goto (id->call_stmt))
2742*38fd1498Szrj     {
2743*38fd1498Szrj       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2744*38fd1498Szrj 
2745*38fd1498Szrj       bb = gimple_bb (id->call_stmt);
2746*38fd1498Szrj       gsi_next (&gsi);
2747*38fd1498Szrj       if (gsi_end_p (gsi))
2748*38fd1498Szrj 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2749*38fd1498Szrj     }
2750*38fd1498Szrj   FOR_ALL_BB_FN (bb, cfun_to_copy)
2751*38fd1498Szrj     if (!id->blocks_to_copy
2752*38fd1498Szrj 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2753*38fd1498Szrj       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2754*38fd1498Szrj 					       abnormal_goto_dest);
2755*38fd1498Szrj 
2756*38fd1498Szrj   if (new_entry)
2757*38fd1498Szrj     {
2758*38fd1498Szrj       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2759*38fd1498Szrj 			  EDGE_FALLTHRU);
2760*38fd1498Szrj       e->probability = profile_probability::always ();
2761*38fd1498Szrj     }
2762*38fd1498Szrj 
2763*38fd1498Szrj   /* Duplicate the loop tree, if available and wanted.  */
2764*38fd1498Szrj   if (loops_for_fn (src_cfun) != NULL
2765*38fd1498Szrj       && current_loops != NULL)
2766*38fd1498Szrj     {
2767*38fd1498Szrj       copy_loops (id, entry_block_map->loop_father,
2768*38fd1498Szrj 		  get_loop (src_cfun, 0));
2769*38fd1498Szrj       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2770*38fd1498Szrj       loops_state_set (LOOPS_NEED_FIXUP);
2771*38fd1498Szrj     }
2772*38fd1498Szrj 
2773*38fd1498Szrj   /* If the loop tree in the source function needed fixup, mark the
2774*38fd1498Szrj      destination loop tree for fixup, too.  */
2775*38fd1498Szrj   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2776*38fd1498Szrj     loops_state_set (LOOPS_NEED_FIXUP);
2777*38fd1498Szrj 
2778*38fd1498Szrj   if (gimple_in_ssa_p (cfun))
2779*38fd1498Szrj     FOR_ALL_BB_FN (bb, cfun_to_copy)
2780*38fd1498Szrj       if (!id->blocks_to_copy
2781*38fd1498Szrj 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2782*38fd1498Szrj 	copy_phis_for_bb (bb, id);
2783*38fd1498Szrj 
2784*38fd1498Szrj   FOR_ALL_BB_FN (bb, cfun_to_copy)
2785*38fd1498Szrj     if (bb->aux)
2786*38fd1498Szrj       {
2787*38fd1498Szrj 	if (need_debug_cleanup
2788*38fd1498Szrj 	    && bb->index != ENTRY_BLOCK
2789*38fd1498Szrj 	    && bb->index != EXIT_BLOCK)
2790*38fd1498Szrj 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2791*38fd1498Szrj 	/* Update call edge destinations.  This can not be done before loop
2792*38fd1498Szrj 	   info is updated, because we may split basic blocks.  */
2793*38fd1498Szrj 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2794*38fd1498Szrj 	    && bb->index != ENTRY_BLOCK
2795*38fd1498Szrj 	    && bb->index != EXIT_BLOCK)
2796*38fd1498Szrj 	  redirect_all_calls (id, (basic_block)bb->aux);
2797*38fd1498Szrj 	((basic_block)bb->aux)->aux = NULL;
2798*38fd1498Szrj 	bb->aux = NULL;
2799*38fd1498Szrj       }
2800*38fd1498Szrj 
2801*38fd1498Szrj   /* Zero out AUX fields of newly created block during EH edge
2802*38fd1498Szrj      insertion. */
2803*38fd1498Szrj   for (; last < last_basic_block_for_fn (cfun); last++)
2804*38fd1498Szrj     {
2805*38fd1498Szrj       if (need_debug_cleanup)
2806*38fd1498Szrj 	maybe_move_debug_stmts_to_successors (id,
2807*38fd1498Szrj 					      BASIC_BLOCK_FOR_FN (cfun, last));
2808*38fd1498Szrj       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2809*38fd1498Szrj       /* Update call edge destinations.  This can not be done before loop
2810*38fd1498Szrj 	 info is updated, because we may split basic blocks.  */
2811*38fd1498Szrj       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2812*38fd1498Szrj 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2813*38fd1498Szrj     }
2814*38fd1498Szrj   entry_block_map->aux = NULL;
2815*38fd1498Szrj   exit_block_map->aux = NULL;
2816*38fd1498Szrj 
2817*38fd1498Szrj   if (id->eh_map)
2818*38fd1498Szrj     {
2819*38fd1498Szrj       delete id->eh_map;
2820*38fd1498Szrj       id->eh_map = NULL;
2821*38fd1498Szrj     }
2822*38fd1498Szrj   if (id->dependence_map)
2823*38fd1498Szrj     {
2824*38fd1498Szrj       delete id->dependence_map;
2825*38fd1498Szrj       id->dependence_map = NULL;
2826*38fd1498Szrj     }
2827*38fd1498Szrj 
2828*38fd1498Szrj   return new_fndecl;
2829*38fd1498Szrj }
2830*38fd1498Szrj 
/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gdebug *stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  /* Remap the statement's lexical block; fall back to ID->block when
     the original block has no mapping.  */
  if (gimple_block (stmt))
    {
      n = id->decl_map->get (gimple_block (stmt));
      gimple_set_block (stmt, n ? *n : id->block);
    }

  /* Nonbind markers carry no operands, so only the block needed
     remapping.  */
  if (gimple_debug_nonbind_marker_p (stmt))
    return;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  /* Signal the remapping machinery that we are inside a debug stmt;
     on a failed remap it flips this negative (see the punt below).  */
  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else if (gimple_debug_bind_p (stmt))
    t = gimple_debug_bind_get_var (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = id->debug_map->get (t)))
    {
      /* Parameters tracked in the debug map are replaced by VAR_DECLs.  */
      gcc_assert (VAR_P (*n));
      t = *n;
    }
  else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      /* When inlining and source bind refers to one of the optimized
	 away parameters, change the source bind into normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
      t = gimple_debug_source_bind_get_value (stmt);
      if (t != NULL_TREE
	  && TREE_CODE (t) == PARM_DECL
	  && id->call_stmt)
	{
	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
	  unsigned int i;
	  if (debug_args != NULL)
	    {
	      /* Debug args are stored as (origin PARM_DECL,
		 DEBUG_EXPR_DECL) pairs; search for this parameter.  */
	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
		if ((**debug_args)[i] == DECL_ORIGIN (t)
		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
		  {
		    t = (**debug_args)[i + 1];
		    /* Mutate the statement in place into a plain bind.  */
		    stmt->subcode = GIMPLE_DEBUG_BIND;
		    gimple_debug_bind_set_value (stmt, t);
		    break;
		  }
	    }
	}
      /* Still a source bind (no pair matched above): remap its value.  */
      if (gimple_debug_source_bind_p (stmt))
	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
}
2924*38fd1498Szrj 
2925*38fd1498Szrj /* Process deferred debug stmts.  In order to give values better odds
2926*38fd1498Szrj    of being successfully remapped, we delay the processing of debug
2927*38fd1498Szrj    stmts until all other stmts that might require remapping are
2928*38fd1498Szrj    processed.  */
2929*38fd1498Szrj 
2930*38fd1498Szrj static void
2931*38fd1498Szrj copy_debug_stmts (copy_body_data *id)
2932*38fd1498Szrj {
2933*38fd1498Szrj   size_t i;
2934*38fd1498Szrj   gdebug *stmt;
2935*38fd1498Szrj 
2936*38fd1498Szrj   if (!id->debug_stmts.exists ())
2937*38fd1498Szrj     return;
2938*38fd1498Szrj 
2939*38fd1498Szrj   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2940*38fd1498Szrj     copy_debug_stmt (stmt, id);
2941*38fd1498Szrj 
2942*38fd1498Szrj   id->debug_stmts.release ();
2943*38fd1498Szrj }
2944*38fd1498Szrj 
2945*38fd1498Szrj /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2946*38fd1498Szrj    another function.  */
2947*38fd1498Szrj 
2948*38fd1498Szrj static tree
2949*38fd1498Szrj copy_tree_body (copy_body_data *id)
2950*38fd1498Szrj {
2951*38fd1498Szrj   tree fndecl = id->src_fn;
2952*38fd1498Szrj   tree body = DECL_SAVED_TREE (fndecl);
2953*38fd1498Szrj 
2954*38fd1498Szrj   walk_tree (&body, copy_tree_body_r, id, NULL);
2955*38fd1498Szrj 
2956*38fd1498Szrj   return body;
2957*38fd1498Szrj }
2958*38fd1498Szrj 
2959*38fd1498Szrj /* Make a copy of the body of FN so that it can be inserted inline in
2960*38fd1498Szrj    another function.  */
2961*38fd1498Szrj 
2962*38fd1498Szrj static tree
2963*38fd1498Szrj copy_body (copy_body_data *id,
2964*38fd1498Szrj 	   basic_block entry_block_map, basic_block exit_block_map,
2965*38fd1498Szrj 	   basic_block new_entry)
2966*38fd1498Szrj {
2967*38fd1498Szrj   tree fndecl = id->src_fn;
2968*38fd1498Szrj   tree body;
2969*38fd1498Szrj 
2970*38fd1498Szrj   /* If this body has a CFG, walk CFG and copy.  */
2971*38fd1498Szrj   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2972*38fd1498Szrj   body = copy_cfg_body (id, entry_block_map, exit_block_map,
2973*38fd1498Szrj 			new_entry);
2974*38fd1498Szrj   copy_debug_stmts (id);
2975*38fd1498Szrj 
2976*38fd1498Szrj   return body;
2977*38fd1498Szrj }
2978*38fd1498Szrj 
2979*38fd1498Szrj /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2980*38fd1498Szrj    defined in function FN, or of a data member thereof.  */
2981*38fd1498Szrj 
2982*38fd1498Szrj static bool
2983*38fd1498Szrj self_inlining_addr_expr (tree value, tree fn)
2984*38fd1498Szrj {
2985*38fd1498Szrj   tree var;
2986*38fd1498Szrj 
2987*38fd1498Szrj   if (TREE_CODE (value) != ADDR_EXPR)
2988*38fd1498Szrj     return false;
2989*38fd1498Szrj 
2990*38fd1498Szrj   var = get_base_address (TREE_OPERAND (value, 0));
2991*38fd1498Szrj 
2992*38fd1498Szrj   return var && auto_var_in_fn_p (var, fn);
2993*38fd1498Szrj }
2994*38fd1498Szrj 
/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from base_stmt, if given,
   or from the last stmt of the block otherwise.  Returns the new debug
   bind stmt, or NULL when no bind is emitted.  */

static gimple *
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple *base_stmt)
{
  gimple *note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  /* Debug binds only make sense when the source is in SSA form.  */
  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  /* Nothing to do unless VTA is enabled for the destination function.  */
  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return NULL;

  /* Skip variables debug info does not track.  */
  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  /* With a BB but no BASE_STMT, inherit location info from the last
     stmt of the block.  NOTE: GSI stays live for the insertion below.  */
  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);

  /* Place the bind after the last stmt, or as the sole stmt of an
     empty block.  When BB is NULL the caller inserts NOTE itself.  */
  if (bb)
    {
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
3037*38fd1498Szrj 
/* Insert INIT_STMT at the end of BB, regimplifying it as needed and
   adding a matching debug bind for non-debug assignments.  INIT_STMT
   may be NULL, in which case nothing is done.  */

static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  /* Rebuild the unary RHS as a tree, gimplify it into a
	     temporary inserted before INIT_STMT, then rewrite
	     INIT_STMT to copy from that temporary.  */
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     gimple_expr_type (init_stmt),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);

      /* Mirror the initialization with a debug bind so the debugger
	 sees the parameter value at the inline point.  */
      if (!is_gimple_debug (init_stmt))
	{
	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}
3074*38fd1498Szrj 
/* Initialize parameter P with VALUE.  If needed, produce init statement
   at the end of BB.  When BB is NULL, we return init statement to be
   output later.  ID describes the inlining context, FN is the function
   being inlined, and *VARS collects the replacement VAR_DECLs chained
   for the caller.  Returns the init statement, or NULL when none is
   required (or when it was already inserted into BB).  */
static gimple *
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple *init_stmt = NULL;
  tree var;
  tree rhs = value;
  /* Default definition SSA name of P in the source function, if any.  */
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  /* Reconcile a type mismatch between the parameter and the argument.  */
  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
	rhs = fold_convert (TREE_TYPE (p), value);
      else
	{
	  /* ???  For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
	     GIMPLE to the following passes.  */
	  if (!is_gimple_reg_type (TREE_TYPE (value))
	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
	  else
	    rhs = build_zero_cst (TREE_TYPE (p));
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when operand is not really constant.
	 It is not big deal to prohibit constant propagation here as
	 we will constant propagate in DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && useless_type_conversion_p (TREE_TYPE (p),
						 TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  /* Substitute the argument value directly; VAR is kept only
	     for debug-info purposes.  */
	  insert_decl_map (id, p, value);
	  insert_debug_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to function.

     We need to construct map for the variable anyway as it might be used
     in different SSA names when parameter is set in function.

     Do replacement at -O0 for const arguments replaced by constant.
     This is important for builtin_constant_p and other construct requiring
     constant argument to be visible in inlined function body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
          || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of argument is never used, don't care about initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  /* Erroneous argument: just map P to VAR and emit no init.  */
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition
         or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      /* The copied name now has a real definition.  */
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	  else if (!optimize)
	    {
	      def = make_ssa_name (var);
	      init_stmt = gimple_build_assign (def, rhs);
	    }
	}
      else
        init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
        insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
3235*38fd1498Szrj 
3236*38fd1498Szrj /* Generate code to initialize the parameters of the function at the
3237*38fd1498Szrj    top of the stack in ID from the GIMPLE_CALL STMT.  */
3238*38fd1498Szrj 
3239*38fd1498Szrj static void
3240*38fd1498Szrj initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3241*38fd1498Szrj 			       tree fn, basic_block bb)
3242*38fd1498Szrj {
3243*38fd1498Szrj   tree parms;
3244*38fd1498Szrj   size_t i;
3245*38fd1498Szrj   tree p;
3246*38fd1498Szrj   tree vars = NULL_TREE;
3247*38fd1498Szrj   tree static_chain = gimple_call_chain (stmt);
3248*38fd1498Szrj 
3249*38fd1498Szrj   /* Figure out what the parameters are.  */
3250*38fd1498Szrj   parms = DECL_ARGUMENTS (fn);
3251*38fd1498Szrj 
3252*38fd1498Szrj   /* Loop through the parameter declarations, replacing each with an
3253*38fd1498Szrj      equivalent VAR_DECL, appropriately initialized.  */
3254*38fd1498Szrj   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3255*38fd1498Szrj     {
3256*38fd1498Szrj       tree val;
3257*38fd1498Szrj       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3258*38fd1498Szrj       setup_one_parameter (id, p, val, fn, bb, &vars);
3259*38fd1498Szrj     }
3260*38fd1498Szrj   /* After remapping parameters remap their types.  This has to be done
3261*38fd1498Szrj      in a second loop over all parameters to appropriately remap
3262*38fd1498Szrj      variable sized arrays when the size is specified in a
3263*38fd1498Szrj      parameter following the array.  */
3264*38fd1498Szrj   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3265*38fd1498Szrj     {
3266*38fd1498Szrj       tree *varp = id->decl_map->get (p);
3267*38fd1498Szrj       if (varp && VAR_P (*varp))
3268*38fd1498Szrj 	{
3269*38fd1498Szrj 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3270*38fd1498Szrj 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3271*38fd1498Szrj 	  tree var = *varp;
3272*38fd1498Szrj 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3273*38fd1498Szrj 	  /* Also remap the default definition if it was remapped
3274*38fd1498Szrj 	     to the default definition of the parameter replacement
3275*38fd1498Szrj 	     by the parameter setup.  */
3276*38fd1498Szrj 	  if (def)
3277*38fd1498Szrj 	    {
3278*38fd1498Szrj 	      tree *defp = id->decl_map->get (def);
3279*38fd1498Szrj 	      if (defp
3280*38fd1498Szrj 		  && TREE_CODE (*defp) == SSA_NAME
3281*38fd1498Szrj 		  && SSA_NAME_VAR (*defp) == var)
3282*38fd1498Szrj 		TREE_TYPE (*defp) = TREE_TYPE (var);
3283*38fd1498Szrj 	    }
3284*38fd1498Szrj 	}
3285*38fd1498Szrj     }
3286*38fd1498Szrj 
3287*38fd1498Szrj   /* Initialize the static chain.  */
3288*38fd1498Szrj   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3289*38fd1498Szrj   gcc_assert (fn != current_function_decl);
3290*38fd1498Szrj   if (p)
3291*38fd1498Szrj     {
3292*38fd1498Szrj       /* No static chain?  Seems like a bug in tree-nested.c.  */
3293*38fd1498Szrj       gcc_assert (static_chain);
3294*38fd1498Szrj 
3295*38fd1498Szrj       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3296*38fd1498Szrj     }
3297*38fd1498Szrj 
3298*38fd1498Szrj   declare_inline_vars (id->block, vars);
3299*38fd1498Szrj }
3300*38fd1498Szrj 
3301*38fd1498Szrj 
3302*38fd1498Szrj /* Declare a return variable to replace the RESULT_DECL for the
3303*38fd1498Szrj    function we are calling.  An appropriate DECL_STMT is returned.
3304*38fd1498Szrj    The USE_STMT is filled to contain a use of the declaration to
3305*38fd1498Szrj    indicate the return value of the function.
3306*38fd1498Szrj 
3307*38fd1498Szrj    RETURN_SLOT, if non-null is place where to store the result.  It
3308*38fd1498Szrj    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3309*38fd1498Szrj    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3310*38fd1498Szrj 
3311*38fd1498Szrj    RETURN_BOUNDS holds a destination for returned bounds.
3312*38fd1498Szrj 
3313*38fd1498Szrj    The return value is a (possibly null) value that holds the result
3314*38fd1498Szrj    as seen by the caller.  */
3315*38fd1498Szrj 
3316*38fd1498Szrj static tree
3317*38fd1498Szrj declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3318*38fd1498Szrj 			 tree return_bounds, basic_block entry_bb)
3319*38fd1498Szrj {
3320*38fd1498Szrj   tree callee = id->src_fn;
3321*38fd1498Szrj   tree result = DECL_RESULT (callee);
3322*38fd1498Szrj   tree callee_type = TREE_TYPE (result);
3323*38fd1498Szrj   tree caller_type;
3324*38fd1498Szrj   tree var, use;
3325*38fd1498Szrj 
3326*38fd1498Szrj   /* Handle type-mismatches in the function declaration return type
3327*38fd1498Szrj      vs. the call expression.  */
3328*38fd1498Szrj   if (modify_dest)
3329*38fd1498Szrj     caller_type = TREE_TYPE (modify_dest);
3330*38fd1498Szrj   else
3331*38fd1498Szrj     caller_type = TREE_TYPE (TREE_TYPE (callee));
3332*38fd1498Szrj 
3333*38fd1498Szrj   /* We don't need to do anything for functions that don't return anything.  */
3334*38fd1498Szrj   if (VOID_TYPE_P (callee_type))
3335*38fd1498Szrj     return NULL_TREE;
3336*38fd1498Szrj 
3337*38fd1498Szrj   /* If there was a return slot, then the return value is the
3338*38fd1498Szrj      dereferenced address of that object.  */
3339*38fd1498Szrj   if (return_slot)
3340*38fd1498Szrj     {
3341*38fd1498Szrj       /* The front end shouldn't have used both return_slot and
3342*38fd1498Szrj 	 a modify expression.  */
3343*38fd1498Szrj       gcc_assert (!modify_dest);
3344*38fd1498Szrj       if (DECL_BY_REFERENCE (result))
3345*38fd1498Szrj 	{
3346*38fd1498Szrj 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3347*38fd1498Szrj 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3348*38fd1498Szrj 
3349*38fd1498Szrj 	  /* We are going to construct *&return_slot and we can't do that
3350*38fd1498Szrj 	     for variables believed to be not addressable.
3351*38fd1498Szrj 
3352*38fd1498Szrj 	     FIXME: This check possibly can match, because values returned
3353*38fd1498Szrj 	     via return slot optimization are not believed to have address
3354*38fd1498Szrj 	     taken by alias analysis.  */
3355*38fd1498Szrj 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3356*38fd1498Szrj 	  var = return_slot_addr;
3357*38fd1498Szrj 	}
3358*38fd1498Szrj       else
3359*38fd1498Szrj 	{
3360*38fd1498Szrj 	  var = return_slot;
3361*38fd1498Szrj 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3362*38fd1498Szrj 	  if (TREE_ADDRESSABLE (result))
3363*38fd1498Szrj 	    mark_addressable (var);
3364*38fd1498Szrj 	}
3365*38fd1498Szrj       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3366*38fd1498Szrj            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3367*38fd1498Szrj 	  && !DECL_GIMPLE_REG_P (result)
3368*38fd1498Szrj 	  && DECL_P (var))
3369*38fd1498Szrj 	DECL_GIMPLE_REG_P (var) = 0;
3370*38fd1498Szrj       use = NULL;
3371*38fd1498Szrj       goto done;
3372*38fd1498Szrj     }
3373*38fd1498Szrj 
3374*38fd1498Szrj   /* All types requiring non-trivial constructors should have been handled.  */
3375*38fd1498Szrj   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3376*38fd1498Szrj 
3377*38fd1498Szrj   /* Attempt to avoid creating a new temporary variable.  */
3378*38fd1498Szrj   if (modify_dest
3379*38fd1498Szrj       && TREE_CODE (modify_dest) != SSA_NAME)
3380*38fd1498Szrj     {
3381*38fd1498Szrj       bool use_it = false;
3382*38fd1498Szrj 
3383*38fd1498Szrj       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3384*38fd1498Szrj       if (!useless_type_conversion_p (callee_type, caller_type))
3385*38fd1498Szrj 	use_it = false;
3386*38fd1498Szrj 
3387*38fd1498Szrj       /* ??? If we're assigning to a variable sized type, then we must
3388*38fd1498Szrj 	 reuse the destination variable, because we've no good way to
3389*38fd1498Szrj 	 create variable sized temporaries at this point.  */
3390*38fd1498Szrj       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3391*38fd1498Szrj 	use_it = true;
3392*38fd1498Szrj 
3393*38fd1498Szrj       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3394*38fd1498Szrj 	 reuse it as the result of the call directly.  Don't do this if
3395*38fd1498Szrj 	 it would promote MODIFY_DEST to addressable.  */
3396*38fd1498Szrj       else if (TREE_ADDRESSABLE (result))
3397*38fd1498Szrj 	use_it = false;
3398*38fd1498Szrj       else
3399*38fd1498Szrj 	{
3400*38fd1498Szrj 	  tree base_m = get_base_address (modify_dest);
3401*38fd1498Szrj 
3402*38fd1498Szrj 	  /* If the base isn't a decl, then it's a pointer, and we don't
3403*38fd1498Szrj 	     know where that's going to go.  */
3404*38fd1498Szrj 	  if (!DECL_P (base_m))
3405*38fd1498Szrj 	    use_it = false;
3406*38fd1498Szrj 	  else if (is_global_var (base_m))
3407*38fd1498Szrj 	    use_it = false;
3408*38fd1498Szrj 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3409*38fd1498Szrj 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3410*38fd1498Szrj 		   && !DECL_GIMPLE_REG_P (result)
3411*38fd1498Szrj 		   && DECL_GIMPLE_REG_P (base_m))
3412*38fd1498Szrj 	    use_it = false;
3413*38fd1498Szrj 	  else if (!TREE_ADDRESSABLE (base_m))
3414*38fd1498Szrj 	    use_it = true;
3415*38fd1498Szrj 	}
3416*38fd1498Szrj 
3417*38fd1498Szrj       if (use_it)
3418*38fd1498Szrj 	{
3419*38fd1498Szrj 	  var = modify_dest;
3420*38fd1498Szrj 	  use = NULL;
3421*38fd1498Szrj 	  goto done;
3422*38fd1498Szrj 	}
3423*38fd1498Szrj     }
3424*38fd1498Szrj 
3425*38fd1498Szrj   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3426*38fd1498Szrj 
3427*38fd1498Szrj   var = copy_result_decl_to_var (result, id);
3428*38fd1498Szrj   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3429*38fd1498Szrj 
3430*38fd1498Szrj   /* Do not have the rest of GCC warn about this variable as it should
3431*38fd1498Szrj      not be visible to the user.  */
3432*38fd1498Szrj   TREE_NO_WARNING (var) = 1;
3433*38fd1498Szrj 
3434*38fd1498Szrj   declare_inline_vars (id->block, var);
3435*38fd1498Szrj 
3436*38fd1498Szrj   /* Build the use expr.  If the return type of the function was
3437*38fd1498Szrj      promoted, convert it back to the expected type.  */
3438*38fd1498Szrj   use = var;
3439*38fd1498Szrj   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3440*38fd1498Szrj     {
3441*38fd1498Szrj       /* If we can match up types by promotion/demotion do so.  */
3442*38fd1498Szrj       if (fold_convertible_p (caller_type, var))
3443*38fd1498Szrj 	use = fold_convert (caller_type, var);
3444*38fd1498Szrj       else
3445*38fd1498Szrj 	{
3446*38fd1498Szrj 	  /* ???  For valid programs we should not end up here.
3447*38fd1498Szrj 	     Still if we end up with truly mismatched types here, fall back
3448*38fd1498Szrj 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3449*38fd1498Szrj 	     passes.  */
3450*38fd1498Szrj 	  /* Prevent var from being written into SSA form.  */
3451*38fd1498Szrj 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3452*38fd1498Szrj 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3453*38fd1498Szrj 	    DECL_GIMPLE_REG_P (var) = false;
3454*38fd1498Szrj 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3455*38fd1498Szrj 	    TREE_ADDRESSABLE (var) = true;
3456*38fd1498Szrj 	  use = fold_build2 (MEM_REF, caller_type,
3457*38fd1498Szrj 			     build_fold_addr_expr (var),
3458*38fd1498Szrj 			     build_int_cst (ptr_type_node, 0));
3459*38fd1498Szrj 	}
3460*38fd1498Szrj     }
3461*38fd1498Szrj 
3462*38fd1498Szrj   STRIP_USELESS_TYPE_CONVERSION (use);
3463*38fd1498Szrj 
3464*38fd1498Szrj   if (DECL_BY_REFERENCE (result))
3465*38fd1498Szrj     {
3466*38fd1498Szrj       TREE_ADDRESSABLE (var) = 1;
3467*38fd1498Szrj       var = build_fold_addr_expr (var);
3468*38fd1498Szrj     }
3469*38fd1498Szrj 
3470*38fd1498Szrj  done:
3471*38fd1498Szrj   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3472*38fd1498Szrj      way, when the RESULT_DECL is encountered, it will be
3473*38fd1498Szrj      automatically replaced by the VAR_DECL.
3474*38fd1498Szrj 
3475*38fd1498Szrj      When returning by reference, ensure that RESULT_DECL remaps to
3476*38fd1498Szrj      gimple_val.  */
3477*38fd1498Szrj   if (DECL_BY_REFERENCE (result)
3478*38fd1498Szrj       && !is_gimple_val (var))
3479*38fd1498Szrj     {
3480*38fd1498Szrj       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3481*38fd1498Szrj       insert_decl_map (id, result, temp);
3482*38fd1498Szrj       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3483*38fd1498Szrj 	 it's default_def SSA_NAME.  */
3484*38fd1498Szrj       if (gimple_in_ssa_p (id->src_cfun)
3485*38fd1498Szrj 	  && is_gimple_reg (result))
3486*38fd1498Szrj 	{
3487*38fd1498Szrj 	  temp = make_ssa_name (temp);
3488*38fd1498Szrj 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3489*38fd1498Szrj 	}
3490*38fd1498Szrj       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3491*38fd1498Szrj     }
3492*38fd1498Szrj   else
3493*38fd1498Szrj     insert_decl_map (id, result, var);
3494*38fd1498Szrj 
3495*38fd1498Szrj   /* Remember this so we can ignore it in remap_decls.  */
3496*38fd1498Szrj   id->retvar = var;
3497*38fd1498Szrj 
3498*38fd1498Szrj   /* If returned bounds are used, then make var for them.  */
3499*38fd1498Szrj   if (return_bounds)
3500*38fd1498Szrj   {
3501*38fd1498Szrj     tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3502*38fd1498Szrj     DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3503*38fd1498Szrj     TREE_NO_WARNING (bndtemp) = 1;
3504*38fd1498Szrj     declare_inline_vars (id->block, bndtemp);
3505*38fd1498Szrj 
3506*38fd1498Szrj     id->retbnd = bndtemp;
3507*38fd1498Szrj     insert_init_stmt (id, entry_bb,
3508*38fd1498Szrj 		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3509*38fd1498Szrj   }
3510*38fd1498Szrj 
3511*38fd1498Szrj   return use;
3512*38fd1498Szrj }
3513*38fd1498Szrj 
3514*38fd1498Szrj /* Determine if the function can be copied.  If so return NULL.  If
3515*38fd1498Szrj    not return a string describng the reason for failure.  */
3516*38fd1498Szrj 
3517*38fd1498Szrj const char *
3518*38fd1498Szrj copy_forbidden (struct function *fun)
3519*38fd1498Szrj {
3520*38fd1498Szrj   const char *reason = fun->cannot_be_copied_reason;
3521*38fd1498Szrj 
3522*38fd1498Szrj   /* Only examine the function once.  */
3523*38fd1498Szrj   if (fun->cannot_be_copied_set)
3524*38fd1498Szrj     return reason;
3525*38fd1498Szrj 
3526*38fd1498Szrj   /* We cannot copy a function that receives a non-local goto
3527*38fd1498Szrj      because we cannot remap the destination label used in the
3528*38fd1498Szrj      function that is performing the non-local goto.  */
3529*38fd1498Szrj   /* ??? Actually, this should be possible, if we work at it.
3530*38fd1498Szrj      No doubt there's just a handful of places that simply
3531*38fd1498Szrj      assume it doesn't happen and don't substitute properly.  */
3532*38fd1498Szrj   if (fun->has_nonlocal_label)
3533*38fd1498Szrj     {
3534*38fd1498Szrj       reason = G_("function %q+F can never be copied "
3535*38fd1498Szrj 		  "because it receives a non-local goto");
3536*38fd1498Szrj       goto fail;
3537*38fd1498Szrj     }
3538*38fd1498Szrj 
3539*38fd1498Szrj   if (fun->has_forced_label_in_static)
3540*38fd1498Szrj     {
3541*38fd1498Szrj       reason = G_("function %q+F can never be copied because it saves "
3542*38fd1498Szrj 		  "address of local label in a static variable");
3543*38fd1498Szrj       goto fail;
3544*38fd1498Szrj     }
3545*38fd1498Szrj 
3546*38fd1498Szrj  fail:
3547*38fd1498Szrj   fun->cannot_be_copied_reason = reason;
3548*38fd1498Szrj   fun->cannot_be_copied_set = true;
3549*38fd1498Szrj   return reason;
3550*38fd1498Szrj }
3551*38fd1498Szrj 
3552*38fd1498Szrj 
3553*38fd1498Szrj static const char *inline_forbidden_reason;
3554*38fd1498Szrj 
3555*38fd1498Szrj /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3556*38fd1498Szrj    iff a function can not be inlined.  Also sets the reason why. */
3557*38fd1498Szrj 
3558*38fd1498Szrj static tree
3559*38fd1498Szrj inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3560*38fd1498Szrj 			 struct walk_stmt_info *wip)
3561*38fd1498Szrj {
3562*38fd1498Szrj   tree fn = (tree) wip->info;
3563*38fd1498Szrj   tree t;
3564*38fd1498Szrj   gimple *stmt = gsi_stmt (*gsi);
3565*38fd1498Szrj 
3566*38fd1498Szrj   switch (gimple_code (stmt))
3567*38fd1498Szrj     {
3568*38fd1498Szrj     case GIMPLE_CALL:
3569*38fd1498Szrj       /* Refuse to inline alloca call unless user explicitly forced so as
3570*38fd1498Szrj 	 this may change program's memory overhead drastically when the
3571*38fd1498Szrj 	 function using alloca is called in loop.  In GCC present in
3572*38fd1498Szrj 	 SPEC2000 inlining into schedule_block cause it to require 2GB of
3573*38fd1498Szrj 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3574*38fd1498Szrj 	 VLA objects as those can't cause unbounded growth (they're always
3575*38fd1498Szrj 	 wrapped inside stack_save/stack_restore regions.  */
3576*38fd1498Szrj       if (gimple_maybe_alloca_call_p (stmt)
3577*38fd1498Szrj 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3578*38fd1498Szrj 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3579*38fd1498Szrj 	{
3580*38fd1498Szrj 	  inline_forbidden_reason
3581*38fd1498Szrj 	    = G_("function %q+F can never be inlined because it uses "
3582*38fd1498Szrj 		 "alloca (override using the always_inline attribute)");
3583*38fd1498Szrj 	  *handled_ops_p = true;
3584*38fd1498Szrj 	  return fn;
3585*38fd1498Szrj 	}
3586*38fd1498Szrj 
3587*38fd1498Szrj       t = gimple_call_fndecl (stmt);
3588*38fd1498Szrj       if (t == NULL_TREE)
3589*38fd1498Szrj 	break;
3590*38fd1498Szrj 
3591*38fd1498Szrj       /* We cannot inline functions that call setjmp.  */
3592*38fd1498Szrj       if (setjmp_call_p (t))
3593*38fd1498Szrj 	{
3594*38fd1498Szrj 	  inline_forbidden_reason
3595*38fd1498Szrj 	    = G_("function %q+F can never be inlined because it uses setjmp");
3596*38fd1498Szrj 	  *handled_ops_p = true;
3597*38fd1498Szrj 	  return t;
3598*38fd1498Szrj 	}
3599*38fd1498Szrj 
3600*38fd1498Szrj       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3601*38fd1498Szrj 	switch (DECL_FUNCTION_CODE (t))
3602*38fd1498Szrj 	  {
3603*38fd1498Szrj 	    /* We cannot inline functions that take a variable number of
3604*38fd1498Szrj 	       arguments.  */
3605*38fd1498Szrj 	  case BUILT_IN_VA_START:
3606*38fd1498Szrj 	  case BUILT_IN_NEXT_ARG:
3607*38fd1498Szrj 	  case BUILT_IN_VA_END:
3608*38fd1498Szrj 	    inline_forbidden_reason
3609*38fd1498Szrj 	      = G_("function %q+F can never be inlined because it "
3610*38fd1498Szrj 		   "uses variable argument lists");
3611*38fd1498Szrj 	    *handled_ops_p = true;
3612*38fd1498Szrj 	    return t;
3613*38fd1498Szrj 
3614*38fd1498Szrj 	  case BUILT_IN_LONGJMP:
3615*38fd1498Szrj 	    /* We can't inline functions that call __builtin_longjmp at
3616*38fd1498Szrj 	       all.  The non-local goto machinery really requires the
3617*38fd1498Szrj 	       destination be in a different function.  If we allow the
3618*38fd1498Szrj 	       function calling __builtin_longjmp to be inlined into the
3619*38fd1498Szrj 	       function calling __builtin_setjmp, Things will Go Awry.  */
3620*38fd1498Szrj 	    inline_forbidden_reason
3621*38fd1498Szrj 	      = G_("function %q+F can never be inlined because "
3622*38fd1498Szrj 		   "it uses setjmp-longjmp exception handling");
3623*38fd1498Szrj 	    *handled_ops_p = true;
3624*38fd1498Szrj 	    return t;
3625*38fd1498Szrj 
3626*38fd1498Szrj 	  case BUILT_IN_NONLOCAL_GOTO:
3627*38fd1498Szrj 	    /* Similarly.  */
3628*38fd1498Szrj 	    inline_forbidden_reason
3629*38fd1498Szrj 	      = G_("function %q+F can never be inlined because "
3630*38fd1498Szrj 		   "it uses non-local goto");
3631*38fd1498Szrj 	    *handled_ops_p = true;
3632*38fd1498Szrj 	    return t;
3633*38fd1498Szrj 
3634*38fd1498Szrj 	  case BUILT_IN_RETURN:
3635*38fd1498Szrj 	  case BUILT_IN_APPLY_ARGS:
3636*38fd1498Szrj 	    /* If a __builtin_apply_args caller would be inlined,
3637*38fd1498Szrj 	       it would be saving arguments of the function it has
3638*38fd1498Szrj 	       been inlined into.  Similarly __builtin_return would
3639*38fd1498Szrj 	       return from the function the inline has been inlined into.  */
3640*38fd1498Szrj 	    inline_forbidden_reason
3641*38fd1498Szrj 	      = G_("function %q+F can never be inlined because "
3642*38fd1498Szrj 		   "it uses __builtin_return or __builtin_apply_args");
3643*38fd1498Szrj 	    *handled_ops_p = true;
3644*38fd1498Szrj 	    return t;
3645*38fd1498Szrj 
3646*38fd1498Szrj 	  default:
3647*38fd1498Szrj 	    break;
3648*38fd1498Szrj 	  }
3649*38fd1498Szrj       break;
3650*38fd1498Szrj 
3651*38fd1498Szrj     case GIMPLE_GOTO:
3652*38fd1498Szrj       t = gimple_goto_dest (stmt);
3653*38fd1498Szrj 
3654*38fd1498Szrj       /* We will not inline a function which uses computed goto.  The
3655*38fd1498Szrj 	 addresses of its local labels, which may be tucked into
3656*38fd1498Szrj 	 global storage, are of course not constant across
3657*38fd1498Szrj 	 instantiations, which causes unexpected behavior.  */
3658*38fd1498Szrj       if (TREE_CODE (t) != LABEL_DECL)
3659*38fd1498Szrj 	{
3660*38fd1498Szrj 	  inline_forbidden_reason
3661*38fd1498Szrj 	    = G_("function %q+F can never be inlined "
3662*38fd1498Szrj 		 "because it contains a computed goto");
3663*38fd1498Szrj 	  *handled_ops_p = true;
3664*38fd1498Szrj 	  return t;
3665*38fd1498Szrj 	}
3666*38fd1498Szrj       break;
3667*38fd1498Szrj 
3668*38fd1498Szrj     default:
3669*38fd1498Szrj       break;
3670*38fd1498Szrj     }
3671*38fd1498Szrj 
3672*38fd1498Szrj   *handled_ops_p = false;
3673*38fd1498Szrj   return NULL_TREE;
3674*38fd1498Szrj }
3675*38fd1498Szrj 
3676*38fd1498Szrj /* Return true if FNDECL is a function that cannot be inlined into
3677*38fd1498Szrj    another one.  */
3678*38fd1498Szrj 
3679*38fd1498Szrj static bool
3680*38fd1498Szrj inline_forbidden_p (tree fndecl)
3681*38fd1498Szrj {
3682*38fd1498Szrj   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3683*38fd1498Szrj   struct walk_stmt_info wi;
3684*38fd1498Szrj   basic_block bb;
3685*38fd1498Szrj   bool forbidden_p = false;
3686*38fd1498Szrj 
3687*38fd1498Szrj   /* First check for shared reasons not to copy the code.  */
3688*38fd1498Szrj   inline_forbidden_reason = copy_forbidden (fun);
3689*38fd1498Szrj   if (inline_forbidden_reason != NULL)
3690*38fd1498Szrj     return true;
3691*38fd1498Szrj 
3692*38fd1498Szrj   /* Next, walk the statements of the function looking for
3693*38fd1498Szrj      constraucts we can't handle, or are non-optimal for inlining.  */
3694*38fd1498Szrj   hash_set<tree> visited_nodes;
3695*38fd1498Szrj   memset (&wi, 0, sizeof (wi));
3696*38fd1498Szrj   wi.info = (void *) fndecl;
3697*38fd1498Szrj   wi.pset = &visited_nodes;
3698*38fd1498Szrj 
3699*38fd1498Szrj   FOR_EACH_BB_FN (bb, fun)
3700*38fd1498Szrj     {
3701*38fd1498Szrj       gimple *ret;
3702*38fd1498Szrj       gimple_seq seq = bb_seq (bb);
3703*38fd1498Szrj       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3704*38fd1498Szrj       forbidden_p = (ret != NULL);
3705*38fd1498Szrj       if (forbidden_p)
3706*38fd1498Szrj 	break;
3707*38fd1498Szrj     }
3708*38fd1498Szrj 
3709*38fd1498Szrj   return forbidden_p;
3710*38fd1498Szrj }
3711*38fd1498Szrj 
3712*38fd1498Szrj /* Return false if the function FNDECL cannot be inlined on account of its
3713*38fd1498Szrj    attributes, true otherwise.  */
3714*38fd1498Szrj static bool
3715*38fd1498Szrj function_attribute_inlinable_p (const_tree fndecl)
3716*38fd1498Szrj {
3717*38fd1498Szrj   if (targetm.attribute_table)
3718*38fd1498Szrj     {
3719*38fd1498Szrj       const_tree a;
3720*38fd1498Szrj 
3721*38fd1498Szrj       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3722*38fd1498Szrj 	{
3723*38fd1498Szrj 	  const_tree name = TREE_PURPOSE (a);
3724*38fd1498Szrj 	  int i;
3725*38fd1498Szrj 
3726*38fd1498Szrj 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3727*38fd1498Szrj 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3728*38fd1498Szrj 	      return targetm.function_attribute_inlinable_p (fndecl);
3729*38fd1498Szrj 	}
3730*38fd1498Szrj     }
3731*38fd1498Szrj 
3732*38fd1498Szrj   return true;
3733*38fd1498Szrj }
3734*38fd1498Szrj 
3735*38fd1498Szrj /* Returns nonzero if FN is a function that does not have any
3736*38fd1498Szrj    fundamental inline blocking properties.  */
3737*38fd1498Szrj 
3738*38fd1498Szrj bool
3739*38fd1498Szrj tree_inlinable_function_p (tree fn)
3740*38fd1498Szrj {
3741*38fd1498Szrj   bool inlinable = true;
3742*38fd1498Szrj   bool do_warning;
3743*38fd1498Szrj   tree always_inline;
3744*38fd1498Szrj 
3745*38fd1498Szrj   /* If we've already decided this function shouldn't be inlined,
3746*38fd1498Szrj      there's no need to check again.  */
3747*38fd1498Szrj   if (DECL_UNINLINABLE (fn))
3748*38fd1498Szrj     return false;
3749*38fd1498Szrj 
3750*38fd1498Szrj   /* We only warn for functions declared `inline' by the user.  */
3751*38fd1498Szrj   do_warning = (warn_inline
3752*38fd1498Szrj 		&& DECL_DECLARED_INLINE_P (fn)
3753*38fd1498Szrj 		&& !DECL_NO_INLINE_WARNING_P (fn)
3754*38fd1498Szrj 		&& !DECL_IN_SYSTEM_HEADER (fn));
3755*38fd1498Szrj 
3756*38fd1498Szrj   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3757*38fd1498Szrj 
3758*38fd1498Szrj   if (flag_no_inline
3759*38fd1498Szrj       && always_inline == NULL)
3760*38fd1498Szrj     {
3761*38fd1498Szrj       if (do_warning)
3762*38fd1498Szrj         warning (OPT_Winline, "function %q+F can never be inlined because it "
3763*38fd1498Szrj                  "is suppressed using -fno-inline", fn);
3764*38fd1498Szrj       inlinable = false;
3765*38fd1498Szrj     }
3766*38fd1498Szrj 
3767*38fd1498Szrj   else if (!function_attribute_inlinable_p (fn))
3768*38fd1498Szrj     {
3769*38fd1498Szrj       if (do_warning)
3770*38fd1498Szrj         warning (OPT_Winline, "function %q+F can never be inlined because it "
3771*38fd1498Szrj                  "uses attributes conflicting with inlining", fn);
3772*38fd1498Szrj       inlinable = false;
3773*38fd1498Szrj     }
3774*38fd1498Szrj 
3775*38fd1498Szrj   else if (inline_forbidden_p (fn))
3776*38fd1498Szrj     {
3777*38fd1498Szrj       /* See if we should warn about uninlinable functions.  Previously,
3778*38fd1498Szrj 	 some of these warnings would be issued while trying to expand
3779*38fd1498Szrj 	 the function inline, but that would cause multiple warnings
3780*38fd1498Szrj 	 about functions that would for example call alloca.  But since
3781*38fd1498Szrj 	 this a property of the function, just one warning is enough.
3782*38fd1498Szrj 	 As a bonus we can now give more details about the reason why a
3783*38fd1498Szrj 	 function is not inlinable.  */
3784*38fd1498Szrj       if (always_inline)
3785*38fd1498Szrj 	error (inline_forbidden_reason, fn);
3786*38fd1498Szrj       else if (do_warning)
3787*38fd1498Szrj 	warning (OPT_Winline, inline_forbidden_reason, fn);
3788*38fd1498Szrj 
3789*38fd1498Szrj       inlinable = false;
3790*38fd1498Szrj     }
3791*38fd1498Szrj 
3792*38fd1498Szrj   /* Squirrel away the result so that we don't have to check again.  */
3793*38fd1498Szrj   DECL_UNINLINABLE (fn) = !inlinable;
3794*38fd1498Szrj 
3795*38fd1498Szrj   return inlinable;
3796*38fd1498Szrj }
3797*38fd1498Szrj 
3798*38fd1498Szrj /* Estimate the cost of a memory move of type TYPE.  Use machine dependent
3799*38fd1498Szrj    word size and take possible memcpy call into account and return
3800*38fd1498Szrj    cost based on whether optimizing for size or speed according to SPEED_P.  */
3801*38fd1498Szrj 
3802*38fd1498Szrj int
3803*38fd1498Szrj estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3804*38fd1498Szrj {
3805*38fd1498Szrj   HOST_WIDE_INT size;
3806*38fd1498Szrj 
3807*38fd1498Szrj   gcc_assert (!VOID_TYPE_P (type));
3808*38fd1498Szrj 
3809*38fd1498Szrj   if (TREE_CODE (type) == VECTOR_TYPE)
3810*38fd1498Szrj     {
3811*38fd1498Szrj       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3812*38fd1498Szrj       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3813*38fd1498Szrj       int orig_mode_size
3814*38fd1498Szrj 	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3815*38fd1498Szrj       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3816*38fd1498Szrj       return ((orig_mode_size + simd_mode_size - 1)
3817*38fd1498Szrj 	      / simd_mode_size);
3818*38fd1498Szrj     }
3819*38fd1498Szrj 
3820*38fd1498Szrj   size = int_size_in_bytes (type);
3821*38fd1498Szrj 
3822*38fd1498Szrj   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3823*38fd1498Szrj     /* Cost of a memcpy call, 3 arguments and the call.  */
3824*38fd1498Szrj     return 4;
3825*38fd1498Szrj   else
3826*38fd1498Szrj     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3827*38fd1498Szrj }
3828*38fd1498Szrj 
3829*38fd1498Szrj /* Returns cost of operation CODE, according to WEIGHTS  */
3830*38fd1498Szrj 
3831*38fd1498Szrj static int
3832*38fd1498Szrj estimate_operator_cost (enum tree_code code, eni_weights *weights,
3833*38fd1498Szrj 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3834*38fd1498Szrj {
3835*38fd1498Szrj   switch (code)
3836*38fd1498Szrj     {
3837*38fd1498Szrj     /* These are "free" conversions, or their presumed cost
3838*38fd1498Szrj        is folded into other operations.  */
3839*38fd1498Szrj     case RANGE_EXPR:
3840*38fd1498Szrj     CASE_CONVERT:
3841*38fd1498Szrj     case COMPLEX_EXPR:
3842*38fd1498Szrj     case PAREN_EXPR:
3843*38fd1498Szrj     case VIEW_CONVERT_EXPR:
3844*38fd1498Szrj       return 0;
3845*38fd1498Szrj 
3846*38fd1498Szrj     /* Assign cost of 1 to usual operations.
3847*38fd1498Szrj        ??? We may consider mapping RTL costs to this.  */
3848*38fd1498Szrj     case COND_EXPR:
3849*38fd1498Szrj     case VEC_COND_EXPR:
3850*38fd1498Szrj     case VEC_PERM_EXPR:
3851*38fd1498Szrj 
3852*38fd1498Szrj     case PLUS_EXPR:
3853*38fd1498Szrj     case POINTER_PLUS_EXPR:
3854*38fd1498Szrj     case POINTER_DIFF_EXPR:
3855*38fd1498Szrj     case MINUS_EXPR:
3856*38fd1498Szrj     case MULT_EXPR:
3857*38fd1498Szrj     case MULT_HIGHPART_EXPR:
3858*38fd1498Szrj     case FMA_EXPR:
3859*38fd1498Szrj 
3860*38fd1498Szrj     case ADDR_SPACE_CONVERT_EXPR:
3861*38fd1498Szrj     case FIXED_CONVERT_EXPR:
3862*38fd1498Szrj     case FIX_TRUNC_EXPR:
3863*38fd1498Szrj 
3864*38fd1498Szrj     case NEGATE_EXPR:
3865*38fd1498Szrj     case FLOAT_EXPR:
3866*38fd1498Szrj     case MIN_EXPR:
3867*38fd1498Szrj     case MAX_EXPR:
3868*38fd1498Szrj     case ABS_EXPR:
3869*38fd1498Szrj 
3870*38fd1498Szrj     case LSHIFT_EXPR:
3871*38fd1498Szrj     case RSHIFT_EXPR:
3872*38fd1498Szrj     case LROTATE_EXPR:
3873*38fd1498Szrj     case RROTATE_EXPR:
3874*38fd1498Szrj 
3875*38fd1498Szrj     case BIT_IOR_EXPR:
3876*38fd1498Szrj     case BIT_XOR_EXPR:
3877*38fd1498Szrj     case BIT_AND_EXPR:
3878*38fd1498Szrj     case BIT_NOT_EXPR:
3879*38fd1498Szrj 
3880*38fd1498Szrj     case TRUTH_ANDIF_EXPR:
3881*38fd1498Szrj     case TRUTH_ORIF_EXPR:
3882*38fd1498Szrj     case TRUTH_AND_EXPR:
3883*38fd1498Szrj     case TRUTH_OR_EXPR:
3884*38fd1498Szrj     case TRUTH_XOR_EXPR:
3885*38fd1498Szrj     case TRUTH_NOT_EXPR:
3886*38fd1498Szrj 
3887*38fd1498Szrj     case LT_EXPR:
3888*38fd1498Szrj     case LE_EXPR:
3889*38fd1498Szrj     case GT_EXPR:
3890*38fd1498Szrj     case GE_EXPR:
3891*38fd1498Szrj     case EQ_EXPR:
3892*38fd1498Szrj     case NE_EXPR:
3893*38fd1498Szrj     case ORDERED_EXPR:
3894*38fd1498Szrj     case UNORDERED_EXPR:
3895*38fd1498Szrj 
3896*38fd1498Szrj     case UNLT_EXPR:
3897*38fd1498Szrj     case UNLE_EXPR:
3898*38fd1498Szrj     case UNGT_EXPR:
3899*38fd1498Szrj     case UNGE_EXPR:
3900*38fd1498Szrj     case UNEQ_EXPR:
3901*38fd1498Szrj     case LTGT_EXPR:
3902*38fd1498Szrj 
3903*38fd1498Szrj     case CONJ_EXPR:
3904*38fd1498Szrj 
3905*38fd1498Szrj     case PREDECREMENT_EXPR:
3906*38fd1498Szrj     case PREINCREMENT_EXPR:
3907*38fd1498Szrj     case POSTDECREMENT_EXPR:
3908*38fd1498Szrj     case POSTINCREMENT_EXPR:
3909*38fd1498Szrj 
3910*38fd1498Szrj     case REALIGN_LOAD_EXPR:
3911*38fd1498Szrj 
3912*38fd1498Szrj     case WIDEN_SUM_EXPR:
3913*38fd1498Szrj     case WIDEN_MULT_EXPR:
3914*38fd1498Szrj     case DOT_PROD_EXPR:
3915*38fd1498Szrj     case SAD_EXPR:
3916*38fd1498Szrj     case WIDEN_MULT_PLUS_EXPR:
3917*38fd1498Szrj     case WIDEN_MULT_MINUS_EXPR:
3918*38fd1498Szrj     case WIDEN_LSHIFT_EXPR:
3919*38fd1498Szrj 
3920*38fd1498Szrj     case VEC_WIDEN_MULT_HI_EXPR:
3921*38fd1498Szrj     case VEC_WIDEN_MULT_LO_EXPR:
3922*38fd1498Szrj     case VEC_WIDEN_MULT_EVEN_EXPR:
3923*38fd1498Szrj     case VEC_WIDEN_MULT_ODD_EXPR:
3924*38fd1498Szrj     case VEC_UNPACK_HI_EXPR:
3925*38fd1498Szrj     case VEC_UNPACK_LO_EXPR:
3926*38fd1498Szrj     case VEC_UNPACK_FLOAT_HI_EXPR:
3927*38fd1498Szrj     case VEC_UNPACK_FLOAT_LO_EXPR:
3928*38fd1498Szrj     case VEC_PACK_TRUNC_EXPR:
3929*38fd1498Szrj     case VEC_PACK_SAT_EXPR:
3930*38fd1498Szrj     case VEC_PACK_FIX_TRUNC_EXPR:
3931*38fd1498Szrj     case VEC_WIDEN_LSHIFT_HI_EXPR:
3932*38fd1498Szrj     case VEC_WIDEN_LSHIFT_LO_EXPR:
3933*38fd1498Szrj     case VEC_DUPLICATE_EXPR:
3934*38fd1498Szrj     case VEC_SERIES_EXPR:
3935*38fd1498Szrj 
3936*38fd1498Szrj       return 1;
3937*38fd1498Szrj 
3938*38fd1498Szrj     /* Few special cases of expensive operations.  This is useful
3939*38fd1498Szrj        to avoid inlining on functions having too many of these.  */
3940*38fd1498Szrj     case TRUNC_DIV_EXPR:
3941*38fd1498Szrj     case CEIL_DIV_EXPR:
3942*38fd1498Szrj     case FLOOR_DIV_EXPR:
3943*38fd1498Szrj     case ROUND_DIV_EXPR:
3944*38fd1498Szrj     case EXACT_DIV_EXPR:
3945*38fd1498Szrj     case TRUNC_MOD_EXPR:
3946*38fd1498Szrj     case CEIL_MOD_EXPR:
3947*38fd1498Szrj     case FLOOR_MOD_EXPR:
3948*38fd1498Szrj     case ROUND_MOD_EXPR:
3949*38fd1498Szrj     case RDIV_EXPR:
3950*38fd1498Szrj       if (TREE_CODE (op2) != INTEGER_CST)
3951*38fd1498Szrj         return weights->div_mod_cost;
3952*38fd1498Szrj       return 1;
3953*38fd1498Szrj 
3954*38fd1498Szrj     /* Bit-field insertion needs several shift and mask operations.  */
3955*38fd1498Szrj     case BIT_INSERT_EXPR:
3956*38fd1498Szrj       return 3;
3957*38fd1498Szrj 
3958*38fd1498Szrj     default:
3959*38fd1498Szrj       /* We expect a copy assignment with no operator.  */
3960*38fd1498Szrj       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3961*38fd1498Szrj       return 0;
3962*38fd1498Szrj     }
3963*38fd1498Szrj }
3964*38fd1498Szrj 
3965*38fd1498Szrj 
3966*38fd1498Szrj /* Estimate number of instructions that will be created by expanding
3967*38fd1498Szrj    the statements in the statement sequence STMTS.
3968*38fd1498Szrj    WEIGHTS contains weights attributed to various constructs.  */
3969*38fd1498Szrj 
3970*38fd1498Szrj int
3971*38fd1498Szrj estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3972*38fd1498Szrj {
3973*38fd1498Szrj   int cost;
3974*38fd1498Szrj   gimple_stmt_iterator gsi;
3975*38fd1498Szrj 
3976*38fd1498Szrj   cost = 0;
3977*38fd1498Szrj   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3978*38fd1498Szrj     cost += estimate_num_insns (gsi_stmt (gsi), weights);
3979*38fd1498Szrj 
3980*38fd1498Szrj   return cost;
3981*38fd1498Szrj }
3982*38fd1498Szrj 
3983*38fd1498Szrj 
3984*38fd1498Szrj /* Estimate number of instructions that will be created by expanding STMT.
3985*38fd1498Szrj    WEIGHTS contains weights attributed to various constructs.  */
3986*38fd1498Szrj 
int
estimate_num_insns (gimple *stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  /* Dispatch on the statement kind; each case either computes COST and
     breaks, or returns its estimate directly.  */
  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have three cases to
	 deal with:
	 1) Simple assignments to registers;
	 2) Stores to things that must live in memory.  This includes
	    "normal" stores to scalars, but also assignments of large
	    structures, or constructors of big arrays;

	 Let us look at the first two cases, assuming we have "a = b + C":
	 <GIMPLE_ASSIGN <var_decl "a">
	        <plus_expr <var_decl "b"> <constant C>>
	 If "a" is a GIMPLE register, the assignment to it is free on almost
	 any target, because "a" usually ends up in a real register.  Hence
	 the only cost of this expression comes from the PLUS_EXPR, and we
	 can ignore the GIMPLE_ASSIGN.
	 If "a" is not a GIMPLE register, the assignment to "a" will most
	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
	 of moving something into "a", which we compute using the function
	 estimate_move_cost.  */
      if (gimple_clobber_p (stmt))
	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      cost = 0;

      /* Account for the cost of moving to / from memory.  */
      if (gimple_store_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
      if (gimple_assign_load_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);

      /* Add the cost of the RHS operator itself; rhs2 is only passed
	 for binary RHS classes.  */
      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
      				      gimple_assign_rhs1 (stmt),
				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
				      == GIMPLE_BINARY_RHS
				      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      /* One insn for the branch plus the cost of the comparison.  */
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
      				         gimple_op (stmt, 0),
				         gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	/* Take into account cost of the switch + guess 2 conditional jumps for
	   each case label.

	   TODO: once the switch expansion logic is sufficiently separated, we can
	   do a better job on estimating cost of the switch.  */
	if (weights->time_based)
	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
	else
	  cost = gimple_switch_num_labels (switch_stmt) * 2;
      }
      break;

    case GIMPLE_CALL:
      {
	/* DECL is the called function's FUNCTION_DECL, or NULL for an
	   indirect call.  */
	tree decl;

	if (gimple_call_internal_p (stmt))
	  return 0;
	else if ((decl = gimple_call_fndecl (stmt))
		 && DECL_BUILT_IN (decl))
	  {
	    /* Do not special case builtins where we see the body.
	       This just confuses the inliner.  */
	    struct cgraph_node *node;
	    if (!(node = cgraph_node::get (decl))
		|| node->definition)
	      ;
	    /* For builtins that are likely expanded to nothing or
	       inlined do not account operand costs.  */
	    else if (is_simple_builtin (decl))
	      return 0;
	    else if (is_inexpensive_builtin (decl))
	      return weights->target_builtin_call_cost;
	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	      {
		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
		   specialize the cheap expansion we do here.
		   ???  This asks for a more general solution.  */
		switch (DECL_FUNCTION_CODE (decl))
		  {
		    case BUILT_IN_POW:
		    case BUILT_IN_POWF:
		    case BUILT_IN_POWL:
		      /* pow (x, 2.0) costs the same as x * x.  */
		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
			  && (real_equal
			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
			       &dconst2)))
			return estimate_operator_cost
			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
			     gimple_call_arg (stmt, 0));
		      break;

		    default:
		      break;
		  }
	      }
	  }

	/* Base call cost, plus a move for the result and each argument.  */
	cost = decl ? weights->call_cost : weights->indirect_call_cost;
	if (gimple_call_lhs (stmt))
	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
				      weights->time_based);
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    cost += estimate_move_cost (TREE_TYPE (arg),
					weights->time_based);
	  }
	break;
      }

    case GIMPLE_RETURN:
      return weights->return_cost;

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_PREDICT:
    case GIMPLE_DEBUG:
      /* These expand to no real code.  */
      return 0;

    case GIMPLE_ASM:
      {
	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
	/* 1000 means infinity. This avoids overflows later
	   with very long asm statements.  */
	if (count > 1000)
	  count = 1000;
	return MAX (1, count);
      }

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
	 argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ??? This is going to turn into a switch statement.  Ideally
	 we'd have a look at the eh region and estimate the number of
	 edges involved.  */
      return 10;

    case GIMPLE_BIND:
      /* A bind itself is free; count only its body.  */
      return estimate_num_insns_seq (
	       gimple_bind_body (as_a <gbind *> (stmt)),
	       weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (
				       as_a <gcatch *> (stmt)),
				     weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      /* Directive cost plus both the loop body and its pre-body.  */
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    case GIMPLE_TRANSACTION:
      return (weights->tm_cost
	      + estimate_num_insns_seq (gimple_transaction_body (
					  as_a <gtransaction *> (stmt)),
					weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
4209*38fd1498Szrj 
4210*38fd1498Szrj /* Estimate number of instructions that will be created by expanding
4211*38fd1498Szrj    function FNDECL.  WEIGHTS contains weights attributed to various
4212*38fd1498Szrj    constructs.  */
4213*38fd1498Szrj 
4214*38fd1498Szrj int
4215*38fd1498Szrj estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4216*38fd1498Szrj {
4217*38fd1498Szrj   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4218*38fd1498Szrj   gimple_stmt_iterator bsi;
4219*38fd1498Szrj   basic_block bb;
4220*38fd1498Szrj   int n = 0;
4221*38fd1498Szrj 
4222*38fd1498Szrj   gcc_assert (my_function && my_function->cfg);
4223*38fd1498Szrj   FOR_EACH_BB_FN (bb, my_function)
4224*38fd1498Szrj     {
4225*38fd1498Szrj       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4226*38fd1498Szrj 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4227*38fd1498Szrj     }
4228*38fd1498Szrj 
4229*38fd1498Szrj   return n;
4230*38fd1498Szrj }
4231*38fd1498Szrj 
4232*38fd1498Szrj 
4233*38fd1498Szrj /* Initializes weights used by estimate_num_insns.  */
4234*38fd1498Szrj 
4235*38fd1498Szrj void
4236*38fd1498Szrj init_inline_once (void)
4237*38fd1498Szrj {
4238*38fd1498Szrj   eni_size_weights.call_cost = 1;
4239*38fd1498Szrj   eni_size_weights.indirect_call_cost = 3;
4240*38fd1498Szrj   eni_size_weights.target_builtin_call_cost = 1;
4241*38fd1498Szrj   eni_size_weights.div_mod_cost = 1;
4242*38fd1498Szrj   eni_size_weights.omp_cost = 40;
4243*38fd1498Szrj   eni_size_weights.tm_cost = 10;
4244*38fd1498Szrj   eni_size_weights.time_based = false;
4245*38fd1498Szrj   eni_size_weights.return_cost = 1;
4246*38fd1498Szrj 
4247*38fd1498Szrj   /* Estimating time for call is difficult, since we have no idea what the
4248*38fd1498Szrj      called function does.  In the current uses of eni_time_weights,
4249*38fd1498Szrj      underestimating the cost does less harm than overestimating it, so
4250*38fd1498Szrj      we choose a rather small value here.  */
4251*38fd1498Szrj   eni_time_weights.call_cost = 10;
4252*38fd1498Szrj   eni_time_weights.indirect_call_cost = 15;
4253*38fd1498Szrj   eni_time_weights.target_builtin_call_cost = 1;
4254*38fd1498Szrj   eni_time_weights.div_mod_cost = 10;
4255*38fd1498Szrj   eni_time_weights.omp_cost = 40;
4256*38fd1498Szrj   eni_time_weights.tm_cost = 40;
4257*38fd1498Szrj   eni_time_weights.time_based = true;
4258*38fd1498Szrj   eni_time_weights.return_cost = 2;
4259*38fd1498Szrj }
4260*38fd1498Szrj 
4261*38fd1498Szrj 
4262*38fd1498Szrj /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4263*38fd1498Szrj 
4264*38fd1498Szrj static void
4265*38fd1498Szrj prepend_lexical_block (tree current_block, tree new_block)
4266*38fd1498Szrj {
4267*38fd1498Szrj   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4268*38fd1498Szrj   BLOCK_SUBBLOCKS (current_block) = new_block;
4269*38fd1498Szrj   BLOCK_SUPERCONTEXT (new_block) = current_block;
4270*38fd1498Szrj }
4271*38fd1498Szrj 
4272*38fd1498Szrj /* Add local variables from CALLEE to CALLER.  */
4273*38fd1498Szrj 
4274*38fd1498Szrj static inline void
4275*38fd1498Szrj add_local_variables (struct function *callee, struct function *caller,
4276*38fd1498Szrj 		     copy_body_data *id)
4277*38fd1498Szrj {
4278*38fd1498Szrj   tree var;
4279*38fd1498Szrj   unsigned ix;
4280*38fd1498Szrj 
4281*38fd1498Szrj   FOR_EACH_LOCAL_DECL (callee, ix, var)
4282*38fd1498Szrj     if (!can_be_nonlocal (var, id))
4283*38fd1498Szrj       {
4284*38fd1498Szrj         tree new_var = remap_decl (var, id);
4285*38fd1498Szrj 
4286*38fd1498Szrj         /* Remap debug-expressions.  */
4287*38fd1498Szrj 	if (VAR_P (new_var)
4288*38fd1498Szrj 	    && DECL_HAS_DEBUG_EXPR_P (var)
4289*38fd1498Szrj 	    && new_var != var)
4290*38fd1498Szrj 	  {
4291*38fd1498Szrj 	    tree tem = DECL_DEBUG_EXPR (var);
4292*38fd1498Szrj 	    bool old_regimplify = id->regimplify;
4293*38fd1498Szrj 	    id->remapping_type_depth++;
4294*38fd1498Szrj 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4295*38fd1498Szrj 	    id->remapping_type_depth--;
4296*38fd1498Szrj 	    id->regimplify = old_regimplify;
4297*38fd1498Szrj 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4298*38fd1498Szrj 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4299*38fd1498Szrj 	  }
4300*38fd1498Szrj 	add_local_decl (caller, new_var);
4301*38fd1498Szrj       }
4302*38fd1498Szrj }
4303*38fd1498Szrj 
4304*38fd1498Szrj /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4305*38fd1498Szrj    have brought in or introduced any debug stmts for SRCVAR.  */
4306*38fd1498Szrj 
4307*38fd1498Szrj static inline void
4308*38fd1498Szrj reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4309*38fd1498Szrj {
4310*38fd1498Szrj   tree *remappedvarp = id->decl_map->get (srcvar);
4311*38fd1498Szrj 
4312*38fd1498Szrj   if (!remappedvarp)
4313*38fd1498Szrj     return;
4314*38fd1498Szrj 
4315*38fd1498Szrj   if (!VAR_P (*remappedvarp))
4316*38fd1498Szrj     return;
4317*38fd1498Szrj 
4318*38fd1498Szrj   if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4319*38fd1498Szrj     return;
4320*38fd1498Szrj 
4321*38fd1498Szrj   tree tvar = target_for_debug_bind (*remappedvarp);
4322*38fd1498Szrj   if (!tvar)
4323*38fd1498Szrj     return;
4324*38fd1498Szrj 
4325*38fd1498Szrj   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4326*38fd1498Szrj 					  id->call_stmt);
4327*38fd1498Szrj   gimple_seq_add_stmt (bindings, stmt);
4328*38fd1498Szrj }
4329*38fd1498Szrj 
4330*38fd1498Szrj /* For each inlined variable for which we may have debug bind stmts,
4331*38fd1498Szrj    add before GSI a final debug stmt resetting it, marking the end of
4332*38fd1498Szrj    its life, so that var-tracking knows it doesn't have to compute
4333*38fd1498Szrj    further locations for it.  */
4334*38fd1498Szrj 
4335*38fd1498Szrj static inline void
4336*38fd1498Szrj reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4337*38fd1498Szrj {
4338*38fd1498Szrj   tree var;
4339*38fd1498Szrj   unsigned ix;
4340*38fd1498Szrj   gimple_seq bindings = NULL;
4341*38fd1498Szrj 
4342*38fd1498Szrj   if (!gimple_in_ssa_p (id->src_cfun))
4343*38fd1498Szrj     return;
4344*38fd1498Szrj 
4345*38fd1498Szrj   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4346*38fd1498Szrj     return;
4347*38fd1498Szrj 
4348*38fd1498Szrj   for (var = DECL_ARGUMENTS (id->src_fn);
4349*38fd1498Szrj        var; var = DECL_CHAIN (var))
4350*38fd1498Szrj     reset_debug_binding (id, var, &bindings);
4351*38fd1498Szrj 
4352*38fd1498Szrj   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4353*38fd1498Szrj     reset_debug_binding (id, var, &bindings);
4354*38fd1498Szrj 
4355*38fd1498Szrj   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4356*38fd1498Szrj }
4357*38fd1498Szrj 
4358*38fd1498Szrj /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4359*38fd1498Szrj 
4360*38fd1498Szrj static bool
4361*38fd1498Szrj expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4362*38fd1498Szrj {
4363*38fd1498Szrj   tree use_retvar;
4364*38fd1498Szrj   tree fn;
4365*38fd1498Szrj   hash_map<tree, tree> *dst;
4366*38fd1498Szrj   hash_map<tree, tree> *st = NULL;
4367*38fd1498Szrj   tree return_slot;
4368*38fd1498Szrj   tree modify_dest;
4369*38fd1498Szrj   tree return_bounds = NULL;
4370*38fd1498Szrj   struct cgraph_edge *cg_edge;
4371*38fd1498Szrj   cgraph_inline_failed_t reason;
4372*38fd1498Szrj   basic_block return_block;
4373*38fd1498Szrj   edge e;
4374*38fd1498Szrj   gimple_stmt_iterator gsi, stmt_gsi;
4375*38fd1498Szrj   bool successfully_inlined = false;
4376*38fd1498Szrj   bool purge_dead_abnormal_edges;
4377*38fd1498Szrj   gcall *call_stmt;
4378*38fd1498Szrj   unsigned int i;
4379*38fd1498Szrj   unsigned int prop_mask, src_properties;
4380*38fd1498Szrj   struct function *dst_cfun;
4381*38fd1498Szrj   tree simduid;
4382*38fd1498Szrj   use_operand_p use;
4383*38fd1498Szrj   gimple *simtenter_stmt = NULL;
4384*38fd1498Szrj   vec<tree> *simtvars_save;
4385*38fd1498Szrj 
4386*38fd1498Szrj   /* The gimplifier uses input_location in too many places, such as
4387*38fd1498Szrj      internal_get_tmp_var ().  */
4388*38fd1498Szrj   location_t saved_location = input_location;
4389*38fd1498Szrj   input_location = gimple_location (stmt);
4390*38fd1498Szrj 
4391*38fd1498Szrj   /* From here on, we're only interested in CALL_EXPRs.  */
4392*38fd1498Szrj   call_stmt = dyn_cast <gcall *> (stmt);
4393*38fd1498Szrj   if (!call_stmt)
4394*38fd1498Szrj     goto egress;
4395*38fd1498Szrj 
4396*38fd1498Szrj   cg_edge = id->dst_node->get_edge (stmt);
4397*38fd1498Szrj   gcc_checking_assert (cg_edge);
4398*38fd1498Szrj   /* First, see if we can figure out what function is being called.
4399*38fd1498Szrj      If we cannot, then there is no hope of inlining the function.  */
4400*38fd1498Szrj   if (cg_edge->indirect_unknown_callee)
4401*38fd1498Szrj     goto egress;
4402*38fd1498Szrj   fn = cg_edge->callee->decl;
4403*38fd1498Szrj   gcc_checking_assert (fn);
4404*38fd1498Szrj 
4405*38fd1498Szrj   /* If FN is a declaration of a function in a nested scope that was
4406*38fd1498Szrj      globally declared inline, we don't set its DECL_INITIAL.
4407*38fd1498Szrj      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4408*38fd1498Szrj      C++ front-end uses it for cdtors to refer to their internal
4409*38fd1498Szrj      declarations, that are not real functions.  Fortunately those
4410*38fd1498Szrj      don't have trees to be saved, so we can tell by checking their
4411*38fd1498Szrj      gimple_body.  */
4412*38fd1498Szrj   if (!DECL_INITIAL (fn)
4413*38fd1498Szrj       && DECL_ABSTRACT_ORIGIN (fn)
4414*38fd1498Szrj       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4415*38fd1498Szrj     fn = DECL_ABSTRACT_ORIGIN (fn);
4416*38fd1498Szrj 
4417*38fd1498Szrj   /* Don't try to inline functions that are not well-suited to inlining.  */
4418*38fd1498Szrj   if (cg_edge->inline_failed)
4419*38fd1498Szrj     {
4420*38fd1498Szrj       reason = cg_edge->inline_failed;
4421*38fd1498Szrj       /* If this call was originally indirect, we do not want to emit any
4422*38fd1498Szrj 	 inlining related warnings or sorry messages because there are no
4423*38fd1498Szrj 	 guarantees regarding those.  */
4424*38fd1498Szrj       if (cg_edge->indirect_inlining_edge)
4425*38fd1498Szrj 	goto egress;
4426*38fd1498Szrj 
4427*38fd1498Szrj       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4428*38fd1498Szrj           /* For extern inline functions that get redefined we always
4429*38fd1498Szrj 	     silently ignored always_inline flag. Better behavior would
4430*38fd1498Szrj 	     be to be able to keep both bodies and use extern inline body
4431*38fd1498Szrj 	     for inlining, but we can't do that because frontends overwrite
4432*38fd1498Szrj 	     the body.  */
4433*38fd1498Szrj 	  && !cg_edge->callee->local.redefined_extern_inline
4434*38fd1498Szrj 	  /* During early inline pass, report only when optimization is
4435*38fd1498Szrj 	     not turned on.  */
4436*38fd1498Szrj 	  && (symtab->global_info_ready
4437*38fd1498Szrj 	      || !optimize
4438*38fd1498Szrj 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4439*38fd1498Szrj 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4440*38fd1498Szrj 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4441*38fd1498Szrj 	{
4442*38fd1498Szrj 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4443*38fd1498Szrj 		 cgraph_inline_failed_string (reason));
4444*38fd1498Szrj 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4445*38fd1498Szrj 	    inform (gimple_location (stmt), "called from here");
4446*38fd1498Szrj 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4447*38fd1498Szrj 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4448*38fd1498Szrj                    "called from this function");
4449*38fd1498Szrj 	}
4450*38fd1498Szrj       else if (warn_inline
4451*38fd1498Szrj 	       && DECL_DECLARED_INLINE_P (fn)
4452*38fd1498Szrj 	       && !DECL_NO_INLINE_WARNING_P (fn)
4453*38fd1498Szrj 	       && !DECL_IN_SYSTEM_HEADER (fn)
4454*38fd1498Szrj 	       && reason != CIF_UNSPECIFIED
4455*38fd1498Szrj 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4456*38fd1498Szrj 	       /* Do not warn about not inlined recursive calls.  */
4457*38fd1498Szrj 	       && !cg_edge->recursive_p ()
4458*38fd1498Szrj 	       /* Avoid warnings during early inline pass. */
4459*38fd1498Szrj 	       && symtab->global_info_ready)
4460*38fd1498Szrj 	{
4461*38fd1498Szrj 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4462*38fd1498Szrj 		       fn, _(cgraph_inline_failed_string (reason))))
4463*38fd1498Szrj 	    {
4464*38fd1498Szrj 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4465*38fd1498Szrj 		inform (gimple_location (stmt), "called from here");
4466*38fd1498Szrj 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4467*38fd1498Szrj 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4468*38fd1498Szrj                        "called from this function");
4469*38fd1498Szrj 	    }
4470*38fd1498Szrj 	}
4471*38fd1498Szrj       goto egress;
4472*38fd1498Szrj     }
4473*38fd1498Szrj   id->src_node = cg_edge->callee;
4474*38fd1498Szrj 
4475*38fd1498Szrj   /* If callee is thunk, all we need is to adjust the THIS pointer
4476*38fd1498Szrj      and redirect to function being thunked.  */
4477*38fd1498Szrj   if (id->src_node->thunk.thunk_p)
4478*38fd1498Szrj     {
4479*38fd1498Szrj       cgraph_edge *edge;
4480*38fd1498Szrj       tree virtual_offset = NULL;
4481*38fd1498Szrj       profile_count count = cg_edge->count;
4482*38fd1498Szrj       tree op;
4483*38fd1498Szrj       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4484*38fd1498Szrj 
4485*38fd1498Szrj       cg_edge->remove ();
4486*38fd1498Szrj       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4487*38fd1498Szrj 		   		           gimple_uid (stmt),
4488*38fd1498Szrj 				   	   profile_count::one (),
4489*38fd1498Szrj 					   profile_count::one (),
4490*38fd1498Szrj 				           true);
4491*38fd1498Szrj       edge->count = count;
4492*38fd1498Szrj       if (id->src_node->thunk.virtual_offset_p)
4493*38fd1498Szrj         virtual_offset = size_int (id->src_node->thunk.virtual_value);
4494*38fd1498Szrj       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4495*38fd1498Szrj 			      NULL);
4496*38fd1498Szrj       gsi_insert_before (&iter, gimple_build_assign (op,
4497*38fd1498Szrj 						    gimple_call_arg (stmt, 0)),
4498*38fd1498Szrj 			 GSI_NEW_STMT);
4499*38fd1498Szrj       gcc_assert (id->src_node->thunk.this_adjusting);
4500*38fd1498Szrj       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4501*38fd1498Szrj 			 virtual_offset);
4502*38fd1498Szrj 
4503*38fd1498Szrj       gimple_call_set_arg (stmt, 0, op);
4504*38fd1498Szrj       gimple_call_set_fndecl (stmt, edge->callee->decl);
4505*38fd1498Szrj       update_stmt (stmt);
4506*38fd1498Szrj       id->src_node->remove ();
4507*38fd1498Szrj       expand_call_inline (bb, stmt, id);
4508*38fd1498Szrj       maybe_remove_unused_call_args (cfun, stmt);
4509*38fd1498Szrj       return true;
4510*38fd1498Szrj     }
4511*38fd1498Szrj   fn = cg_edge->callee->decl;
4512*38fd1498Szrj   cg_edge->callee->get_untransformed_body ();
4513*38fd1498Szrj 
4514*38fd1498Szrj   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4515*38fd1498Szrj     cg_edge->callee->verify ();
4516*38fd1498Szrj 
4517*38fd1498Szrj   /* We will be inlining this callee.  */
4518*38fd1498Szrj   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4519*38fd1498Szrj   id->assign_stmts.create (0);
4520*38fd1498Szrj 
4521*38fd1498Szrj   /* Update the callers EH personality.  */
4522*38fd1498Szrj   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4523*38fd1498Szrj     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4524*38fd1498Szrj       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4525*38fd1498Szrj 
4526*38fd1498Szrj   /* Split the block before the GIMPLE_CALL.  */
4527*38fd1498Szrj   stmt_gsi = gsi_for_stmt (stmt);
4528*38fd1498Szrj   gsi_prev (&stmt_gsi);
4529*38fd1498Szrj   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4530*38fd1498Szrj   bb = e->src;
4531*38fd1498Szrj   return_block = e->dest;
4532*38fd1498Szrj   remove_edge (e);
4533*38fd1498Szrj 
4534*38fd1498Szrj   /* If the GIMPLE_CALL was in the last statement of BB, it may have
4535*38fd1498Szrj      been the source of abnormal edges.  In this case, schedule
4536*38fd1498Szrj      the removal of dead abnormal edges.  */
4537*38fd1498Szrj   gsi = gsi_start_bb (return_block);
4538*38fd1498Szrj   gsi_next (&gsi);
4539*38fd1498Szrj   purge_dead_abnormal_edges = gsi_end_p (gsi);
4540*38fd1498Szrj 
4541*38fd1498Szrj   stmt_gsi = gsi_start_bb (return_block);
4542*38fd1498Szrj 
4543*38fd1498Szrj   /* Build a block containing code to initialize the arguments, the
4544*38fd1498Szrj      actual inline expansion of the body, and a label for the return
4545*38fd1498Szrj      statements within the function to jump to.  The type of the
4546*38fd1498Szrj      statement expression is the return type of the function call.
4547*38fd1498Szrj      ???  If the call does not have an associated block then we will
4548*38fd1498Szrj      remap all callee blocks to NULL, effectively dropping most of
4549*38fd1498Szrj      its debug information.  This should only happen for calls to
4550*38fd1498Szrj      artificial decls inserted by the compiler itself.  We need to
4551*38fd1498Szrj      either link the inlined blocks into the caller block tree or
4552*38fd1498Szrj      not refer to them in any way to not break GC for locations.  */
4553*38fd1498Szrj   if (gimple_block (stmt))
4554*38fd1498Szrj     {
4555*38fd1498Szrj       id->block = make_node (BLOCK);
4556*38fd1498Szrj       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4557*38fd1498Szrj       BLOCK_SOURCE_LOCATION (id->block)
4558*38fd1498Szrj 	= LOCATION_LOCUS (gimple_location (stmt));
4559*38fd1498Szrj       prepend_lexical_block (gimple_block (stmt), id->block);
4560*38fd1498Szrj     }
4561*38fd1498Szrj 
4562*38fd1498Szrj   /* Local declarations will be replaced by their equivalents in this
4563*38fd1498Szrj      map.  */
4564*38fd1498Szrj   st = id->decl_map;
4565*38fd1498Szrj   id->decl_map = new hash_map<tree, tree>;
4566*38fd1498Szrj   dst = id->debug_map;
4567*38fd1498Szrj   id->debug_map = NULL;
4568*38fd1498Szrj 
4569*38fd1498Szrj   /* Record the function we are about to inline.  */
4570*38fd1498Szrj   id->src_fn = fn;
4571*38fd1498Szrj   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4572*38fd1498Szrj   id->call_stmt = call_stmt;
4573*38fd1498Szrj 
4574*38fd1498Szrj   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4575*38fd1498Szrj      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4576*38fd1498Szrj   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4577*38fd1498Szrj   simtvars_save = id->dst_simt_vars;
4578*38fd1498Szrj   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4579*38fd1498Szrj       && (simduid = bb->loop_father->simduid) != NULL_TREE
4580*38fd1498Szrj       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4581*38fd1498Szrj       && single_imm_use (simduid, &use, &simtenter_stmt)
4582*38fd1498Szrj       && is_gimple_call (simtenter_stmt)
4583*38fd1498Szrj       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4584*38fd1498Szrj     vec_alloc (id->dst_simt_vars, 0);
4585*38fd1498Szrj   else
4586*38fd1498Szrj     id->dst_simt_vars = NULL;
4587*38fd1498Szrj 
4588*38fd1498Szrj   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4589*38fd1498Szrj     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4590*38fd1498Szrj 
4591*38fd1498Szrj   /* If the src function contains an IFN_VA_ARG, then so will the dst
4592*38fd1498Szrj      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4593*38fd1498Szrj   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4594*38fd1498Szrj   src_properties = id->src_cfun->curr_properties & prop_mask;
4595*38fd1498Szrj   if (src_properties != prop_mask)
4596*38fd1498Szrj     dst_cfun->curr_properties &= src_properties | ~prop_mask;
4597*38fd1498Szrj 
4598*38fd1498Szrj   gcc_assert (!id->src_cfun->after_inlining);
4599*38fd1498Szrj 
4600*38fd1498Szrj   id->entry_bb = bb;
4601*38fd1498Szrj   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4602*38fd1498Szrj     {
4603*38fd1498Szrj       gimple_stmt_iterator si = gsi_last_bb (bb);
4604*38fd1498Szrj       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4605*38fd1498Szrj       						   NOT_TAKEN),
4606*38fd1498Szrj 			GSI_NEW_STMT);
4607*38fd1498Szrj     }
4608*38fd1498Szrj   initialize_inlined_parameters (id, stmt, fn, bb);
4609*38fd1498Szrj   if (debug_nonbind_markers_p && debug_inline_points && id->block
4610*38fd1498Szrj       && inlined_function_outer_scope_p (id->block))
4611*38fd1498Szrj     {
4612*38fd1498Szrj       gimple_stmt_iterator si = gsi_last_bb (bb);
4613*38fd1498Szrj       gsi_insert_after (&si, gimple_build_debug_inline_entry
4614*38fd1498Szrj 			(id->block, input_location), GSI_NEW_STMT);
4615*38fd1498Szrj     }
4616*38fd1498Szrj 
4617*38fd1498Szrj   if (DECL_INITIAL (fn))
4618*38fd1498Szrj     {
4619*38fd1498Szrj       if (gimple_block (stmt))
4620*38fd1498Szrj 	{
4621*38fd1498Szrj 	  tree *var;
4622*38fd1498Szrj 
4623*38fd1498Szrj 	  prepend_lexical_block (id->block,
4624*38fd1498Szrj 				 remap_blocks (DECL_INITIAL (fn), id));
4625*38fd1498Szrj 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4626*38fd1498Szrj 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4627*38fd1498Szrj 				   == NULL_TREE));
4628*38fd1498Szrj 	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4629*38fd1498Szrj 	     otherwise for DWARF DW_TAG_formal_parameter will not be children of
4630*38fd1498Szrj 	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4631*38fd1498Szrj 	     under it.  The parameters can be then evaluated in the debugger,
4632*38fd1498Szrj 	     but don't show in backtraces.  */
4633*38fd1498Szrj 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4634*38fd1498Szrj 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4635*38fd1498Szrj 	      {
4636*38fd1498Szrj 		tree v = *var;
4637*38fd1498Szrj 		*var = TREE_CHAIN (v);
4638*38fd1498Szrj 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4639*38fd1498Szrj 		BLOCK_VARS (id->block) = v;
4640*38fd1498Szrj 	      }
4641*38fd1498Szrj 	    else
4642*38fd1498Szrj 	      var = &TREE_CHAIN (*var);
4643*38fd1498Szrj 	}
4644*38fd1498Szrj       else
4645*38fd1498Szrj 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4646*38fd1498Szrj     }
4647*38fd1498Szrj 
4648*38fd1498Szrj   /* Return statements in the function body will be replaced by jumps
4649*38fd1498Szrj      to the RET_LABEL.  */
4650*38fd1498Szrj   gcc_assert (DECL_INITIAL (fn));
4651*38fd1498Szrj   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4652*38fd1498Szrj 
4653*38fd1498Szrj   /* Find the LHS to which the result of this call is assigned.  */
4654*38fd1498Szrj   return_slot = NULL;
4655*38fd1498Szrj   if (gimple_call_lhs (stmt))
4656*38fd1498Szrj     {
4657*38fd1498Szrj       modify_dest = gimple_call_lhs (stmt);
4658*38fd1498Szrj 
4659*38fd1498Szrj       /* Remember where to copy returned bounds.  */
4660*38fd1498Szrj       if (gimple_call_with_bounds_p (stmt)
4661*38fd1498Szrj 	  && TREE_CODE (modify_dest) == SSA_NAME)
4662*38fd1498Szrj 	{
4663*38fd1498Szrj 	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4664*38fd1498Szrj 	  if (retbnd)
4665*38fd1498Szrj 	    {
4666*38fd1498Szrj 	      return_bounds = gimple_call_lhs (retbnd);
4667*38fd1498Szrj 	      /* If returned bounds are not used then just
4668*38fd1498Szrj 		 remove unused call.  */
4669*38fd1498Szrj 	      if (!return_bounds)
4670*38fd1498Szrj 		{
4671*38fd1498Szrj 		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4672*38fd1498Szrj 		  gsi_remove (&iter, true);
4673*38fd1498Szrj 		}
4674*38fd1498Szrj 	    }
4675*38fd1498Szrj 	}
4676*38fd1498Szrj 
4677*38fd1498Szrj       /* The function which we are inlining might not return a value,
4678*38fd1498Szrj 	 in which case we should issue a warning that the function
4679*38fd1498Szrj 	 does not return a value.  In that case the optimizers will
4680*38fd1498Szrj 	 see that the variable to which the value is assigned was not
4681*38fd1498Szrj 	 initialized.  We do not want to issue a warning about that
4682*38fd1498Szrj 	 uninitialized variable.  */
4683*38fd1498Szrj       if (DECL_P (modify_dest))
4684*38fd1498Szrj 	TREE_NO_WARNING (modify_dest) = 1;
4685*38fd1498Szrj 
4686*38fd1498Szrj       if (gimple_call_return_slot_opt_p (call_stmt))
4687*38fd1498Szrj 	{
4688*38fd1498Szrj 	  return_slot = modify_dest;
4689*38fd1498Szrj 	  modify_dest = NULL;
4690*38fd1498Szrj 	}
4691*38fd1498Szrj     }
4692*38fd1498Szrj   else
4693*38fd1498Szrj     modify_dest = NULL;
4694*38fd1498Szrj 
4695*38fd1498Szrj   /* If we are inlining a call to the C++ operator new, we don't want
4696*38fd1498Szrj      to use type based alias analysis on the return value.  Otherwise
4697*38fd1498Szrj      we may get confused if the compiler sees that the inlined new
4698*38fd1498Szrj      function returns a pointer which was just deleted.  See bug
4699*38fd1498Szrj      33407.  */
4700*38fd1498Szrj   if (DECL_IS_OPERATOR_NEW (fn))
4701*38fd1498Szrj     {
4702*38fd1498Szrj       return_slot = NULL;
4703*38fd1498Szrj       modify_dest = NULL;
4704*38fd1498Szrj     }
4705*38fd1498Szrj 
4706*38fd1498Szrj   /* Declare the return variable for the function.  */
4707*38fd1498Szrj   use_retvar = declare_return_variable (id, return_slot, modify_dest,
4708*38fd1498Szrj 					return_bounds, bb);
4709*38fd1498Szrj 
4710*38fd1498Szrj   /* Add local vars in this inlined callee to caller.  */
4711*38fd1498Szrj   add_local_variables (id->src_cfun, cfun, id);
4712*38fd1498Szrj 
4713*38fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
4714*38fd1498Szrj     {
4715*38fd1498Szrj       fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
4716*38fd1498Szrj 	       id->src_node->dump_name (),
4717*38fd1498Szrj 	       id->dst_node->dump_name (),
4718*38fd1498Szrj 	       cg_edge->sreal_frequency ().to_double ());
4719*38fd1498Szrj       id->src_node->dump (dump_file);
4720*38fd1498Szrj       id->dst_node->dump (dump_file);
4721*38fd1498Szrj     }
4722*38fd1498Szrj 
4723*38fd1498Szrj   /* This is it.  Duplicate the callee body.  Assume callee is
4724*38fd1498Szrj      pre-gimplified.  Note that we must not alter the caller
4725*38fd1498Szrj      function in any way before this point, as this CALL_EXPR may be
4726*38fd1498Szrj      a self-referential call; if we're calling ourselves, we need to
4727*38fd1498Szrj      duplicate our body before altering anything.  */
4728*38fd1498Szrj   copy_body (id, bb, return_block, NULL);
4729*38fd1498Szrj 
4730*38fd1498Szrj   reset_debug_bindings (id, stmt_gsi);
4731*38fd1498Szrj 
4732*38fd1498Szrj   if (flag_stack_reuse != SR_NONE)
4733*38fd1498Szrj     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4734*38fd1498Szrj       if (!TREE_THIS_VOLATILE (p))
4735*38fd1498Szrj 	{
4736*38fd1498Szrj 	  tree *varp = id->decl_map->get (p);
4737*38fd1498Szrj 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4738*38fd1498Szrj 	    {
4739*38fd1498Szrj 	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4740*38fd1498Szrj 	      gimple *clobber_stmt;
4741*38fd1498Szrj 	      TREE_THIS_VOLATILE (clobber) = 1;
4742*38fd1498Szrj 	      clobber_stmt = gimple_build_assign (*varp, clobber);
4743*38fd1498Szrj 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
4744*38fd1498Szrj 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4745*38fd1498Szrj 	    }
4746*38fd1498Szrj 	}
4747*38fd1498Szrj 
4748*38fd1498Szrj   /* Reset the escaped solution.  */
4749*38fd1498Szrj   if (cfun->gimple_df)
4750*38fd1498Szrj     pt_solution_reset (&cfun->gimple_df->escaped);
4751*38fd1498Szrj 
4752*38fd1498Szrj   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
4753*38fd1498Szrj   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4754*38fd1498Szrj     {
4755*38fd1498Szrj       size_t nargs = gimple_call_num_args (simtenter_stmt);
4756*38fd1498Szrj       vec<tree> *vars = id->dst_simt_vars;
4757*38fd1498Szrj       auto_vec<tree> newargs (nargs + vars->length ());
4758*38fd1498Szrj       for (size_t i = 0; i < nargs; i++)
4759*38fd1498Szrj 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4760*38fd1498Szrj       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4761*38fd1498Szrj 	{
4762*38fd1498Szrj 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4763*38fd1498Szrj 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4764*38fd1498Szrj 	}
4765*38fd1498Szrj       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4766*38fd1498Szrj       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4767*38fd1498Szrj       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4768*38fd1498Szrj       gsi_replace (&gsi, g, false);
4769*38fd1498Szrj     }
4770*38fd1498Szrj   vec_free (id->dst_simt_vars);
4771*38fd1498Szrj   id->dst_simt_vars = simtvars_save;
4772*38fd1498Szrj 
4773*38fd1498Szrj   /* Clean up.  */
4774*38fd1498Szrj   if (id->debug_map)
4775*38fd1498Szrj     {
4776*38fd1498Szrj       delete id->debug_map;
4777*38fd1498Szrj       id->debug_map = dst;
4778*38fd1498Szrj     }
4779*38fd1498Szrj   delete id->decl_map;
4780*38fd1498Szrj   id->decl_map = st;
4781*38fd1498Szrj 
4782*38fd1498Szrj   /* Unlink the calls virtual operands before replacing it.  */
4783*38fd1498Szrj   unlink_stmt_vdef (stmt);
4784*38fd1498Szrj   if (gimple_vdef (stmt)
4785*38fd1498Szrj       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4786*38fd1498Szrj     release_ssa_name (gimple_vdef (stmt));
4787*38fd1498Szrj 
4788*38fd1498Szrj   /* If the inlined function returns a result that we care about,
4789*38fd1498Szrj      substitute the GIMPLE_CALL with an assignment of the return
4790*38fd1498Szrj      variable to the LHS of the call.  That is, if STMT was
4791*38fd1498Szrj      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4792*38fd1498Szrj   if (use_retvar && gimple_call_lhs (stmt))
4793*38fd1498Szrj     {
4794*38fd1498Szrj       gimple *old_stmt = stmt;
4795*38fd1498Szrj       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4796*38fd1498Szrj       gsi_replace (&stmt_gsi, stmt, false);
4797*38fd1498Szrj       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4798*38fd1498Szrj       /* Append a clobber for id->retvar if easily possible.  */
4799*38fd1498Szrj       if (flag_stack_reuse != SR_NONE
4800*38fd1498Szrj 	  && id->retvar
4801*38fd1498Szrj 	  && VAR_P (id->retvar)
4802*38fd1498Szrj 	  && id->retvar != return_slot
4803*38fd1498Szrj 	  && id->retvar != modify_dest
4804*38fd1498Szrj 	  && !TREE_THIS_VOLATILE (id->retvar)
4805*38fd1498Szrj 	  && !is_gimple_reg (id->retvar)
4806*38fd1498Szrj 	  && !stmt_ends_bb_p (stmt))
4807*38fd1498Szrj 	{
4808*38fd1498Szrj 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4809*38fd1498Szrj 	  gimple *clobber_stmt;
4810*38fd1498Szrj 	  TREE_THIS_VOLATILE (clobber) = 1;
4811*38fd1498Szrj 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4812*38fd1498Szrj 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4813*38fd1498Szrj 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4814*38fd1498Szrj 	}
4815*38fd1498Szrj 
4816*38fd1498Szrj       /* Copy bounds if we copy structure with bounds.  */
4817*38fd1498Szrj       if (chkp_function_instrumented_p (id->dst_fn)
4818*38fd1498Szrj 	  && !BOUNDED_P (use_retvar)
4819*38fd1498Szrj 	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4820*38fd1498Szrj 	id->assign_stmts.safe_push (stmt);
4821*38fd1498Szrj     }
4822*38fd1498Szrj   else
4823*38fd1498Szrj     {
4824*38fd1498Szrj       /* Handle the case of inlining a function with no return
4825*38fd1498Szrj 	 statement, which causes the return value to become undefined.  */
4826*38fd1498Szrj       if (gimple_call_lhs (stmt)
4827*38fd1498Szrj 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4828*38fd1498Szrj 	{
4829*38fd1498Szrj 	  tree name = gimple_call_lhs (stmt);
4830*38fd1498Szrj 	  tree var = SSA_NAME_VAR (name);
4831*38fd1498Szrj 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4832*38fd1498Szrj 
4833*38fd1498Szrj 	  if (def)
4834*38fd1498Szrj 	    {
4835*38fd1498Szrj 	      /* If the variable is used undefined, make this name
4836*38fd1498Szrj 		 undefined via a move.  */
4837*38fd1498Szrj 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4838*38fd1498Szrj 	      gsi_replace (&stmt_gsi, stmt, true);
4839*38fd1498Szrj 	    }
4840*38fd1498Szrj 	  else
4841*38fd1498Szrj 	    {
4842*38fd1498Szrj 	      if (!var)
4843*38fd1498Szrj 		{
4844*38fd1498Szrj 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4845*38fd1498Szrj 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4846*38fd1498Szrj 		}
4847*38fd1498Szrj 	      /* Otherwise make this variable undefined.  */
4848*38fd1498Szrj 	      gsi_remove (&stmt_gsi, true);
4849*38fd1498Szrj 	      set_ssa_default_def (cfun, var, name);
4850*38fd1498Szrj 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4851*38fd1498Szrj 	    }
4852*38fd1498Szrj 	}
4853*38fd1498Szrj       /* Replace with a clobber for id->retvar.  */
4854*38fd1498Szrj       else if (flag_stack_reuse != SR_NONE
4855*38fd1498Szrj 	       && id->retvar
4856*38fd1498Szrj 	       && VAR_P (id->retvar)
4857*38fd1498Szrj 	       && id->retvar != return_slot
4858*38fd1498Szrj 	       && id->retvar != modify_dest
4859*38fd1498Szrj 	       && !TREE_THIS_VOLATILE (id->retvar)
4860*38fd1498Szrj 	       && !is_gimple_reg (id->retvar))
4861*38fd1498Szrj 	{
4862*38fd1498Szrj 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4863*38fd1498Szrj 	  gimple *clobber_stmt;
4864*38fd1498Szrj 	  TREE_THIS_VOLATILE (clobber) = 1;
4865*38fd1498Szrj 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4866*38fd1498Szrj 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
4867*38fd1498Szrj 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
4868*38fd1498Szrj 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4869*38fd1498Szrj 	}
4870*38fd1498Szrj       else
4871*38fd1498Szrj 	gsi_remove (&stmt_gsi, true);
4872*38fd1498Szrj     }
4873*38fd1498Szrj 
4874*38fd1498Szrj   /* Put returned bounds into the correct place if required.  */
4875*38fd1498Szrj   if (return_bounds)
4876*38fd1498Szrj     {
4877*38fd1498Szrj       gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4878*38fd1498Szrj       gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4879*38fd1498Szrj       gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4880*38fd1498Szrj       unlink_stmt_vdef (old_stmt);
4881*38fd1498Szrj       gsi_replace (&bnd_gsi, new_stmt, false);
4882*38fd1498Szrj       maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4883*38fd1498Szrj       cgraph_update_edges_for_call_stmt (old_stmt,
4884*38fd1498Szrj 					 gimple_call_fndecl (old_stmt),
4885*38fd1498Szrj 					 new_stmt);
4886*38fd1498Szrj     }
4887*38fd1498Szrj 
4888*38fd1498Szrj   if (purge_dead_abnormal_edges)
4889*38fd1498Szrj     {
4890*38fd1498Szrj       gimple_purge_dead_eh_edges (return_block);
4891*38fd1498Szrj       gimple_purge_dead_abnormal_call_edges (return_block);
4892*38fd1498Szrj     }
4893*38fd1498Szrj 
4894*38fd1498Szrj   /* If the value of the new expression is ignored, that's OK.  We
4895*38fd1498Szrj      don't warn about this for CALL_EXPRs, so we shouldn't warn about
4896*38fd1498Szrj      the equivalent inlined version either.  */
4897*38fd1498Szrj   if (is_gimple_assign (stmt))
4898*38fd1498Szrj     {
4899*38fd1498Szrj       gcc_assert (gimple_assign_single_p (stmt)
4900*38fd1498Szrj 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4901*38fd1498Szrj       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4902*38fd1498Szrj     }
4903*38fd1498Szrj 
4904*38fd1498Szrj   /* Copy bounds for all generated assigns that need it.  */
4905*38fd1498Szrj   for (i = 0; i < id->assign_stmts.length (); i++)
4906*38fd1498Szrj     chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4907*38fd1498Szrj   id->assign_stmts.release ();
4908*38fd1498Szrj 
4909*38fd1498Szrj   /* Output the inlining info for this abstract function, since it has been
4910*38fd1498Szrj      inlined.  If we don't do this now, we can lose the information about the
4911*38fd1498Szrj      variables in the function when the blocks get blown away as soon as we
4912*38fd1498Szrj      remove the cgraph node.  */
4913*38fd1498Szrj   if (gimple_block (stmt))
4914*38fd1498Szrj     (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4915*38fd1498Szrj 
4916*38fd1498Szrj   /* Update callgraph if needed.  */
4917*38fd1498Szrj   cg_edge->callee->remove ();
4918*38fd1498Szrj 
4919*38fd1498Szrj   id->block = NULL_TREE;
4920*38fd1498Szrj   id->retvar = NULL_TREE;
4921*38fd1498Szrj   id->retbnd = NULL_TREE;
4922*38fd1498Szrj   successfully_inlined = true;
4923*38fd1498Szrj 
4924*38fd1498Szrj  egress:
4925*38fd1498Szrj   input_location = saved_location;
4926*38fd1498Szrj   return successfully_inlined;
4927*38fd1498Szrj }
4928*38fd1498Szrj 
4929*38fd1498Szrj /* Expand call statements reachable from STMT_P.
4930*38fd1498Szrj    We can only have CALL_EXPRs as the "toplevel" tree code or nested
4931*38fd1498Szrj    in a MODIFY_EXPR.  */
4932*38fd1498Szrj 
4933*38fd1498Szrj static bool
4934*38fd1498Szrj gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4935*38fd1498Szrj {
4936*38fd1498Szrj   gimple_stmt_iterator gsi;
4937*38fd1498Szrj   bool inlined = false;
4938*38fd1498Szrj 
4939*38fd1498Szrj   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4940*38fd1498Szrj     {
4941*38fd1498Szrj       gimple *stmt = gsi_stmt (gsi);
4942*38fd1498Szrj       gsi_prev (&gsi);
4943*38fd1498Szrj 
4944*38fd1498Szrj       if (is_gimple_call (stmt)
4945*38fd1498Szrj 	  && !gimple_call_internal_p (stmt))
4946*38fd1498Szrj 	inlined |= expand_call_inline (bb, stmt, id);
4947*38fd1498Szrj     }
4948*38fd1498Szrj 
4949*38fd1498Szrj   return inlined;
4950*38fd1498Szrj }
4951*38fd1498Szrj 
4952*38fd1498Szrj 
/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  Folding builtin calls needs special care
   because one call may expand into several statements, and the callgraph
   edges of each replacement must be updated.  */

static void
fold_marked_statements (int first, hash_set<gimple *> *statements)
{
  /* BB indices >= FIRST are exactly the blocks created by inlining;
     some indices may be unused (NULL), hence the guard below.  */
  for (; first < n_basic_blocks_for_fn (cfun); first++)
    if (BASIC_BLOCK_FOR_FN (cfun, first))
      {
        gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
	     !gsi_end_p (gsi);
	     gsi_next (&gsi))
	  if (statements->contains (gsi_stmt (gsi)))
	    {
	      gimple *old_stmt = gsi_stmt (gsi);
	      /* Remember the callee before folding; folding may replace
		 or delete the call statement entirely.  */
	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;

	      if (old_decl && DECL_BUILT_IN (old_decl))
		{
		  /* Folding builtins can create multiple instructions,
		     we need to look at all of them.  Save an iterator to
		     the statement *before* GSI so we can find the first
		     replacement statement afterwards.  */
		  gimple_stmt_iterator i2 = gsi;
		  gsi_prev (&i2);
		  if (fold_stmt (&gsi))
		    {
		      gimple *new_stmt;
		      /* If a builtin at the end of a bb folded into nothing,
			 the following loop won't work.  Drop the callgraph
			 edge and move on to the next block.  */
		      if (gsi_end_p (gsi))
			{
			  cgraph_update_edges_for_call_stmt (old_stmt,
							     old_decl, NULL);
			  break;
			}
		      /* Re-position I2 on the first statement produced by
			 the fold: either the block head (if the folded call
			 was the first statement) or the successor of the
			 saved predecessor.  */
		      if (gsi_end_p (i2))
			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
		      else
			gsi_next (&i2);
		      /* Update every statement the fold produced, keeping
			 the callgraph in sync, until we reach the one GSI
			 now points at (the last replacement).  */
		      while (1)
			{
			  new_stmt = gsi_stmt (i2);
			  update_stmt (new_stmt);
			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
							     new_stmt);

			  if (new_stmt == gsi_stmt (gsi))
			    {
			      /* It is okay to check only for the very last
				 of these statements.  If it is a throwing
				 statement nothing will change.  If it isn't
				 this can remove EH edges.  If that weren't
				 correct then because some intermediate stmts
				 throw, but not the last one.  That would mean
				 we'd have to split the block, which we can't
				 here and we'd lose anyway.  And as builtins
				 probably never throw, this all
				 is moot anyway.  */
			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
								  new_stmt))
				gimple_purge_dead_eh_edges (
				  BASIC_BLOCK_FOR_FN (cfun, first));
			      break;
			    }
			  gsi_next (&i2);
			}
		    }
		}
	      else if (fold_stmt (&gsi))
		{
		  /* Re-read the statement from GSI as fold_stmt() may
		     have changed it.  */
		  gimple *new_stmt = gsi_stmt (gsi);
		  update_stmt (new_stmt);

		  /* A call may have been folded to a non-call (or vice
		     versa); in either case the callgraph edges for the
		     old statement need to be redirected or removed.  */
		  if (is_gimple_call (old_stmt)
		      || is_gimple_call (new_stmt))
		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
						       new_stmt);

		  /* Folding may have made a throwing statement non-throwing,
		     leaving dead EH edges out of this block.  */
		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
								    first));
		}
	    }
      }
}
5041*38fd1498Szrj 
/* Expand calls to inline functions in the body of FN.  Returns a mask of
   TODO_* flags for the pass manager describing the cleanups required after
   inlining (or 0 if nothing was inlined).  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  /* Remember the current block count: blocks with index >= LAST after
     inlining are the newly created ones that need statement folding.  */
  int last = n_basic_blocks_for_fn (cfun);
  bool inlined_p = false;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node::get (fn);
  gcc_assert (id.dst_node->definition);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;
  id.statements_to_fold = new hash_set<gimple *>;

  push_gimplify_context ();

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB_FN (bb, cfun)
    inlined_p |= gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

  if (flag_checking)
    {
      struct cgraph_edge *e;

      id.dst_node->verify ();

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }

  /* Fold queued statements.  */
  update_max_bb_count ();
  fold_marked_statements (last, id.statements_to_fold);
  delete id.statements_to_fold;

  /* Inlining should have consumed all queued debug statements.  */
  gcc_assert (!id.debug_stmts.exists ());

  /* If we didn't inline into the function there is nothing to do.  */
  if (!inlined_p)
    return 0;

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  delete_unreachable_blocks_update_callgraph (&id);
  if (flag_checking)
    id.dst_node->verify ();

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet - the IPA passes might make various functions to not
     throw and they don't care to proactively update local EH info.  This is
     done later in fixup_cfg pass that also execute the verification.  */
  return (TODO_update_ssa
	  | TODO_cleanup_cfg
	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
	     ? TODO_rebuild_frequencies : 0));
}
5130*38fd1498Szrj 
5131*38fd1498Szrj /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5132*38fd1498Szrj 
5133*38fd1498Szrj tree
5134*38fd1498Szrj copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5135*38fd1498Szrj {
5136*38fd1498Szrj   enum tree_code code = TREE_CODE (*tp);
5137*38fd1498Szrj   enum tree_code_class cl = TREE_CODE_CLASS (code);
5138*38fd1498Szrj 
5139*38fd1498Szrj   /* We make copies of most nodes.  */
5140*38fd1498Szrj   if (IS_EXPR_CODE_CLASS (cl)
5141*38fd1498Szrj       || code == TREE_LIST
5142*38fd1498Szrj       || code == TREE_VEC
5143*38fd1498Szrj       || code == TYPE_DECL
5144*38fd1498Szrj       || code == OMP_CLAUSE)
5145*38fd1498Szrj     {
5146*38fd1498Szrj       /* Because the chain gets clobbered when we make a copy, we save it
5147*38fd1498Szrj 	 here.  */
5148*38fd1498Szrj       tree chain = NULL_TREE, new_tree;
5149*38fd1498Szrj 
5150*38fd1498Szrj       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5151*38fd1498Szrj 	chain = TREE_CHAIN (*tp);
5152*38fd1498Szrj 
5153*38fd1498Szrj       /* Copy the node.  */
5154*38fd1498Szrj       new_tree = copy_node (*tp);
5155*38fd1498Szrj 
5156*38fd1498Szrj       *tp = new_tree;
5157*38fd1498Szrj 
5158*38fd1498Szrj       /* Now, restore the chain, if appropriate.  That will cause
5159*38fd1498Szrj 	 walk_tree to walk into the chain as well.  */
5160*38fd1498Szrj       if (code == PARM_DECL
5161*38fd1498Szrj 	  || code == TREE_LIST
5162*38fd1498Szrj 	  || code == OMP_CLAUSE)
5163*38fd1498Szrj 	TREE_CHAIN (*tp) = chain;
5164*38fd1498Szrj 
5165*38fd1498Szrj       /* For now, we don't update BLOCKs when we make copies.  So, we
5166*38fd1498Szrj 	 have to nullify all BIND_EXPRs.  */
5167*38fd1498Szrj       if (TREE_CODE (*tp) == BIND_EXPR)
5168*38fd1498Szrj 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5169*38fd1498Szrj     }
5170*38fd1498Szrj   else if (code == CONSTRUCTOR)
5171*38fd1498Szrj     {
5172*38fd1498Szrj       /* CONSTRUCTOR nodes need special handling because
5173*38fd1498Szrj          we need to duplicate the vector of elements.  */
5174*38fd1498Szrj       tree new_tree;
5175*38fd1498Szrj 
5176*38fd1498Szrj       new_tree = copy_node (*tp);
5177*38fd1498Szrj       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5178*38fd1498Szrj       *tp = new_tree;
5179*38fd1498Szrj     }
5180*38fd1498Szrj   else if (code == STATEMENT_LIST)
5181*38fd1498Szrj     /* We used to just abort on STATEMENT_LIST, but we can run into them
5182*38fd1498Szrj        with statement-expressions (c++/40975).  */
5183*38fd1498Szrj     copy_statement_list (tp);
5184*38fd1498Szrj   else if (TREE_CODE_CLASS (code) == tcc_type)
5185*38fd1498Szrj     *walk_subtrees = 0;
5186*38fd1498Szrj   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5187*38fd1498Szrj     *walk_subtrees = 0;
5188*38fd1498Szrj   else if (TREE_CODE_CLASS (code) == tcc_constant)
5189*38fd1498Szrj     *walk_subtrees = 0;
5190*38fd1498Szrj   return NULL_TREE;
5191*38fd1498Szrj }
5192*38fd1498Szrj 
5193*38fd1498Szrj /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5194*38fd1498Szrj    information indicating to what new SAVE_EXPR this one should be mapped,
5195*38fd1498Szrj    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5196*38fd1498Szrj    the function into which the copy will be placed.  */
5197*38fd1498Szrj 
5198*38fd1498Szrj static void
5199*38fd1498Szrj remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5200*38fd1498Szrj {
5201*38fd1498Szrj   tree *n;
5202*38fd1498Szrj   tree t;
5203*38fd1498Szrj 
5204*38fd1498Szrj   /* See if we already encountered this SAVE_EXPR.  */
5205*38fd1498Szrj   n = st->get (*tp);
5206*38fd1498Szrj 
5207*38fd1498Szrj   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5208*38fd1498Szrj   if (!n)
5209*38fd1498Szrj     {
5210*38fd1498Szrj       t = copy_node (*tp);
5211*38fd1498Szrj 
5212*38fd1498Szrj       /* Remember this SAVE_EXPR.  */
5213*38fd1498Szrj       st->put (*tp, t);
5214*38fd1498Szrj       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5215*38fd1498Szrj       st->put (t, t);
5216*38fd1498Szrj     }
5217*38fd1498Szrj   else
5218*38fd1498Szrj     {
5219*38fd1498Szrj       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5220*38fd1498Szrj       *walk_subtrees = 0;
5221*38fd1498Szrj       t = *n;
5222*38fd1498Szrj     }
5223*38fd1498Szrj 
5224*38fd1498Szrj   /* Replace this SAVE_EXPR with the copy.  */
5225*38fd1498Szrj   *tp = t;
5226*38fd1498Szrj }
5227*38fd1498Szrj 
5228*38fd1498Szrj /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5229*38fd1498Szrj    label, copies the declaration and enters it in the splay_tree in DATA (which
5230*38fd1498Szrj    is really a 'copy_body_data *'.  */
5231*38fd1498Szrj 
5232*38fd1498Szrj static tree
5233*38fd1498Szrj mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5234*38fd1498Szrj 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5235*38fd1498Szrj 		        struct walk_stmt_info *wi)
5236*38fd1498Szrj {
5237*38fd1498Szrj   copy_body_data *id = (copy_body_data *) wi->info;
5238*38fd1498Szrj   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5239*38fd1498Szrj 
5240*38fd1498Szrj   if (stmt)
5241*38fd1498Szrj     {
5242*38fd1498Szrj       tree decl = gimple_label_label (stmt);
5243*38fd1498Szrj 
5244*38fd1498Szrj       /* Copy the decl and remember the copy.  */
5245*38fd1498Szrj       insert_decl_map (id, decl, id->copy_decl (decl, id));
5246*38fd1498Szrj     }
5247*38fd1498Szrj 
5248*38fd1498Szrj   return NULL_TREE;
5249*38fd1498Szrj }
5250*38fd1498Szrj 
5251*38fd1498Szrj static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5252*38fd1498Szrj 						  struct walk_stmt_info *wi);
5253*38fd1498Szrj 
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the decl map in ID->decl_map (a hash_map from old to new trees),
   remaps all local declarations to appropriate replacements in gimple
   operands. */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  hash_map<tree, tree> *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi->is_lhs;
  wi->is_lhs = false;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      /* The remapped name on the LHS is defined by the statement we are
	 currently walking.  */
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
    }
  /* Only a local declaration (variable or label).  */
  else if ((VAR_P (expr) && !TREE_STATIC (expr))
	   || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = st->get (expr);

      /* If it's there, remap it.  Decls without an entry are left as-is.  */
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
	   || TREE_CODE (expr) == BIND_EXPR
	   || TREE_CODE (expr) == SAVE_EXPR)
    /* These should have been lowered away before this walk runs.  */
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
	{
	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
	  TREE_OPERAND (expr, 3) = NULL_TREE;
	}
    }
  else if (TREE_CODE (expr) == OMP_CLAUSE)
    {
      /* Before the omplower pass completes, some OMP clauses can contain
	 sequences that are neither copied by gimple_seq_copy nor walked by
	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
	 in those situations, we have to copy and process them explicitly.  */

      if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
	{
	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
	{
	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
	{
	  /* Reduction clauses carry two sequences: the initializer and
	     the merge operation.  Both must be duplicated.  */
	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
5340*38fd1498Szrj 
5341*38fd1498Szrj 
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the decl map in the copy_body_data passed through WI->info,
   remaps all local declarations to appropriate replacements in gimple
   statements. */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
		     bool *handled_ops_p ATTRIBUTE_UNUSED,
		     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple *gs = gsi_stmt (*gsip);

  if (gbind *stmt = dyn_cast <gbind *> (gs))
    {
      tree block = gimple_bind_block (stmt);

      /* Remap the scope BLOCK attached to the bind, if any.  */
      if (block)
	{
	  remap_block (&block, id);
	  gimple_bind_set_block (stmt, block);
	}

      /* This will remap a lot of the same decls again, but this should be
	 harmless.  */
      if (gimple_bind_vars (stmt))
	{
	  tree old_var, decls = gimple_bind_vars (stmt);

	  /* Remap each bound variable, skipping those that can remain
	     nonlocal (see can_be_nonlocal) and those whose type is
	     variably modified in the source function.  */
	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
	    if (!can_be_nonlocal (old_var, id)
		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
	      remap_decl (old_var, id);

	  /* Rebuild the bind's variable chain from the decl map.  While
	     doing so, temporarily forbid creating new decls for types so
	     that only already-remapped decls are substituted.  */
	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
	  id->prevent_decl_creation_for_types = true;
	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
	  id->prevent_decl_creation_for_types = false;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
5386*38fd1498Szrj 
5387*38fd1498Szrj /* Create a copy of SEQ and remap all decls in it.  */
5388*38fd1498Szrj 
5389*38fd1498Szrj static gimple_seq
5390*38fd1498Szrj duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5391*38fd1498Szrj {
5392*38fd1498Szrj   if (!seq)
5393*38fd1498Szrj     return NULL;
5394*38fd1498Szrj 
5395*38fd1498Szrj   /* If there are any labels in OMP sequences, they can be only referred to in
5396*38fd1498Szrj      the sequence itself and therefore we can do both here.  */
5397*38fd1498Szrj   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5398*38fd1498Szrj   gimple_seq copy = gimple_seq_copy (seq);
5399*38fd1498Szrj   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5400*38fd1498Szrj   return copy;
5401*38fd1498Szrj }
5402*38fd1498Szrj 
5403*38fd1498Szrj /* Copies everything in SEQ and replaces variables and labels local to
5404*38fd1498Szrj    current_function_decl.  */
5405*38fd1498Szrj 
5406*38fd1498Szrj gimple_seq
5407*38fd1498Szrj copy_gimple_seq_and_replace_locals (gimple_seq seq)
5408*38fd1498Szrj {
5409*38fd1498Szrj   copy_body_data id;
5410*38fd1498Szrj   struct walk_stmt_info wi;
5411*38fd1498Szrj   gimple_seq copy;
5412*38fd1498Szrj 
5413*38fd1498Szrj   /* There's nothing to do for NULL_TREE.  */
5414*38fd1498Szrj   if (seq == NULL)
5415*38fd1498Szrj     return seq;
5416*38fd1498Szrj 
5417*38fd1498Szrj   /* Set up ID.  */
5418*38fd1498Szrj   memset (&id, 0, sizeof (id));
5419*38fd1498Szrj   id.src_fn = current_function_decl;
5420*38fd1498Szrj   id.dst_fn = current_function_decl;
5421*38fd1498Szrj   id.src_cfun = cfun;
5422*38fd1498Szrj   id.decl_map = new hash_map<tree, tree>;
5423*38fd1498Szrj   id.debug_map = NULL;
5424*38fd1498Szrj 
5425*38fd1498Szrj   id.copy_decl = copy_decl_no_change;
5426*38fd1498Szrj   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5427*38fd1498Szrj   id.transform_new_cfg = false;
5428*38fd1498Szrj   id.transform_return_to_modify = false;
5429*38fd1498Szrj   id.transform_parameter = false;
5430*38fd1498Szrj   id.transform_lang_insert_block = NULL;
5431*38fd1498Szrj 
5432*38fd1498Szrj   /* Walk the tree once to find local labels.  */
5433*38fd1498Szrj   memset (&wi, 0, sizeof (wi));
5434*38fd1498Szrj   hash_set<tree> visited;
5435*38fd1498Szrj   wi.info = &id;
5436*38fd1498Szrj   wi.pset = &visited;
5437*38fd1498Szrj   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5438*38fd1498Szrj 
5439*38fd1498Szrj   copy = gimple_seq_copy (seq);
5440*38fd1498Szrj 
5441*38fd1498Szrj   /* Walk the copy, remapping decls.  */
5442*38fd1498Szrj   memset (&wi, 0, sizeof (wi));
5443*38fd1498Szrj   wi.info = &id;
5444*38fd1498Szrj   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5445*38fd1498Szrj 
5446*38fd1498Szrj   /* Clean up.  */
5447*38fd1498Szrj   delete id.decl_map;
5448*38fd1498Szrj   if (id.debug_map)
5449*38fd1498Szrj     delete id.debug_map;
5450*38fd1498Szrj   if (id.dependence_map)
5451*38fd1498Szrj     {
5452*38fd1498Szrj       delete id.dependence_map;
5453*38fd1498Szrj       id.dependence_map = NULL;
5454*38fd1498Szrj     }
5455*38fd1498Szrj 
5456*38fd1498Szrj   return copy;
5457*38fd1498Szrj }
5458*38fd1498Szrj 
5459*38fd1498Szrj 
5460*38fd1498Szrj /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5461*38fd1498Szrj 
5462*38fd1498Szrj static tree
5463*38fd1498Szrj debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5464*38fd1498Szrj {
5465*38fd1498Szrj   if (*tp == data)
5466*38fd1498Szrj     return (tree) data;
5467*38fd1498Szrj   else
5468*38fd1498Szrj     return NULL;
5469*38fd1498Szrj }
5470*38fd1498Szrj 
5471*38fd1498Szrj DEBUG_FUNCTION bool
5472*38fd1498Szrj debug_find_tree (tree top, tree search)
5473*38fd1498Szrj {
5474*38fd1498Szrj   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5475*38fd1498Szrj }
5476*38fd1498Szrj 
5477*38fd1498Szrj 
5478*38fd1498Szrj /* Declare the variables created by the inliner.  Add all the variables in
5479*38fd1498Szrj    VARS to BIND_EXPR.  */
5480*38fd1498Szrj 
5481*38fd1498Szrj static void
5482*38fd1498Szrj declare_inline_vars (tree block, tree vars)
5483*38fd1498Szrj {
5484*38fd1498Szrj   tree t;
5485*38fd1498Szrj   for (t = vars; t; t = DECL_CHAIN (t))
5486*38fd1498Szrj     {
5487*38fd1498Szrj       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5488*38fd1498Szrj       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5489*38fd1498Szrj       add_local_decl (cfun, t);
5490*38fd1498Szrj     }
5491*38fd1498Szrj 
5492*38fd1498Szrj   if (block)
5493*38fd1498Szrj     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5494*38fd1498Szrj }
5495*38fd1498Szrj 
/* Finish up the bookkeeping common to all ways of duplicating a DECL:
   COPY is the fresh node that will live in ID->dst_fn, DECL the node it
   was made from (originally in ID->src_fn).  Fixes up debug-info flags,
   abstract origin, RTL, TREE_USED and DECL_CONTEXT, and returns COPY.  */

tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    {
      /* Ordinary automatic local variables are now in the scope of the
	 new function.  */
      DECL_CONTEXT (copy) = id->dst_fn;
      /* For SIMT regions, record non-register locals so they can be
	 privatized per SIMT lane; mark them with an attribute once.  */
      if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
	{
	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
	    DECL_ATTRIBUTES (copy)
	      = tree_cons (get_identifier ("omp simt private"), NULL,
			   DECL_ATTRIBUTES (copy));
	  id->dst_simt_vars->safe_push (copy);
	}
    }

  return copy;
}
5549*38fd1498Szrj 
5550*38fd1498Szrj static tree
5551*38fd1498Szrj copy_decl_to_var (tree decl, copy_body_data *id)
5552*38fd1498Szrj {
5553*38fd1498Szrj   tree copy, type;
5554*38fd1498Szrj 
5555*38fd1498Szrj   gcc_assert (TREE_CODE (decl) == PARM_DECL
5556*38fd1498Szrj 	      || TREE_CODE (decl) == RESULT_DECL);
5557*38fd1498Szrj 
5558*38fd1498Szrj   type = TREE_TYPE (decl);
5559*38fd1498Szrj 
5560*38fd1498Szrj   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5561*38fd1498Szrj 		     VAR_DECL, DECL_NAME (decl), type);
5562*38fd1498Szrj   if (DECL_PT_UID_SET_P (decl))
5563*38fd1498Szrj     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5564*38fd1498Szrj   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5565*38fd1498Szrj   TREE_READONLY (copy) = TREE_READONLY (decl);
5566*38fd1498Szrj   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5567*38fd1498Szrj   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5568*38fd1498Szrj 
5569*38fd1498Szrj   return copy_decl_for_dup_finish (id, decl, copy);
5570*38fd1498Szrj }
5571*38fd1498Szrj 
5572*38fd1498Szrj /* Like copy_decl_to_var, but create a return slot object instead of a
5573*38fd1498Szrj    pointer variable for return by invisible reference.  */
5574*38fd1498Szrj 
5575*38fd1498Szrj static tree
5576*38fd1498Szrj copy_result_decl_to_var (tree decl, copy_body_data *id)
5577*38fd1498Szrj {
5578*38fd1498Szrj   tree copy, type;
5579*38fd1498Szrj 
5580*38fd1498Szrj   gcc_assert (TREE_CODE (decl) == PARM_DECL
5581*38fd1498Szrj 	      || TREE_CODE (decl) == RESULT_DECL);
5582*38fd1498Szrj 
5583*38fd1498Szrj   type = TREE_TYPE (decl);
5584*38fd1498Szrj   if (DECL_BY_REFERENCE (decl))
5585*38fd1498Szrj     type = TREE_TYPE (type);
5586*38fd1498Szrj 
5587*38fd1498Szrj   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5588*38fd1498Szrj 		     VAR_DECL, DECL_NAME (decl), type);
5589*38fd1498Szrj   if (DECL_PT_UID_SET_P (decl))
5590*38fd1498Szrj     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5591*38fd1498Szrj   TREE_READONLY (copy) = TREE_READONLY (decl);
5592*38fd1498Szrj   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5593*38fd1498Szrj   if (!DECL_BY_REFERENCE (decl))
5594*38fd1498Szrj     {
5595*38fd1498Szrj       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5596*38fd1498Szrj       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5597*38fd1498Szrj     }
5598*38fd1498Szrj 
5599*38fd1498Szrj   return copy_decl_for_dup_finish (id, decl, copy);
5600*38fd1498Szrj }
5601*38fd1498Szrj 
5602*38fd1498Szrj tree
5603*38fd1498Szrj copy_decl_no_change (tree decl, copy_body_data *id)
5604*38fd1498Szrj {
5605*38fd1498Szrj   tree copy;
5606*38fd1498Szrj 
5607*38fd1498Szrj   copy = copy_node (decl);
5608*38fd1498Szrj 
5609*38fd1498Szrj   /* The COPY is not abstract; it will be generated in DST_FN.  */
5610*38fd1498Szrj   DECL_ABSTRACT_P (copy) = false;
5611*38fd1498Szrj   lang_hooks.dup_lang_specific_decl (copy);
5612*38fd1498Szrj 
5613*38fd1498Szrj   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5614*38fd1498Szrj      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5615*38fd1498Szrj   if (TREE_CODE (copy) == LABEL_DECL)
5616*38fd1498Szrj     {
5617*38fd1498Szrj       TREE_ADDRESSABLE (copy) = 0;
5618*38fd1498Szrj       LABEL_DECL_UID (copy) = -1;
5619*38fd1498Szrj     }
5620*38fd1498Szrj 
5621*38fd1498Szrj   return copy_decl_for_dup_finish (id, decl, copy);
5622*38fd1498Szrj }
5623*38fd1498Szrj 
5624*38fd1498Szrj static tree
5625*38fd1498Szrj copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5626*38fd1498Szrj {
5627*38fd1498Szrj   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5628*38fd1498Szrj     return copy_decl_to_var (decl, id);
5629*38fd1498Szrj   else
5630*38fd1498Szrj     return copy_decl_no_change (decl, id);
5631*38fd1498Szrj }
5632*38fd1498Szrj 
/* Return a copy of the function's argument tree.  ORIG_PARM is the
   PARM_DECL chain of the source function.  ARGS_TO_SKIP, if non-NULL,
   is a bitmap of zero-based argument positions to drop from the new
   version; skipped arguments get an equivalent VAR_DECL chained onto
   *VARS instead.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
			       bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  /* PARG always points at the chain slot where the next kept parameter
     is appended.  */
  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
        tree new_tree = remap_decl (arg, id);
	/* If remapping did not yield a PARM_DECL, make a fresh copy of
	   the original parameter instead.  */
	if (TREE_CODE (new_tree) != PARM_DECL)
	  new_tree = id->copy_decl (arg, id);
        lang_hooks.dup_lang_specific_decl (new_tree);
        *parg = new_tree;
	parg = &DECL_CHAIN (new_tree);
      }
    else if (!id->decl_map->get (arg))
      {
	/* Make an equivalent VAR_DECL.  If the argument was used
	   as temporary variable later in function, the uses will be
	   replaced by local variable.  */
	tree var = copy_decl_to_var (arg, id);
	insert_decl_map (id, arg, var);
        /* Declare this new variable.  */
        DECL_CHAIN (var) = *vars;
        *vars = var;
      }
  return new_parm;
}
5667*38fd1498Szrj 
/* Return a copy of the function's static chain.  STATIC_CHAIN is the
   head of a DECL_CHAIN of decls; each element is remapped via ID and
   spliced into a new chain in place.  */
static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      /* Link the remapped decl to the rest of the original chain, then
	 substitute it for the original element.  */
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}
5684*38fd1498Szrj 
5685*38fd1498Szrj /* Return true if the function is allowed to be versioned.
5686*38fd1498Szrj    This is a guard for the versioning functionality.  */
5687*38fd1498Szrj 
5688*38fd1498Szrj bool
5689*38fd1498Szrj tree_versionable_function_p (tree fndecl)
5690*38fd1498Szrj {
5691*38fd1498Szrj   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5692*38fd1498Szrj 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5693*38fd1498Szrj }
5694*38fd1498Szrj 
/* Delete all unreachable basic blocks and update callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline function that become unreachable.  Returns true if any
   block was deleted.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
       != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    {
      /* Remember the successor before B is possibly deleted.  */
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
	{
          gimple_stmt_iterator bsi;

          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
	    {
	      struct cgraph_edge *e;
	      struct cgraph_node *node;

	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));

	      /* If the statement is a call, remove its callgraph edge.
		 For an already-inlined callee, remove the callee's body
		 (and its inline clones) along with the edge.  */
	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
		  &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
		{
		  if (!e->inline_failed)
		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
		  else
		    e->remove ();
		}
	      /* Mirror the removal in every clone of the destination
		 node, walking the clone tree in pre-order.  */
	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
		  && id->dst_node->clones)
		for (node = id->dst_node->clones; node != id->dst_node;)
		  {
		    node->remove_stmt_references (gsi_stmt (bsi));
		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
		      {
			if (!e->inline_failed)
			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
			else
			  e->remove ();
		      }

		    /* Advance in pre-order: descend into child clones
		       first, then visit siblings, finally climb back up
		       until a sibling exists or the root is reached.  */
		    if (node->clones)
		      node = node->clones;
		    else if (node->next_sibling_clone)
		      node = node->next_sibling_clone;
		    else
		      {
			while (node != id->dst_node && !node->next_sibling_clone)
			  node = node->clone_of;
			if (node != id->dst_node)
			  node = node->next_sibling_clone;
		      }
		  }
	    }
	  delete_basic_block (b);
	  changed = true;
	}
    }

  return changed;
}
5767*38fd1498Szrj 
/* Update clone info after duplication: rewrite the trees recorded in
   each clone's replace map so they refer to the new body, using the
   decl map in ID.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  /* Walk the clone tree of the destination node in pre-order.  */
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
	  unsigned int i;
          for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
	    {
	      struct ipa_replace_map *replace_info;
	      replace_info = (*node->clone.tree_map)[i];
	      /* Remap both sides of the replacement through the copied
		 body.  */
	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
	    }
	}
      /* Advance in pre-order: children first, then siblings, then climb
	 back up until a sibling exists or the root is reached.  */
      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != id->dst_node && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != id->dst_node)
	    node = node->next_sibling_clone;
	}
    }
}
5803*38fd1498Szrj 
5804*38fd1498Szrj /* Create a copy of a function's tree.
5805*38fd1498Szrj    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5806*38fd1498Szrj    of the original function and the new copied function
5807*38fd1498Szrj    respectively.  In case we want to replace a DECL
5808*38fd1498Szrj    tree with another tree while duplicating the function's
5809*38fd1498Szrj    body, TREE_MAP represents the mapping between these
5810*38fd1498Szrj    trees. If UPDATE_CLONES is set, the call_stmt fields
5811*38fd1498Szrj    of edges of clones of the function will be updated.
5812*38fd1498Szrj 
5813*38fd1498Szrj    If non-NULL ARGS_TO_SKIP determine function parameters to remove
5814*38fd1498Szrj    from new version.
5815*38fd1498Szrj    If SKIP_RETURN is true, the new version will return void.
5816*38fd1498Szrj    If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5817*38fd1498Szrj    If non_NULL NEW_ENTRY determine new entry BB of the clone.
5818*38fd1498Szrj */
5819*38fd1498Szrj void
5820*38fd1498Szrj tree_function_versioning (tree old_decl, tree new_decl,
5821*38fd1498Szrj 			  vec<ipa_replace_map *, va_gc> *tree_map,
5822*38fd1498Szrj 			  bool update_clones, bitmap args_to_skip,
5823*38fd1498Szrj 			  bool skip_return, bitmap blocks_to_copy,
5824*38fd1498Szrj 			  basic_block new_entry)
5825*38fd1498Szrj {
5826*38fd1498Szrj   struct cgraph_node *old_version_node;
5827*38fd1498Szrj   struct cgraph_node *new_version_node;
5828*38fd1498Szrj   copy_body_data id;
5829*38fd1498Szrj   tree p;
5830*38fd1498Szrj   unsigned i;
5831*38fd1498Szrj   struct ipa_replace_map *replace_info;
5832*38fd1498Szrj   basic_block old_entry_block, bb;
5833*38fd1498Szrj   auto_vec<gimple *, 10> init_stmts;
5834*38fd1498Szrj   tree vars = NULL_TREE;
5835*38fd1498Szrj   bitmap debug_args_to_skip = args_to_skip;
5836*38fd1498Szrj 
5837*38fd1498Szrj   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5838*38fd1498Szrj 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5839*38fd1498Szrj   DECL_POSSIBLY_INLINED (old_decl) = 1;
5840*38fd1498Szrj 
5841*38fd1498Szrj   old_version_node = cgraph_node::get (old_decl);
5842*38fd1498Szrj   gcc_checking_assert (old_version_node);
5843*38fd1498Szrj   new_version_node = cgraph_node::get (new_decl);
5844*38fd1498Szrj   gcc_checking_assert (new_version_node);
5845*38fd1498Szrj 
5846*38fd1498Szrj   /* Copy over debug args.  */
5847*38fd1498Szrj   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5848*38fd1498Szrj     {
5849*38fd1498Szrj       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5850*38fd1498Szrj       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5851*38fd1498Szrj       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5852*38fd1498Szrj       old_debug_args = decl_debug_args_lookup (old_decl);
5853*38fd1498Szrj       if (old_debug_args)
5854*38fd1498Szrj 	{
5855*38fd1498Szrj 	  new_debug_args = decl_debug_args_insert (new_decl);
5856*38fd1498Szrj 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5857*38fd1498Szrj 	}
5858*38fd1498Szrj     }
5859*38fd1498Szrj 
5860*38fd1498Szrj   /* Output the inlining info for this abstract function, since it has been
5861*38fd1498Szrj      inlined.  If we don't do this now, we can lose the information about the
5862*38fd1498Szrj      variables in the function when the blocks get blown away as soon as we
5863*38fd1498Szrj      remove the cgraph node.  */
5864*38fd1498Szrj   (*debug_hooks->outlining_inline_function) (old_decl);
5865*38fd1498Szrj 
5866*38fd1498Szrj   DECL_ARTIFICIAL (new_decl) = 1;
5867*38fd1498Szrj   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5868*38fd1498Szrj   if (DECL_ORIGIN (old_decl) == old_decl)
5869*38fd1498Szrj     old_version_node->used_as_abstract_origin = true;
5870*38fd1498Szrj   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5871*38fd1498Szrj 
5872*38fd1498Szrj   /* Prepare the data structures for the tree copy.  */
5873*38fd1498Szrj   memset (&id, 0, sizeof (id));
5874*38fd1498Szrj 
5875*38fd1498Szrj   /* Generate a new name for the new version. */
5876*38fd1498Szrj   id.statements_to_fold = new hash_set<gimple *>;
5877*38fd1498Szrj 
5878*38fd1498Szrj   id.decl_map = new hash_map<tree, tree>;
5879*38fd1498Szrj   id.debug_map = NULL;
5880*38fd1498Szrj   id.src_fn = old_decl;
5881*38fd1498Szrj   id.dst_fn = new_decl;
5882*38fd1498Szrj   id.src_node = old_version_node;
5883*38fd1498Szrj   id.dst_node = new_version_node;
5884*38fd1498Szrj   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5885*38fd1498Szrj   id.blocks_to_copy = blocks_to_copy;
5886*38fd1498Szrj 
5887*38fd1498Szrj   id.copy_decl = copy_decl_no_change;
5888*38fd1498Szrj   id.transform_call_graph_edges
5889*38fd1498Szrj     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5890*38fd1498Szrj   id.transform_new_cfg = true;
5891*38fd1498Szrj   id.transform_return_to_modify = false;
5892*38fd1498Szrj   id.transform_parameter = false;
5893*38fd1498Szrj   id.transform_lang_insert_block = NULL;
5894*38fd1498Szrj 
5895*38fd1498Szrj   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5896*38fd1498Szrj     (DECL_STRUCT_FUNCTION (old_decl));
5897*38fd1498Szrj   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5898*38fd1498Szrj   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5899*38fd1498Szrj   initialize_cfun (new_decl, old_decl,
5900*38fd1498Szrj 		   new_entry ? new_entry->count : old_entry_block->count);
5901*38fd1498Szrj   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5902*38fd1498Szrj     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5903*38fd1498Szrj       = id.src_cfun->gimple_df->ipa_pta;
5904*38fd1498Szrj 
5905*38fd1498Szrj   /* Copy the function's static chain.  */
5906*38fd1498Szrj   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5907*38fd1498Szrj   if (p)
5908*38fd1498Szrj     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5909*38fd1498Szrj       = copy_static_chain (p, &id);
5910*38fd1498Szrj 
5911*38fd1498Szrj   /* If there's a tree_map, prepare for substitution.  */
5912*38fd1498Szrj   if (tree_map)
5913*38fd1498Szrj     for (i = 0; i < tree_map->length (); i++)
5914*38fd1498Szrj       {
5915*38fd1498Szrj 	gimple *init;
5916*38fd1498Szrj 	replace_info = (*tree_map)[i];
5917*38fd1498Szrj 	if (replace_info->replace_p)
5918*38fd1498Szrj 	  {
5919*38fd1498Szrj 	    int parm_num = -1;
5920*38fd1498Szrj 	    if (!replace_info->old_tree)
5921*38fd1498Szrj 	      {
5922*38fd1498Szrj 		int p = replace_info->parm_num;
5923*38fd1498Szrj 		tree parm;
5924*38fd1498Szrj 		tree req_type, new_type;
5925*38fd1498Szrj 
5926*38fd1498Szrj 		for (parm = DECL_ARGUMENTS (old_decl); p;
5927*38fd1498Szrj 		     parm = DECL_CHAIN (parm))
5928*38fd1498Szrj 		  p--;
5929*38fd1498Szrj 		replace_info->old_tree = parm;
5930*38fd1498Szrj 		parm_num = replace_info->parm_num;
5931*38fd1498Szrj 		req_type = TREE_TYPE (parm);
5932*38fd1498Szrj 		new_type = TREE_TYPE (replace_info->new_tree);
5933*38fd1498Szrj 		if (!useless_type_conversion_p (req_type, new_type))
5934*38fd1498Szrj 		  {
5935*38fd1498Szrj 		    if (fold_convertible_p (req_type, replace_info->new_tree))
5936*38fd1498Szrj 		      replace_info->new_tree
5937*38fd1498Szrj 			= fold_build1 (NOP_EXPR, req_type,
5938*38fd1498Szrj 				       replace_info->new_tree);
5939*38fd1498Szrj 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5940*38fd1498Szrj 		      replace_info->new_tree
5941*38fd1498Szrj 			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
5942*38fd1498Szrj 				       replace_info->new_tree);
5943*38fd1498Szrj 		    else
5944*38fd1498Szrj 		      {
5945*38fd1498Szrj 			if (dump_file)
5946*38fd1498Szrj 			  {
5947*38fd1498Szrj 			    fprintf (dump_file, "    const ");
5948*38fd1498Szrj 			    print_generic_expr (dump_file,
5949*38fd1498Szrj 						replace_info->new_tree);
5950*38fd1498Szrj 			    fprintf (dump_file,
5951*38fd1498Szrj 				     "  can't be converted to param ");
5952*38fd1498Szrj 			    print_generic_expr (dump_file, parm);
5953*38fd1498Szrj 			    fprintf (dump_file, "\n");
5954*38fd1498Szrj 			  }
5955*38fd1498Szrj 			replace_info->old_tree = NULL;
5956*38fd1498Szrj 		      }
5957*38fd1498Szrj 		  }
5958*38fd1498Szrj 	      }
5959*38fd1498Szrj 	    else
5960*38fd1498Szrj 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5961*38fd1498Szrj 	    if (replace_info->old_tree)
5962*38fd1498Szrj 	      {
5963*38fd1498Szrj 		init = setup_one_parameter (&id, replace_info->old_tree,
5964*38fd1498Szrj 					    replace_info->new_tree, id.src_fn,
5965*38fd1498Szrj 					    NULL,
5966*38fd1498Szrj 					    &vars);
5967*38fd1498Szrj 		if (init)
5968*38fd1498Szrj 		  init_stmts.safe_push (init);
5969*38fd1498Szrj 		if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5970*38fd1498Szrj 		  {
5971*38fd1498Szrj 		    if (parm_num == -1)
5972*38fd1498Szrj 		      {
5973*38fd1498Szrj 			tree parm;
5974*38fd1498Szrj 			int p;
5975*38fd1498Szrj 			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5976*38fd1498Szrj 			     parm = DECL_CHAIN (parm), p++)
5977*38fd1498Szrj 			  if (parm == replace_info->old_tree)
5978*38fd1498Szrj 			    {
5979*38fd1498Szrj 			      parm_num = p;
5980*38fd1498Szrj 			      break;
5981*38fd1498Szrj 			    }
5982*38fd1498Szrj 		      }
5983*38fd1498Szrj 		    if (parm_num != -1)
5984*38fd1498Szrj 		      {
5985*38fd1498Szrj 			if (debug_args_to_skip == args_to_skip)
5986*38fd1498Szrj 			  {
5987*38fd1498Szrj 			    debug_args_to_skip = BITMAP_ALLOC (NULL);
5988*38fd1498Szrj 			    bitmap_copy (debug_args_to_skip, args_to_skip);
5989*38fd1498Szrj 			  }
5990*38fd1498Szrj 			bitmap_clear_bit (debug_args_to_skip, parm_num);
5991*38fd1498Szrj 		      }
5992*38fd1498Szrj 		  }
5993*38fd1498Szrj 	      }
5994*38fd1498Szrj 	  }
5995*38fd1498Szrj       }
5996*38fd1498Szrj   /* Copy the function's arguments.  */
5997*38fd1498Szrj   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5998*38fd1498Szrj     DECL_ARGUMENTS (new_decl)
5999*38fd1498Szrj       = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6000*38fd1498Szrj 				       args_to_skip, &vars);
6001*38fd1498Szrj 
6002*38fd1498Szrj   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6003*38fd1498Szrj   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6004*38fd1498Szrj 
6005*38fd1498Szrj   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6006*38fd1498Szrj 
6007*38fd1498Szrj   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6008*38fd1498Szrj     /* Add local vars.  */
6009*38fd1498Szrj     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6010*38fd1498Szrj 
6011*38fd1498Szrj   if (DECL_RESULT (old_decl) == NULL_TREE)
6012*38fd1498Szrj     ;
6013*38fd1498Szrj   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6014*38fd1498Szrj     {
6015*38fd1498Szrj       DECL_RESULT (new_decl)
6016*38fd1498Szrj 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6017*38fd1498Szrj 		      RESULT_DECL, NULL_TREE, void_type_node);
6018*38fd1498Szrj       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6019*38fd1498Szrj       cfun->returns_struct = 0;
6020*38fd1498Szrj       cfun->returns_pcc_struct = 0;
6021*38fd1498Szrj     }
6022*38fd1498Szrj   else
6023*38fd1498Szrj     {
6024*38fd1498Szrj       tree old_name;
6025*38fd1498Szrj       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6026*38fd1498Szrj       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6027*38fd1498Szrj       if (gimple_in_ssa_p (id.src_cfun)
6028*38fd1498Szrj 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6029*38fd1498Szrj 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6030*38fd1498Szrj 	{
6031*38fd1498Szrj 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6032*38fd1498Szrj 	  insert_decl_map (&id, old_name, new_name);
6033*38fd1498Szrj 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6034*38fd1498Szrj 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6035*38fd1498Szrj 	}
6036*38fd1498Szrj     }
6037*38fd1498Szrj 
6038*38fd1498Szrj   /* Set up the destination functions loop tree.  */
6039*38fd1498Szrj   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6040*38fd1498Szrj     {
6041*38fd1498Szrj       cfun->curr_properties &= ~PROP_loops;
6042*38fd1498Szrj       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6043*38fd1498Szrj       cfun->curr_properties |= PROP_loops;
6044*38fd1498Szrj     }
6045*38fd1498Szrj 
6046*38fd1498Szrj   /* Copy the Function's body.  */
6047*38fd1498Szrj   copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6048*38fd1498Szrj 	     new_entry);
6049*38fd1498Szrj 
6050*38fd1498Szrj   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6051*38fd1498Szrj   number_blocks (new_decl);
6052*38fd1498Szrj 
6053*38fd1498Szrj   /* We want to create the BB unconditionally, so that the addition of
6054*38fd1498Szrj      debug stmts doesn't affect BB count, which may in the end cause
6055*38fd1498Szrj      codegen differences.  */
6056*38fd1498Szrj   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6057*38fd1498Szrj   while (init_stmts.length ())
6058*38fd1498Szrj     insert_init_stmt (&id, bb, init_stmts.pop ());
6059*38fd1498Szrj   update_clone_info (&id);
6060*38fd1498Szrj 
6061*38fd1498Szrj   /* Remap the nonlocal_goto_save_area, if any.  */
6062*38fd1498Szrj   if (cfun->nonlocal_goto_save_area)
6063*38fd1498Szrj     {
6064*38fd1498Szrj       struct walk_stmt_info wi;
6065*38fd1498Szrj 
6066*38fd1498Szrj       memset (&wi, 0, sizeof (wi));
6067*38fd1498Szrj       wi.info = &id;
6068*38fd1498Szrj       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6069*38fd1498Szrj     }
6070*38fd1498Szrj 
6071*38fd1498Szrj   /* Clean up.  */
6072*38fd1498Szrj   delete id.decl_map;
6073*38fd1498Szrj   if (id.debug_map)
6074*38fd1498Szrj     delete id.debug_map;
6075*38fd1498Szrj   free_dominance_info (CDI_DOMINATORS);
6076*38fd1498Szrj   free_dominance_info (CDI_POST_DOMINATORS);
6077*38fd1498Szrj 
6078*38fd1498Szrj   update_max_bb_count ();
6079*38fd1498Szrj   fold_marked_statements (0, id.statements_to_fold);
6080*38fd1498Szrj   delete id.statements_to_fold;
6081*38fd1498Szrj   delete_unreachable_blocks_update_callgraph (&id);
6082*38fd1498Szrj   if (id.dst_node->definition)
6083*38fd1498Szrj     cgraph_edge::rebuild_references ();
6084*38fd1498Szrj   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6085*38fd1498Szrj     {
6086*38fd1498Szrj       calculate_dominance_info (CDI_DOMINATORS);
6087*38fd1498Szrj       fix_loop_structure (NULL);
6088*38fd1498Szrj     }
6089*38fd1498Szrj   update_ssa (TODO_update_ssa);
6090*38fd1498Szrj 
6091*38fd1498Szrj   /* After partial cloning we need to rescale frequencies, so they are
6092*38fd1498Szrj      within proper range in the cloned function.  */
6093*38fd1498Szrj   if (new_entry)
6094*38fd1498Szrj     {
6095*38fd1498Szrj       struct cgraph_edge *e;
6096*38fd1498Szrj       rebuild_frequencies ();
6097*38fd1498Szrj 
6098*38fd1498Szrj       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6099*38fd1498Szrj       for (e = new_version_node->callees; e; e = e->next_callee)
6100*38fd1498Szrj 	{
6101*38fd1498Szrj 	  basic_block bb = gimple_bb (e->call_stmt);
6102*38fd1498Szrj 	  e->count = bb->count;
6103*38fd1498Szrj 	}
6104*38fd1498Szrj       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6105*38fd1498Szrj 	{
6106*38fd1498Szrj 	  basic_block bb = gimple_bb (e->call_stmt);
6107*38fd1498Szrj 	  e->count = bb->count;
6108*38fd1498Szrj 	}
6109*38fd1498Szrj     }
6110*38fd1498Szrj 
6111*38fd1498Szrj   if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6112*38fd1498Szrj     {
6113*38fd1498Szrj       tree parm;
6114*38fd1498Szrj       vec<tree, va_gc> **debug_args = NULL;
6115*38fd1498Szrj       unsigned int len = 0;
6116*38fd1498Szrj       for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6117*38fd1498Szrj 	   parm; parm = DECL_CHAIN (parm), i++)
6118*38fd1498Szrj 	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6119*38fd1498Szrj 	  {
6120*38fd1498Szrj 	    tree ddecl;
6121*38fd1498Szrj 
6122*38fd1498Szrj 	    if (debug_args == NULL)
6123*38fd1498Szrj 	      {
6124*38fd1498Szrj 		debug_args = decl_debug_args_insert (new_decl);
6125*38fd1498Szrj 		len = vec_safe_length (*debug_args);
6126*38fd1498Szrj 	      }
6127*38fd1498Szrj 	    ddecl = make_node (DEBUG_EXPR_DECL);
6128*38fd1498Szrj 	    DECL_ARTIFICIAL (ddecl) = 1;
6129*38fd1498Szrj 	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
6130*38fd1498Szrj 	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
6131*38fd1498Szrj 	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6132*38fd1498Szrj 	    vec_safe_push (*debug_args, ddecl);
6133*38fd1498Szrj 	  }
6134*38fd1498Szrj       if (debug_args != NULL)
6135*38fd1498Szrj 	{
6136*38fd1498Szrj 	  /* On the callee side, add
6137*38fd1498Szrj 	     DEBUG D#Y s=> parm
6138*38fd1498Szrj 	     DEBUG var => D#Y
6139*38fd1498Szrj 	     stmts to the first bb where var is a VAR_DECL created for the
6140*38fd1498Szrj 	     optimized away parameter in DECL_INITIAL block.  This hints
6141*38fd1498Szrj 	     in the debug info that var (whole DECL_ORIGIN is the parm
6142*38fd1498Szrj 	     PARM_DECL) is optimized away, but could be looked up at the
6143*38fd1498Szrj 	     call site as value of D#X there.  */
6144*38fd1498Szrj 	  tree var = vars, vexpr;
6145*38fd1498Szrj 	  gimple_stmt_iterator cgsi
6146*38fd1498Szrj 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6147*38fd1498Szrj 	  gimple *def_temp;
6148*38fd1498Szrj 	  var = vars;
6149*38fd1498Szrj 	  i = vec_safe_length (*debug_args);
6150*38fd1498Szrj 	  do
6151*38fd1498Szrj 	    {
6152*38fd1498Szrj 	      i -= 2;
6153*38fd1498Szrj 	      while (var != NULL_TREE
6154*38fd1498Szrj 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6155*38fd1498Szrj 		var = TREE_CHAIN (var);
6156*38fd1498Szrj 	      if (var == NULL_TREE)
6157*38fd1498Szrj 		break;
6158*38fd1498Szrj 	      vexpr = make_node (DEBUG_EXPR_DECL);
6159*38fd1498Szrj 	      parm = (**debug_args)[i];
6160*38fd1498Szrj 	      DECL_ARTIFICIAL (vexpr) = 1;
6161*38fd1498Szrj 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6162*38fd1498Szrj 	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
6163*38fd1498Szrj 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6164*38fd1498Szrj 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6165*38fd1498Szrj 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6166*38fd1498Szrj 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6167*38fd1498Szrj 	    }
6168*38fd1498Szrj 	  while (i > len);
6169*38fd1498Szrj 	}
6170*38fd1498Szrj     }
6171*38fd1498Szrj 
6172*38fd1498Szrj   if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6173*38fd1498Szrj     BITMAP_FREE (debug_args_to_skip);
6174*38fd1498Szrj   free_dominance_info (CDI_DOMINATORS);
6175*38fd1498Szrj   free_dominance_info (CDI_POST_DOMINATORS);
6176*38fd1498Szrj 
6177*38fd1498Szrj   gcc_assert (!id.debug_stmts.exists ());
6178*38fd1498Szrj   pop_cfun ();
6179*38fd1498Szrj   return;
6180*38fd1498Szrj }
6181*38fd1498Szrj 
6182*38fd1498Szrj /* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6183*38fd1498Szrj    the callee and return the inlined body on success.  */
6184*38fd1498Szrj 
6185*38fd1498Szrj tree
6186*38fd1498Szrj maybe_inline_call_in_expr (tree exp)
6187*38fd1498Szrj {
6188*38fd1498Szrj   tree fn = get_callee_fndecl (exp);
6189*38fd1498Szrj 
6190*38fd1498Szrj   /* We can only try to inline "const" functions.  */
6191*38fd1498Szrj   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6192*38fd1498Szrj     {
6193*38fd1498Szrj       call_expr_arg_iterator iter;
6194*38fd1498Szrj       copy_body_data id;
6195*38fd1498Szrj       tree param, arg, t;
6196*38fd1498Szrj       hash_map<tree, tree> decl_map;
6197*38fd1498Szrj 
6198*38fd1498Szrj       /* Remap the parameters.  */
6199*38fd1498Szrj       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6200*38fd1498Szrj 	   param;
6201*38fd1498Szrj 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6202*38fd1498Szrj 	decl_map.put (param, arg);
6203*38fd1498Szrj 
6204*38fd1498Szrj       memset (&id, 0, sizeof (id));
6205*38fd1498Szrj       id.src_fn = fn;
6206*38fd1498Szrj       id.dst_fn = current_function_decl;
6207*38fd1498Szrj       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6208*38fd1498Szrj       id.decl_map = &decl_map;
6209*38fd1498Szrj 
6210*38fd1498Szrj       id.copy_decl = copy_decl_no_change;
6211*38fd1498Szrj       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6212*38fd1498Szrj       id.transform_new_cfg = false;
6213*38fd1498Szrj       id.transform_return_to_modify = true;
6214*38fd1498Szrj       id.transform_parameter = true;
6215*38fd1498Szrj       id.transform_lang_insert_block = NULL;
6216*38fd1498Szrj 
6217*38fd1498Szrj       /* Make sure not to unshare trees behind the front-end's back
6218*38fd1498Szrj 	 since front-end specific mechanisms may rely on sharing.  */
6219*38fd1498Szrj       id.regimplify = false;
6220*38fd1498Szrj       id.do_not_unshare = true;
6221*38fd1498Szrj 
6222*38fd1498Szrj       /* We're not inside any EH region.  */
6223*38fd1498Szrj       id.eh_lp_nr = 0;
6224*38fd1498Szrj 
6225*38fd1498Szrj       t = copy_tree_body (&id);
6226*38fd1498Szrj 
6227*38fd1498Szrj       /* We can only return something suitable for use in a GENERIC
6228*38fd1498Szrj 	 expression tree.  */
6229*38fd1498Szrj       if (TREE_CODE (t) == MODIFY_EXPR)
6230*38fd1498Szrj 	return TREE_OPERAND (t, 1);
6231*38fd1498Szrj     }
6232*38fd1498Szrj 
6233*38fd1498Szrj    return NULL_TREE;
6234*38fd1498Szrj }
6235*38fd1498Szrj 
6236*38fd1498Szrj /* Duplicate a type, fields and all.  */
6237*38fd1498Szrj 
6238*38fd1498Szrj tree
6239*38fd1498Szrj build_duplicate_type (tree type)
6240*38fd1498Szrj {
6241*38fd1498Szrj   struct copy_body_data id;
6242*38fd1498Szrj 
6243*38fd1498Szrj   memset (&id, 0, sizeof (id));
6244*38fd1498Szrj   id.src_fn = current_function_decl;
6245*38fd1498Szrj   id.dst_fn = current_function_decl;
6246*38fd1498Szrj   id.src_cfun = cfun;
6247*38fd1498Szrj   id.decl_map = new hash_map<tree, tree>;
6248*38fd1498Szrj   id.debug_map = NULL;
6249*38fd1498Szrj   id.copy_decl = copy_decl_no_change;
6250*38fd1498Szrj 
6251*38fd1498Szrj   type = remap_type_1 (type, &id);
6252*38fd1498Szrj 
6253*38fd1498Szrj   delete id.decl_map;
6254*38fd1498Szrj   if (id.debug_map)
6255*38fd1498Szrj     delete id.debug_map;
6256*38fd1498Szrj 
6257*38fd1498Szrj   TYPE_CANONICAL (type) = type;
6258*38fd1498Szrj 
6259*38fd1498Szrj   return type;
6260*38fd1498Szrj }
6261*38fd1498Szrj 
6262*38fd1498Szrj /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6263*38fd1498Szrj    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6264*38fd1498Szrj    evaluation.  */
6265*38fd1498Szrj 
6266*38fd1498Szrj tree
6267*38fd1498Szrj copy_fn (tree fn, tree& parms, tree& result)
6268*38fd1498Szrj {
6269*38fd1498Szrj   copy_body_data id;
6270*38fd1498Szrj   tree param;
6271*38fd1498Szrj   hash_map<tree, tree> decl_map;
6272*38fd1498Szrj 
6273*38fd1498Szrj   tree *p = &parms;
6274*38fd1498Szrj   *p = NULL_TREE;
6275*38fd1498Szrj 
6276*38fd1498Szrj   memset (&id, 0, sizeof (id));
6277*38fd1498Szrj   id.src_fn = fn;
6278*38fd1498Szrj   id.dst_fn = current_function_decl;
6279*38fd1498Szrj   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6280*38fd1498Szrj   id.decl_map = &decl_map;
6281*38fd1498Szrj 
6282*38fd1498Szrj   id.copy_decl = copy_decl_no_change;
6283*38fd1498Szrj   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6284*38fd1498Szrj   id.transform_new_cfg = false;
6285*38fd1498Szrj   id.transform_return_to_modify = false;
6286*38fd1498Szrj   id.transform_parameter = true;
6287*38fd1498Szrj   id.transform_lang_insert_block = NULL;
6288*38fd1498Szrj 
6289*38fd1498Szrj   /* Make sure not to unshare trees behind the front-end's back
6290*38fd1498Szrj      since front-end specific mechanisms may rely on sharing.  */
6291*38fd1498Szrj   id.regimplify = false;
6292*38fd1498Szrj   id.do_not_unshare = true;
6293*38fd1498Szrj 
6294*38fd1498Szrj   /* We're not inside any EH region.  */
6295*38fd1498Szrj   id.eh_lp_nr = 0;
6296*38fd1498Szrj 
6297*38fd1498Szrj   /* Remap the parameters and result and return them to the caller.  */
6298*38fd1498Szrj   for (param = DECL_ARGUMENTS (fn);
6299*38fd1498Szrj        param;
6300*38fd1498Szrj        param = DECL_CHAIN (param))
6301*38fd1498Szrj     {
6302*38fd1498Szrj       *p = remap_decl (param, &id);
6303*38fd1498Szrj       p = &DECL_CHAIN (*p);
6304*38fd1498Szrj     }
6305*38fd1498Szrj 
6306*38fd1498Szrj   if (DECL_RESULT (fn))
6307*38fd1498Szrj     result = remap_decl (DECL_RESULT (fn), &id);
6308*38fd1498Szrj   else
6309*38fd1498Szrj     result = NULL_TREE;
6310*38fd1498Szrj 
6311*38fd1498Szrj   return copy_tree_body (&id);
6312*38fd1498Szrj }
6313