xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-inline.c (revision 23f5f46327e37e7811da3520f4bb933f9489322f)
1 /* Tree inlining.
2    Copyright (C) 2001-2020 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 
65 /* I'm not real happy about this, but we need to handle gimple and
66    non-gimple trees.  */
67 
68 /* Inlining, Cloning, Versioning, Parallelization
69 
70    Inlining: a function body is duplicated, but the PARM_DECLs are
71    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72    MODIFY_EXPRs that store to a dedicated returned-value variable.
73    The duplicated eh_region info of the copy will later be appended
74    to the info for the caller; the eh_region info in copied throwing
75    statements and RESX statements is adjusted accordingly.
76 
77    Cloning: (only in C++) We have one body for a con/de/structor, and
78    multiple function decls, each with a unique parameter list.
79    Duplicate the body, using the given splay tree; some parameters
80    will become constants (like 0 or 1).
81 
82    Versioning: a function body is duplicated, but the result is a new
83    function rather than being inlined into the blocks of an existing
84    function as with inlining.  Some parameters will become constants.
85 
86    Parallelization: a region of a function is duplicated resulting in
87    a new function.  Variables may be replaced with complex expressions
88    to enable shared variable semantics.
89 
90    All of these will simultaneously look up any callgraph edges.  If
91    we're going to inline the duplicated function body, and the given
92    function has some cloned callgraph nodes (one for each place this
93    function will be inlined) those callgraph edges will be duplicated.
94    If we're cloning the body, those callgraph edges will be
95    updated to point into the new body.  (Note that the original
96    callgraph node and edge list will not be altered.)
97 
98    See the CALL_EXPR handling case in copy_tree_body_r ().  */
99 
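/* As a rough illustration of the inlining transform described above
   (the temporary names below are purely illustrative): inlining

     int add1 (int x) { return x + 1; }

   at a call site "y = add1 (3);" conceptually leaves the caller with

     x.0 = 3;               x, a PARM_DECL, remapped to a local VAR_DECL
     retval.1 = x.0 + 1;    the RETURN_EXPR turned into a MODIFY_EXPR
     y = retval.1;          the dedicated returned-value variable read back

   after which the copied blocks are wired into the caller's CFG.  */
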
100 /* To Do:
101 
102    o In order to make inlining-on-trees work, we pessimized
103      function-local static constants.  In particular, they are now
104      always output, even when not addressed.  Fix this by treating
105      function-local static constants just like global static
106      constants; the back-end already knows not to output them if they
107      are not needed.
108 
109    o Provide heuristics to clamp inlining of recursive template
110      calls?  */
111 
112 
113 /* Weights that estimate_num_insns uses to estimate the size of the
114    produced code.  */
115 
116 eni_weights eni_size_weights;
117 
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119    to execute the produced code.  */
120 
121 eni_weights eni_time_weights;
122 
123 /* Prototypes.  */
124 
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 				     basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
136 
137 /* Insert a tree->tree mapping for ID.  Although the name suggests
138    that the trees should be variables, it is used for more than that.  */
139 
140 void
141 insert_decl_map (copy_body_data *id, tree key, tree value)
142 {
143   id->decl_map->put (key, value);
144 
145   /* Always insert an identity map as well.  If we see this same new
146      node again, we won't want to duplicate it a second time.  */
147   if (key != value)
148     id->decl_map->put (value, value);
149 }
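
/* For instance (purely illustrative): after insert_decl_map (id, old_var,
   new_var), the map contains both old_var -> new_var and the identity
   new_var -> new_var, so remapping new_var later just yields new_var again
   rather than creating a second copy.  */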
150 
151 /* Insert a tree->tree mapping for ID.  This is only used for
152    variables.  */
153 
154 static void
155 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 {
157   if (!gimple_in_ssa_p (id->src_cfun))
158     return;
159 
160   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
161     return;
162 
163   if (!target_for_debug_bind (key))
164     return;
165 
166   gcc_assert (TREE_CODE (key) == PARM_DECL);
167   gcc_assert (VAR_P (value));
168 
169   if (!id->debug_map)
170     id->debug_map = new hash_map<tree, tree>;
171 
172   id->debug_map->put (key, value);
173 }
174 
175 /* If nonzero, we're remapping the contents of inlined debug
176    statements.  If negative, an error has occurred, such as a
177    reference to a variable that isn't available in the inlined
178    context.  */
179 static int processing_debug_stmt = 0;
180 
181 /* Construct new SSA name for old NAME. ID is the inline context.  */
182 
183 static tree
184 remap_ssa_name (tree name, copy_body_data *id)
185 {
186   tree new_tree, var;
187   tree *n;
188 
189   gcc_assert (TREE_CODE (name) == SSA_NAME);
190 
191   n = id->decl_map->get (name);
192   if (n)
193     {
194       /* When we perform edge redirection as part of CFG copy, IPA-SRA can
195 	 remove an unused LHS from a call statement.  Such an LHS can however
196 	 still appear in debug statements, but its value is lost in this
197 	 function and we do not want to map it.  */
198       if (id->killed_new_ssa_names
199 	  && id->killed_new_ssa_names->contains (*n))
200 	{
201 	  gcc_assert (processing_debug_stmt);
202 	  processing_debug_stmt = -1;
203 	  return name;
204 	}
205 
206       return unshare_expr (*n);
207     }
208 
209   if (processing_debug_stmt)
210     {
211       if (SSA_NAME_IS_DEFAULT_DEF (name)
212 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
213 	  && id->entry_bb == NULL
214 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
215 	{
216 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
217 	  gimple *def_temp;
218 	  gimple_stmt_iterator gsi;
219 	  tree val = SSA_NAME_VAR (name);
220 
221 	  n = id->decl_map->get (val);
222 	  if (n != NULL)
223 	    val = *n;
224 	  if (TREE_CODE (val) != PARM_DECL
225 	      && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
226 	    {
227 	      processing_debug_stmt = -1;
228 	      return name;
229 	    }
230 	  n = id->decl_map->get (val);
231 	  if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
232 	    return *n;
233 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
234 	  DECL_ARTIFICIAL (vexpr) = 1;
235 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
236 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
237 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
238 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
239 	  insert_decl_map (id, val, vexpr);
240 	  return vexpr;
241 	}
242 
243       processing_debug_stmt = -1;
244       return name;
245     }
246 
247   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
248   var = SSA_NAME_VAR (name);
249   if (!var
250       || (!SSA_NAME_IS_DEFAULT_DEF (name)
251 	  && VAR_P (var)
252 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
253 	  && DECL_ARTIFICIAL (var)
254 	  && DECL_IGNORED_P (var)
255 	  && !DECL_NAME (var)))
256     {
257       struct ptr_info_def *pi;
258       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
259       if (!var && SSA_NAME_IDENTIFIER (name))
260 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
261       insert_decl_map (id, name, new_tree);
262       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
263 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
264       /* At least IPA points-to info can be directly transferred.  */
265       if (id->src_cfun->gimple_df
266 	  && id->src_cfun->gimple_df->ipa_pta
267 	  && POINTER_TYPE_P (TREE_TYPE (name))
268 	  && (pi = SSA_NAME_PTR_INFO (name))
269 	  && !pi->pt.anything)
270 	{
271 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
272 	  new_pi->pt = pi->pt;
273 	}
274       /* So can range-info.  */
275       if (!POINTER_TYPE_P (TREE_TYPE (name))
276 	  && SSA_NAME_RANGE_INFO (name))
277 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
278 				       SSA_NAME_RANGE_INFO (name));
279       return new_tree;
280     }
281 
282   /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
283      that in copy_bb.  */
284   new_tree = remap_decl (var, id);
285 
286   /* We might've substituted a constant or another SSA_NAME for
287      the variable.
288 
289      Replace the SSA name representing RESULT_DECL by the variable during
290      inlining:  this saves us from the need to introduce a PHI node in
291      case the return value is only partly initialized.  */
292   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
293       && (!SSA_NAME_VAR (name)
294 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
295 	  || !id->transform_return_to_modify))
296     {
297       struct ptr_info_def *pi;
298       new_tree = make_ssa_name (new_tree);
299       insert_decl_map (id, name, new_tree);
300       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
301 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
302       /* At least IPA points-to info can be directly transferred.  */
303       if (id->src_cfun->gimple_df
304 	  && id->src_cfun->gimple_df->ipa_pta
305 	  && POINTER_TYPE_P (TREE_TYPE (name))
306 	  && (pi = SSA_NAME_PTR_INFO (name))
307 	  && !pi->pt.anything)
308 	{
309 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
310 	  new_pi->pt = pi->pt;
311 	}
312       /* So can range-info.  */
313       if (!POINTER_TYPE_P (TREE_TYPE (name))
314 	  && SSA_NAME_RANGE_INFO (name))
315 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
316 				       SSA_NAME_RANGE_INFO (name));
317       if (SSA_NAME_IS_DEFAULT_DEF (name))
318 	{
319 	  /* By inlining a function having an uninitialized variable, we might
320 	     extend its lifetime (the variable might get reused).  This causes
321 	     an ICE in case we end up extending the lifetime of an SSA name
322 	     across an abnormal edge, and it also increases register pressure.
323 
324 	     We simply initialize all uninitialized vars to 0, except for the
325 	     case where we are inlining into the very first BB.  We could avoid
326 	     this for all BBs that are not inside strongly connected regions
327 	     of the CFG, but this is expensive to test.  */
328 	  if (id->entry_bb
329 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
330 	      && (!SSA_NAME_VAR (name)
331 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
332 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
333 					     0)->dest
334 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
335 	    {
336 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
337 	      gimple *init_stmt;
338 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
339 
340 	      init_stmt = gimple_build_assign (new_tree, zero);
341 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
342 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
343 	    }
344 	  else
345 	    {
346 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
347 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
348 	    }
349 	}
350     }
351   else
352     insert_decl_map (id, name, new_tree);
353   return new_tree;
354 }
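
/* A hedged illustration of the zero-initialization above: if the inlined
   callee is, say,

     int pick (int c) { int x; if (c) x = 1; return x; }

   then the default definition of the uninitialized X becomes live inside
   the caller after inlining.  When such a name also occurs in an abnormal
   PHI and we are not inlining into the caller's very first block, the code
   above emits an explicit zero assignment in ID->entry_bb so that the copied
   SSA name gets a real defining statement; otherwise it is kept as a
   default definition of the remapped variable.  */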
355 
356 /* Remap DECL during the copying of the BLOCK tree for the function.  */
357 
358 tree
359 remap_decl (tree decl, copy_body_data *id)
360 {
361   tree *n;
362 
363   /* We only remap local variables in the current function.  */
364 
365   /* See if we have remapped this declaration.  */
366 
367   n = id->decl_map->get (decl);
368 
369   if (!n && processing_debug_stmt)
370     {
371       processing_debug_stmt = -1;
372       return decl;
373     }
374 
375   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
376      necessary DECLs have already been remapped and we do not want to duplicate
377      a decl coming from outside of the sequence we are copying.  */
378   if (!n
379       && id->prevent_decl_creation_for_types
380       && id->remapping_type_depth > 0
381       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
382     return decl;
383 
384   /* If we didn't already have an equivalent for this declaration, create one
385      now.  */
386   if (!n)
387     {
388       /* Make a copy of the variable or label.  */
389       tree t = id->copy_decl (decl, id);
390 
391       /* Remember it, so that if we encounter this local entity again
392 	 we can reuse this copy.  Do this early because remap_type may
393 	 need this decl for TYPE_STUB_DECL.  */
394       insert_decl_map (id, decl, t);
395 
396       if (!DECL_P (t))
397 	return t;
398 
399       /* Remap types, if necessary.  */
400       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
401       if (TREE_CODE (t) == TYPE_DECL)
402 	{
403 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
404 
405 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
406 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
407 	     is not set on the TYPE_DECL, for example in LTO mode.  */
408 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
409 	    {
410 	      tree x = build_variant_type_copy (TREE_TYPE (t));
411 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
412 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
413 	      DECL_ORIGINAL_TYPE (t) = x;
414 	    }
415 	}
416 
417       /* Remap sizes as necessary.  */
418       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
419       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
420 
421       /* If fields, do likewise for offset and qualifier.  */
422       if (TREE_CODE (t) == FIELD_DECL)
423 	{
424 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
425 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
426 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
427 	}
428 
429       return t;
430     }
431 
432   if (id->do_not_unshare)
433     return *n;
434   else
435     return unshare_expr (*n);
436 }
437 
438 static tree
439 remap_type_1 (tree type, copy_body_data *id)
440 {
441   tree new_tree, t;
442 
443   /* We do need a copy.  Build and register it now.  If this is a pointer or
444      reference type, remap the designated type and make a new pointer or
445      reference type.  */
446   if (TREE_CODE (type) == POINTER_TYPE)
447     {
448       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
449 					 TYPE_MODE (type),
450 					 TYPE_REF_CAN_ALIAS_ALL (type));
451       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 	new_tree = build_type_attribute_qual_variant (new_tree,
453 						      TYPE_ATTRIBUTES (type),
454 						      TYPE_QUALS (type));
455       insert_decl_map (id, type, new_tree);
456       return new_tree;
457     }
458   else if (TREE_CODE (type) == REFERENCE_TYPE)
459     {
460       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
461 					    TYPE_MODE (type),
462 					    TYPE_REF_CAN_ALIAS_ALL (type));
463       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
464 	new_tree = build_type_attribute_qual_variant (new_tree,
465 						      TYPE_ATTRIBUTES (type),
466 						      TYPE_QUALS (type));
467       insert_decl_map (id, type, new_tree);
468       return new_tree;
469     }
470   else
471     new_tree = copy_node (type);
472 
473   insert_decl_map (id, type, new_tree);
474 
475   /* This is a new type, not a copy of an old type.  Need to reassociate
476      variants.  We can handle everything except the main variant lazily.  */
477   t = TYPE_MAIN_VARIANT (type);
478   if (type != t)
479     {
480       t = remap_type (t, id);
481       TYPE_MAIN_VARIANT (new_tree) = t;
482       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
483       TYPE_NEXT_VARIANT (t) = new_tree;
484     }
485   else
486     {
487       TYPE_MAIN_VARIANT (new_tree) = new_tree;
488       TYPE_NEXT_VARIANT (new_tree) = NULL;
489     }
490 
491   if (TYPE_STUB_DECL (type))
492     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
493 
494   /* Lazily create pointer and reference types.  */
495   TYPE_POINTER_TO (new_tree) = NULL;
496   TYPE_REFERENCE_TO (new_tree) = NULL;
497 
498   /* Copy all types that may contain references to local variables; be sure
499      to preserve sharing between the type and its main variant when possible.  */
500   switch (TREE_CODE (new_tree))
501     {
502     case INTEGER_TYPE:
503     case REAL_TYPE:
504     case FIXED_POINT_TYPE:
505     case ENUMERAL_TYPE:
506     case BOOLEAN_TYPE:
507       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
508 	{
509 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
510 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
511 
512 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
513 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
514 	}
515       else
516 	{
517 	  t = TYPE_MIN_VALUE (new_tree);
518 	  if (t && TREE_CODE (t) != INTEGER_CST)
519 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
520 
521 	  t = TYPE_MAX_VALUE (new_tree);
522 	  if (t && TREE_CODE (t) != INTEGER_CST)
523 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
524 	}
525       return new_tree;
526 
527     case FUNCTION_TYPE:
528       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
529 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
530 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
531       else
532         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
533       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
534 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
535 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
536       else
537         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
538       return new_tree;
539 
540     case ARRAY_TYPE:
541       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
542 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
543 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
544       else
545 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
546 
547       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
548 	{
549 	  gcc_checking_assert (TYPE_DOMAIN (type)
550 			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
551 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
552 	}
553       else
554         {
555 	  TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
556 	  /* For array bounds where we have decided not to copy over the bounds
557 	     variable which isn't used in the OpenMP/OpenACC region, change them
558 	     to an uninitialized VAR_DECL temporary.  */
559 	  if (id->adjust_array_error_bounds
560 	      && TYPE_DOMAIN (new_tree)
561 	      && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
562 	      && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
563 	    {
564 	      tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
565 	      DECL_ATTRIBUTES (v)
566 		= tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
567 			     DECL_ATTRIBUTES (v));
568 	      TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
569 	    }
570         }
571       break;
572 
573     case RECORD_TYPE:
574     case UNION_TYPE:
575     case QUAL_UNION_TYPE:
576       if (TYPE_MAIN_VARIANT (type) != type
577 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
578 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
579       else
580 	{
581 	  tree f, nf = NULL;
582 
583 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
584 	    {
585 	      t = remap_decl (f, id);
586 	      DECL_CONTEXT (t) = new_tree;
587 	      DECL_CHAIN (t) = nf;
588 	      nf = t;
589 	    }
590 	  TYPE_FIELDS (new_tree) = nreverse (nf);
591 	}
592       break;
593 
594     case OFFSET_TYPE:
595     default:
596       /* Shouldn't have been thought variable sized.  */
597       gcc_unreachable ();
598     }
599 
600   /* All variants of a type share the same size, so use the already remapped data.  */
601   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
602     {
603       tree s = TYPE_SIZE (type);
604       tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
605       tree su = TYPE_SIZE_UNIT (type);
606       tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
607       gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
608 			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
609 			   || s == mvs);
610       gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
611 			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
612 			   || su == mvsu);
613       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
614       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
615     }
616   else
617     {
618       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
619       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
620     }
621 
622   return new_tree;
623 }
624 
625 /* Helper function for remap_type_2, called through walk_tree.  */
626 
627 static tree
628 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
629 {
630   copy_body_data *id = (copy_body_data *) data;
631 
632   if (TYPE_P (*tp))
633     *walk_subtrees = 0;
634 
635   else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
636     return *tp;
637 
638   return NULL_TREE;
639 }
640 
641 /* Return true if TYPE needs to be remapped because remap_decl on any
642    needed embedded decl returns something other than that decl.  */
643 
644 static bool
645 remap_type_2 (tree type, copy_body_data *id)
646 {
647   tree t;
648 
649 #define RETURN_TRUE_IF_VAR(T) \
650   do								\
651     {								\
652       tree _t = (T);						\
653       if (_t)							\
654 	{							\
655 	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
656 	    return true;					\
657 	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
658 	      && walk_tree (&_t, remap_type_3, id, NULL))	\
659 	    return true;					\
660 	}							\
661     }								\
662   while (0)
663 
664   switch (TREE_CODE (type))
665     {
666     case POINTER_TYPE:
667     case REFERENCE_TYPE:
668     case FUNCTION_TYPE:
669     case METHOD_TYPE:
670       return remap_type_2 (TREE_TYPE (type), id);
671 
672     case INTEGER_TYPE:
673     case REAL_TYPE:
674     case FIXED_POINT_TYPE:
675     case ENUMERAL_TYPE:
676     case BOOLEAN_TYPE:
677       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
678       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
679       return false;
680 
681     case ARRAY_TYPE:
682       if (remap_type_2 (TREE_TYPE (type), id)
683 	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
684 	return true;
685       break;
686 
687     case RECORD_TYPE:
688     case UNION_TYPE:
689     case QUAL_UNION_TYPE:
690       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
691 	if (TREE_CODE (t) == FIELD_DECL)
692 	  {
693 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
694 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
695 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
696 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
697 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
698 	  }
699       break;
700 
701     default:
702       return false;
703     }
704 
705   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
706   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
707   return false;
708 #undef RETURN_TRUE_IF_VAR
709 }
710 
711 tree
712 remap_type (tree type, copy_body_data *id)
713 {
714   tree *node;
715   tree tmp;
716 
717   if (type == NULL)
718     return type;
719 
720   /* See if we have remapped this type.  */
721   node = id->decl_map->get (type);
722   if (node)
723     return *node;
724 
725   /* The type only needs remapping if it's variably modified.  */
726   if (! variably_modified_type_p (type, id->src_fn)
727       /* Don't remap if the copy_decl method doesn't always return a new
728 	 decl and, for all embedded decls, returns the passed-in decl.  */
729       || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
730     {
731       insert_decl_map (id, type, type);
732       return type;
733     }
734 
735   id->remapping_type_depth++;
736   tmp = remap_type_1 (type, id);
737   id->remapping_type_depth--;
738 
739   return tmp;
740 }
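
/* A concrete, hypothetical example of a type that needs remapping here: in

     void f (int n) { char buf[n]; use (buf); }

   the ARRAY_TYPE of BUF is variably modified; its domain bound refers to a
   local decl derived from N.  Copying the body must therefore remap that
   embedded decl and build a fresh ARRAY_TYPE, whereas an ordinary type such
   as "char[16]" is simply mapped to itself by the fast path above.  */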
741 
742 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
743 
744 static bool
745 can_be_nonlocal (tree decl, copy_body_data *id)
746 {
747   /* We cannot duplicate function decls.  */
748   if (TREE_CODE (decl) == FUNCTION_DECL)
749     return true;
750 
751   /* Local static vars must be non-local or we get multiple declaration
752      problems.  */
753   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
754     return true;
755 
756   return false;
757 }
758 
759 static tree
760 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
761 	     copy_body_data *id)
762 {
763   tree old_var;
764   tree new_decls = NULL_TREE;
765 
766   /* Remap its variables.  */
767   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
768     {
769       tree new_var;
770 
771       if (can_be_nonlocal (old_var, id))
772 	{
773 	  /* We need to add this variable to the local decls as otherwise
774 	     nothing else will do so.  */
775 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
776 	    add_local_decl (cfun, old_var);
777 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
778 	      && !DECL_IGNORED_P (old_var)
779 	      && nonlocalized_list)
780 	    vec_safe_push (*nonlocalized_list, old_var);
781 	  continue;
782 	}
783 
784       /* Remap the variable.  */
785       new_var = remap_decl (old_var, id);
786 
787       /* If we didn't remap this variable, we can't mess with its
788 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
789 	 already declared somewhere else, so don't declare it here.  */
790 
791       if (new_var == id->retvar)
792 	;
793       else if (!new_var)
794         {
795 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
796 	      && !DECL_IGNORED_P (old_var)
797 	      && nonlocalized_list)
798 	    vec_safe_push (*nonlocalized_list, old_var);
799 	}
800       else
801 	{
802 	  gcc_assert (DECL_P (new_var));
803 	  DECL_CHAIN (new_var) = new_decls;
804 	  new_decls = new_var;
805 
806 	  /* Also copy value-expressions.  */
807 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
808 	    {
809 	      tree tem = DECL_VALUE_EXPR (new_var);
810 	      bool old_regimplify = id->regimplify;
811 	      id->remapping_type_depth++;
812 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
813 	      id->remapping_type_depth--;
814 	      id->regimplify = old_regimplify;
815 	      SET_DECL_VALUE_EXPR (new_var, tem);
816 	    }
817 	}
818     }
819 
820   return nreverse (new_decls);
821 }
822 
823 /* Copy the BLOCK to contain remapped versions of the variables
824    therein.  And hook the new block into the block-tree.  */
825 
826 static void
827 remap_block (tree *block, copy_body_data *id)
828 {
829   tree old_block;
830   tree new_block;
831 
832   /* Make the new block.  */
833   old_block = *block;
834   new_block = make_node (BLOCK);
835   TREE_USED (new_block) = TREE_USED (old_block);
836   BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
837   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
838   BLOCK_NONLOCALIZED_VARS (new_block)
839     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
840   *block = new_block;
841 
842   /* Remap its variables.  */
843   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
844   					&BLOCK_NONLOCALIZED_VARS (new_block),
845 					id);
846 
847   if (id->transform_lang_insert_block)
848     id->transform_lang_insert_block (new_block);
849 
850   /* Remember the remapped block.  */
851   insert_decl_map (id, old_block, new_block);
852 }
853 
854 /* Copy the whole block tree and root it in id->block.  */
855 
856 static tree
857 remap_blocks (tree block, copy_body_data *id)
858 {
859   tree t;
860   tree new_tree = block;
861 
862   if (!block)
863     return NULL;
864 
865   remap_block (&new_tree, id);
866   gcc_assert (new_tree != block);
867   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
868     prepend_lexical_block (new_tree, remap_blocks (t, id));
869   /* Blocks are in arbitrary order, but to make things slightly prettier,
870      do not swap the order when producing a copy.  */
871   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
872   return new_tree;
873 }
874 
875 /* Remap the block tree rooted at BLOCK to nothing.  */
876 
877 static void
878 remap_blocks_to_null (tree block, copy_body_data *id)
879 {
880   tree t;
881   insert_decl_map (id, block, NULL_TREE);
882   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
883     remap_blocks_to_null (t, id);
884 }
885 
886 /* Remap the location info pointed to by LOCUS.  */
887 
888 static location_t
889 remap_location (location_t locus, copy_body_data *id)
890 {
891   if (LOCATION_BLOCK (locus))
892     {
893       tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
894       gcc_assert (n);
895       if (*n)
896 	return set_block (locus, *n);
897     }
898 
899   locus = LOCATION_LOCUS (locus);
900 
901   if (locus != UNKNOWN_LOCATION && id->block)
902     return set_block (locus, id->block);
903 
904   return locus;
905 }
906 
907 static void
908 copy_statement_list (tree *tp)
909 {
910   tree_stmt_iterator oi, ni;
911   tree new_tree;
912 
913   new_tree = alloc_stmt_list ();
914   ni = tsi_start (new_tree);
915   oi = tsi_start (*tp);
916   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
917   *tp = new_tree;
918 
919   for (; !tsi_end_p (oi); tsi_next (&oi))
920     {
921       tree stmt = tsi_stmt (oi);
922       if (TREE_CODE (stmt) == STATEMENT_LIST)
923 	/* This copy is not redundant; tsi_link_after will smash this
924 	   STATEMENT_LIST into the end of the one we're building, and we
925 	   don't want to do that with the original.  */
926 	copy_statement_list (&stmt);
927       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
928     }
929 }
930 
931 static void
932 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
933 {
934   tree block = BIND_EXPR_BLOCK (*tp);
935   /* Copy (and replace) the statement.  */
936   copy_tree_r (tp, walk_subtrees, NULL);
937   if (block)
938     {
939       remap_block (&block, id);
940       BIND_EXPR_BLOCK (*tp) = block;
941     }
942 
943   if (BIND_EXPR_VARS (*tp))
944     /* This will remap a lot of the same decls again, but this should be
945        harmless.  */
946     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
947 }
948 
949 
950 /* Create a new gimple_seq by remapping all the statements in BODY
951    using the inlining information in ID.  */
952 
953 static gimple_seq
954 remap_gimple_seq (gimple_seq body, copy_body_data *id)
955 {
956   gimple_stmt_iterator si;
957   gimple_seq new_body = NULL;
958 
959   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
960     {
961       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
962       gimple_seq_add_seq (&new_body, new_stmts);
963     }
964 
965   return new_body;
966 }
967 
968 
969 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
970    block using the mapping information in ID.  */
971 
972 static gimple *
973 copy_gimple_bind (gbind *stmt, copy_body_data *id)
974 {
975   gimple *new_bind;
976   tree new_block, new_vars;
977   gimple_seq body, new_body;
978 
979   /* Copy the statement.  Note that we purposely don't use copy_stmt
980      here because we need to remap statements as we copy.  */
981   body = gimple_bind_body (stmt);
982   new_body = remap_gimple_seq (body, id);
983 
984   new_block = gimple_bind_block (stmt);
985   if (new_block)
986     remap_block (&new_block, id);
987 
988   /* This will remap a lot of the same decls again, but this should be
989      harmless.  */
990   new_vars = gimple_bind_vars (stmt);
991   if (new_vars)
992     new_vars = remap_decls (new_vars, NULL, id);
993 
994   new_bind = gimple_build_bind (new_vars, new_body, new_block);
995 
996   return new_bind;
997 }
998 
999 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
1000 
1001 static bool
1002 is_parm (tree decl)
1003 {
1004   if (TREE_CODE (decl) == SSA_NAME)
1005     {
1006       decl = SSA_NAME_VAR (decl);
1007       if (!decl)
1008 	return false;
1009     }
1010 
1011   return (TREE_CODE (decl) == PARM_DECL);
1012 }
1013 
1014 /* Remap the dependence CLIQUE from the source to the destination function
1015    as specified in ID.  */
1016 
1017 static unsigned short
1018 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1019 {
1020   if (clique == 0 || processing_debug_stmt)
1021     return 0;
1022   if (!id->dependence_map)
1023     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1024   bool existed;
1025   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1026   if (!existed)
1027     {
1028       /* Clique 1 is reserved for local ones set by PTA.  */
1029       if (cfun->last_clique == 0)
1030 	cfun->last_clique = 1;
1031       newc = ++cfun->last_clique;
1032     }
1033   return newc;
1034 }
1035 
1036 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
1037    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
1038    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1039    recursing into the child nodes of *TP.  */
1040 
1041 static tree
1042 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1043 {
1044   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1045   copy_body_data *id = (copy_body_data *) wi_p->info;
1046   tree fn = id->src_fn;
1047 
1048   /* For recursive invocations this is no longer the LHS itself.  */
1049   bool is_lhs = wi_p->is_lhs;
1050   wi_p->is_lhs = false;
1051 
1052   if (TREE_CODE (*tp) == SSA_NAME)
1053     {
1054       *tp = remap_ssa_name (*tp, id);
1055       *walk_subtrees = 0;
1056       if (is_lhs)
1057 	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1058       return NULL;
1059     }
1060   else if (auto_var_in_fn_p (*tp, fn))
1061     {
1062       /* Local variables and labels need to be replaced by equivalent
1063 	 variables.  We don't want to copy static variables; there's
1064 	 only one of those, no matter how many times we inline the
1065 	 containing function.  Similarly for globals from an outer
1066 	 function.  */
1067       tree new_decl;
1068 
1069       /* Remap the declaration.  */
1070       new_decl = remap_decl (*tp, id);
1071       gcc_assert (new_decl);
1072       /* Replace this variable with the copy.  */
1073       STRIP_TYPE_NOPS (new_decl);
1074       /* ???  The C++ frontend uses void * pointer zero to initialize
1075          any other type.  This confuses the middle-end type verification.
1076 	 As cloned bodies do not go through gimplification again the fixup
1077 	 there doesn't trigger.  */
1078       if (TREE_CODE (new_decl) == INTEGER_CST
1079 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1080 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1081       *tp = new_decl;
1082       *walk_subtrees = 0;
1083     }
1084   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1085     gcc_unreachable ();
1086   else if (TREE_CODE (*tp) == SAVE_EXPR)
1087     gcc_unreachable ();
1088   else if (TREE_CODE (*tp) == LABEL_DECL
1089 	   && (!DECL_CONTEXT (*tp)
1090 	       || decl_function_context (*tp) == id->src_fn))
1091     /* These may need to be remapped for EH handling.  */
1092     *tp = remap_decl (*tp, id);
1093   else if (TREE_CODE (*tp) == FIELD_DECL)
1094     {
1095       /* If the enclosing record type is variably_modified_type_p, the field
1096 	 has already been remapped.  Otherwise, it need not be.  */
1097       tree *n = id->decl_map->get (*tp);
1098       if (n)
1099 	*tp = *n;
1100       *walk_subtrees = 0;
1101     }
1102   else if (TYPE_P (*tp))
1103     /* Types may need remapping as well.  */
1104     *tp = remap_type (*tp, id);
1105   else if (CONSTANT_CLASS_P (*tp))
1106     {
1107       /* If this is a constant, we have to copy the node iff the type
1108 	 will be remapped.  copy_tree_r will not copy a constant.  */
1109       tree new_type = remap_type (TREE_TYPE (*tp), id);
1110 
1111       if (new_type == TREE_TYPE (*tp))
1112 	*walk_subtrees = 0;
1113 
1114       else if (TREE_CODE (*tp) == INTEGER_CST)
1115 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1116       else
1117 	{
1118 	  *tp = copy_node (*tp);
1119 	  TREE_TYPE (*tp) = new_type;
1120 	}
1121     }
1122   else
1123     {
1124       /* Otherwise, just copy the node.  Note that copy_tree_r already
1125 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
1126 
1127       if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1128 	{
1129 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1130 	     that can happen when a pointer argument is an ADDR_EXPR.
1131 	     Recurse here manually to allow that.  */
1132 	  tree ptr = TREE_OPERAND (*tp, 0);
1133 	  tree type = remap_type (TREE_TYPE (*tp), id);
1134 	  tree old = *tp;
1135 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1136 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1137 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1138 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1139 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1140 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1141 	    {
1142 	      MR_DEPENDENCE_CLIQUE (*tp)
1143 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1144 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1145 	    }
1146 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1147 	     remapped a parameter as the property might be valid only
1148 	     for the parameter itself.  */
1149 	  if (TREE_THIS_NOTRAP (old)
1150 	      && (!is_parm (TREE_OPERAND (old, 0))
1151 		  || (!id->transform_parameter && is_parm (ptr))))
1152 	    TREE_THIS_NOTRAP (*tp) = 1;
1153 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1154 	  *walk_subtrees = 0;
1155 	  return NULL;
1156 	}
1157 
1158       /* Here is the "usual case".  Copy this tree node, and then
1159 	 tweak some special cases.  */
1160       copy_tree_r (tp, walk_subtrees, NULL);
1161 
1162       if (TREE_CODE (*tp) != OMP_CLAUSE)
1163 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1164 
1165       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1166 	{
1167 	  /* The copied TARGET_EXPR has never been expanded, even if the
1168 	     original node was expanded already.  */
1169 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1170 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1171 	}
1172       else if (TREE_CODE (*tp) == ADDR_EXPR)
1173 	{
1174 	  /* Variable substitution need not be simple.  In particular,
1175 	     the MEM_REF substitution above.  Make sure that
1176 	     TREE_CONSTANT and friends are up-to-date.  */
1177 	  int invariant = is_gimple_min_invariant (*tp);
1178 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1179 	  recompute_tree_invariant_for_addr_expr (*tp);
1180 
1181 	  /* If this used to be invariant, but is not any longer,
1182 	     then regimplification is probably needed.  */
1183 	  if (invariant && !is_gimple_min_invariant (*tp))
1184 	    id->regimplify = true;
1185 
1186 	  *walk_subtrees = 0;
1187 	}
1188     }
1189 
1190   /* Update the TREE_BLOCK for the cloned expr.  */
1191   if (EXPR_P (*tp))
1192     {
1193       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1194       tree old_block = TREE_BLOCK (*tp);
1195       if (old_block)
1196 	{
1197 	  tree *n;
1198 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1199 	  if (n)
1200 	    new_block = *n;
1201 	}
1202       TREE_SET_BLOCK (*tp, new_block);
1203     }
1204 
1205   /* Keep iterating.  */
1206   return NULL_TREE;
1207 }
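
/* A sketch of the MEM_REF re-canonicalization done above (the names are
   only for illustration): when inlining

     int deref (int *p) { return *p; }

   at a call "deref (&a)", the parameter P is substituted by &a, so the
   copied body momentarily contains a MEM_REF whose base is an ADDR_EXPR.
   Rebuilding the node with fold_build2 gives fold a chance to canonicalize
   that reference (e.g. combining nested MEM_REFs and constant offsets)
   instead of copying the un-folded form verbatim.  */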
1208 
1209 
1210 /* Called from copy_body_id via walk_tree.  DATA is really a
1211    `copy_body_data *'.  */
1212 
1213 tree
1214 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1215 {
1216   copy_body_data *id = (copy_body_data *) data;
1217   tree fn = id->src_fn;
1218   tree new_block;
1219 
1220   /* Begin by recognizing trees that we'll completely rewrite for the
1221      inlining context.  Our output for these trees is completely
1222      different from out input (e.g. RETURN_EXPR is deleted, and morphs
1223      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1224      duplicated and/or tweaked.  */
1225 
1226   /* When requested, RETURN_EXPRs should be transformed to just the
1227      contained MODIFY_EXPR.  The branch semantics of the return will
1228      be handled elsewhere by manipulating the CFG rather than a statement.  */
1229   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1230     {
1231       tree assignment = TREE_OPERAND (*tp, 0);
1232 
1233       /* If we're returning something, just turn that into an
1234 	 assignment into the equivalent of the original RESULT_DECL.
1235 	 If the "assignment" is just the result decl, the result
1236 	 decl has already been set (e.g. a recent "foo (&result_decl,
1237 	 ...)"); just toss the entire RETURN_EXPR.  */
1238       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1239 	{
1240 	  /* Replace the RETURN_EXPR with (a copy of) the
1241 	     MODIFY_EXPR hanging underneath.  */
1242 	  *tp = copy_node (assignment);
1243 	}
1244       else /* Else the RETURN_EXPR returns no value.  */
1245 	{
1246 	  *tp = NULL;
1247 	  return (tree) (void *)1;
1248 	}
1249     }
1250   else if (TREE_CODE (*tp) == SSA_NAME)
1251     {
1252       *tp = remap_ssa_name (*tp, id);
1253       *walk_subtrees = 0;
1254       return NULL;
1255     }
1256 
1257   /* Local variables and labels need to be replaced by equivalent
1258      variables.  We don't want to copy static variables; there's only
1259      one of those, no matter how many times we inline the containing
1260      function.  Similarly for globals from an outer function.  */
1261   else if (auto_var_in_fn_p (*tp, fn))
1262     {
1263       tree new_decl;
1264 
1265       /* Remap the declaration.  */
1266       new_decl = remap_decl (*tp, id);
1267       gcc_assert (new_decl);
1268       /* Replace this variable with the copy.  */
1269       STRIP_TYPE_NOPS (new_decl);
1270       *tp = new_decl;
1271       *walk_subtrees = 0;
1272     }
1273   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1274     copy_statement_list (tp);
1275   else if (TREE_CODE (*tp) == SAVE_EXPR
1276 	   || TREE_CODE (*tp) == TARGET_EXPR)
1277     remap_save_expr (tp, id->decl_map, walk_subtrees);
1278   else if (TREE_CODE (*tp) == LABEL_DECL
1279 	   && (! DECL_CONTEXT (*tp)
1280 	       || decl_function_context (*tp) == id->src_fn))
1281     /* These may need to be remapped for EH handling.  */
1282     *tp = remap_decl (*tp, id);
1283   else if (TREE_CODE (*tp) == BIND_EXPR)
1284     copy_bind_expr (tp, walk_subtrees, id);
1285   /* Types may need remapping as well.  */
1286   else if (TYPE_P (*tp))
1287     *tp = remap_type (*tp, id);
1288 
1289   /* If this is a constant, we have to copy the node iff the type will be
1290      remapped.  copy_tree_r will not copy a constant.  */
1291   else if (CONSTANT_CLASS_P (*tp))
1292     {
1293       tree new_type = remap_type (TREE_TYPE (*tp), id);
1294 
1295       if (new_type == TREE_TYPE (*tp))
1296 	*walk_subtrees = 0;
1297 
1298       else if (TREE_CODE (*tp) == INTEGER_CST)
1299 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1300       else
1301 	{
1302 	  *tp = copy_node (*tp);
1303 	  TREE_TYPE (*tp) = new_type;
1304 	}
1305     }
1306 
1307   /* Otherwise, just copy the node.  Note that copy_tree_r already
1308      knows not to copy VAR_DECLs, etc., so this is safe.  */
1309   else
1310     {
1311       /* Here we handle trees that are not completely rewritten.
1312 	 First we detect some inlining-induced bogosities for
1313 	 discarding.  */
1314       if (TREE_CODE (*tp) == MODIFY_EXPR
1315 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1316 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1317 	{
1318 	  /* Some assignments VAR = VAR; don't generate any rtl code
1319 	     and thus don't count as variable modification.  Avoid
1320 	     keeping bogosities like 0 = 0.  */
1321 	  tree decl = TREE_OPERAND (*tp, 0), value;
1322 	  tree *n;
1323 
1324 	  n = id->decl_map->get (decl);
1325 	  if (n)
1326 	    {
1327 	      value = *n;
1328 	      STRIP_TYPE_NOPS (value);
1329 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1330 		{
1331 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1332 		  return copy_tree_body_r (tp, walk_subtrees, data);
1333 		}
1334 	    }
1335 	}
1336       else if (TREE_CODE (*tp) == INDIRECT_REF)
1337 	{
1338 	  /* Get rid of *& from inline substitutions that can happen when a
1339 	     pointer argument is an ADDR_EXPR.  */
1340 	  tree decl = TREE_OPERAND (*tp, 0);
1341 	  tree *n = id->decl_map->get (decl);
1342 	  if (n)
1343 	    {
1344 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1345 	         it manually here as we'll eventually get ADDR_EXPRs
1346 		 which lie about their types pointed to.  In this case
1347 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1348 		 but we absolutely rely on that.  As fold_indirect_ref
1349 	         does other useful transformations, try that first, though.  */
1350 	      tree type = TREE_TYPE (*tp);
1351 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1352 	      tree old = *tp;
1353 	      *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1354 	      if (! *tp)
1355 	        {
1356 		  type = remap_type (type, id);
1357 		  if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1358 		    {
1359 		      *tp
1360 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1361 		      /* ???  We should either assert here or build
1362 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1363 			 incompatible types to our IL.  */
1364 		      if (! *tp)
1365 			*tp = TREE_OPERAND (ptr, 0);
1366 		    }
1367 	          else
1368 		    {
1369 	              *tp = build1 (INDIRECT_REF, type, ptr);
1370 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1371 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1372 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1373 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1374 			 have remapped a parameter as the property might be
1375 			 valid only for the parameter itself.  */
1376 		      if (TREE_THIS_NOTRAP (old)
1377 			  && (!is_parm (TREE_OPERAND (old, 0))
1378 			      || (!id->transform_parameter && is_parm (ptr))))
1379 		        TREE_THIS_NOTRAP (*tp) = 1;
1380 		    }
1381 		}
1382 	      *walk_subtrees = 0;
1383 	      return NULL;
1384 	    }
1385 	}
1386       else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1387 	{
1388 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1389 	     that can happen when a pointer argument is an ADDR_EXPR.
1390 	     Recurse here manually to allow that.  */
1391 	  tree ptr = TREE_OPERAND (*tp, 0);
1392 	  tree type = remap_type (TREE_TYPE (*tp), id);
1393 	  tree old = *tp;
1394 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1395 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1396 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1397 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1398 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1399 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1400 	    {
1401 	      MR_DEPENDENCE_CLIQUE (*tp)
1402 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1403 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1404 	    }
1405 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1406 	     remapped a parameter as the property might be valid only
1407 	     for the parameter itself.  */
1408 	  if (TREE_THIS_NOTRAP (old)
1409 	      && (!is_parm (TREE_OPERAND (old, 0))
1410 		  || (!id->transform_parameter && is_parm (ptr))))
1411 	    TREE_THIS_NOTRAP (*tp) = 1;
1412 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1413 	  *walk_subtrees = 0;
1414 	  return NULL;
1415 	}
1416 
1417       /* Here is the "usual case".  Copy this tree node, and then
1418 	 tweak some special cases.  */
1419       copy_tree_r (tp, walk_subtrees, NULL);
1420 
1421       /* If EXPR has a block defined, map it to the newly constructed block.
1422          When inlining we want EXPRs without a block to appear in the block
1423 	 of the function call if we are not remapping a type.  */
1424       if (EXPR_P (*tp))
1425 	{
1426 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1427 	  if (TREE_BLOCK (*tp))
1428 	    {
1429 	      tree *n;
1430 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1431 	      if (n)
1432 		new_block = *n;
1433 	    }
1434 	  TREE_SET_BLOCK (*tp, new_block);
1435 	}
1436 
1437       if (TREE_CODE (*tp) != OMP_CLAUSE)
1438 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1439 
1440       /* The copied TARGET_EXPR has never been expanded, even if the
1441 	 original node was expanded already.  */
1442       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1443 	{
1444 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1445 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1446 	}
1447 
1448       /* Variable substitution need not be simple.  In particular, the
1449 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1450 	 and friends are up-to-date.  */
1451       else if (TREE_CODE (*tp) == ADDR_EXPR)
1452 	{
1453 	  int invariant = is_gimple_min_invariant (*tp);
1454 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1455 
1456 	  /* Handle the case where we substituted an INDIRECT_REF
1457 	     into the operand of the ADDR_EXPR.  */
1458 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1459 	      && !id->do_not_fold)
1460 	    {
1461 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1462 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1463 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1464 	      *tp = t;
1465 	    }
1466 	  else
1467 	    recompute_tree_invariant_for_addr_expr (*tp);
1468 
1469 	  /* If this used to be invariant, but is not any longer,
1470 	     then regimplification is probably needed.  */
1471 	  if (invariant && !is_gimple_min_invariant (*tp))
1472 	    id->regimplify = true;
1473 
1474 	  *walk_subtrees = 0;
1475 	}
1476     }
1477 
1478   /* Keep iterating.  */
1479   return NULL_TREE;
1480 }
1481 
1482 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1483    source function, map that to the duplicate EH region number in
1484    the destination function.  */
1485 
1486 static int
1487 remap_eh_region_nr (int old_nr, copy_body_data *id)
1488 {
1489   eh_region old_r, new_r;
1490 
1491   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1492   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1493 
1494   return new_r->index;
1495 }
1496 
1497 /* Similar, but operate on INTEGER_CSTs.  */
1498 
1499 static tree
1500 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1501 {
1502   int old_nr, new_nr;
1503 
1504   old_nr = tree_to_shwi (old_t_nr);
1505   new_nr = remap_eh_region_nr (old_nr, id);
1506 
1507   return build_int_cst (integer_type_node, new_nr);
1508 }
1509 
1510 /* Helper for copy_bb.  Remap statement STMT using the inlining
1511    information in ID.  Return the new statement copy.  */
1512 
1513 static gimple_seq
1514 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1515 {
1516   gimple *copy = NULL;
1517   struct walk_stmt_info wi;
1518   bool skip_first = false;
1519   gimple_seq stmts = NULL;
1520 
1521   if (is_gimple_debug (stmt)
1522       && (gimple_debug_nonbind_marker_p (stmt)
1523 	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1524 	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1525     return NULL;
1526 
1527   /* Begin by recognizing trees that we'll completely rewrite for the
1528      inlining context.  Our output for these trees is completely
1529      different from our input (e.g. RETURN_EXPR is deleted and morphs
1530      into an edge).  Further down, we'll handle trees that get
1531      duplicated and/or tweaked.  */
1532 
1533   /* When requested, GIMPLE_RETURN should be transformed to just the
1534      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1535      be handled elsewhere by manipulating the CFG rather than the
1536      statement.  */
1537   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1538     {
1539       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1540 
1541       /* If we're returning something, just turn that into an
1542 	 assignment to the equivalent of the original RESULT_DECL.
1543 	 If RETVAL is just the result decl, the result decl has
1544 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1545 	 just toss the entire GIMPLE_RETURN.  Likewise for when the
1546 	 call doesn't want the return value.  */
1547       if (retval
1548 	  && (TREE_CODE (retval) != RESULT_DECL
1549 	      && (!id->call_stmt
1550 		  || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1551 	      && (TREE_CODE (retval) != SSA_NAME
1552 		  || ! SSA_NAME_VAR (retval)
1553 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1554         {
1555 	  copy = gimple_build_assign (id->do_not_unshare
1556 				      ? id->retvar : unshare_expr (id->retvar),
1557 				      retval);
1558 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1559 	  skip_first = true;
1560 	}
1561       else
1562 	return NULL;
1563     }
1564   else if (gimple_has_substatements (stmt))
1565     {
1566       gimple_seq s1, s2;
1567 
1568       /* When cloning bodies from the C++ front end, we will be handed bodies
1569 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1570 	 have embedded statements.  */
1571       switch (gimple_code (stmt))
1572 	{
1573 	case GIMPLE_BIND:
1574 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1575 	  break;
1576 
1577 	case GIMPLE_CATCH:
1578 	  {
1579 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1580 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1581 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1582 	  }
1583 	  break;
1584 
1585 	case GIMPLE_EH_FILTER:
1586 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1587 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1588 	  break;
1589 
1590 	case GIMPLE_TRY:
1591 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1592 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1593 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1594 	  break;
1595 
1596 	case GIMPLE_WITH_CLEANUP_EXPR:
1597 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1598 	  copy = gimple_build_wce (s1);
1599 	  break;
1600 
1601 	case GIMPLE_OMP_PARALLEL:
1602 	  {
1603 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1604 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1605 	    copy = gimple_build_omp_parallel
1606 	             (s1,
1607 		      gimple_omp_parallel_clauses (omp_par_stmt),
1608 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1609 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1610 	  }
1611 	  break;
1612 
1613 	case GIMPLE_OMP_TASK:
1614 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1615 	  copy = gimple_build_omp_task
1616 	           (s1,
1617 		    gimple_omp_task_clauses (stmt),
1618 		    gimple_omp_task_child_fn (stmt),
1619 		    gimple_omp_task_data_arg (stmt),
1620 		    gimple_omp_task_copy_fn (stmt),
1621 		    gimple_omp_task_arg_size (stmt),
1622 		    gimple_omp_task_arg_align (stmt));
1623 	  break;
1624 
1625 	case GIMPLE_OMP_FOR:
1626 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1627 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1628 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1629 				       gimple_omp_for_clauses (stmt),
1630 				       gimple_omp_for_collapse (stmt), s2);
1631 	  {
1632 	    size_t i;
1633 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1634 	      {
1635 		gimple_omp_for_set_index (copy, i,
1636 					  gimple_omp_for_index (stmt, i));
1637 		gimple_omp_for_set_initial (copy, i,
1638 					    gimple_omp_for_initial (stmt, i));
1639 		gimple_omp_for_set_final (copy, i,
1640 					  gimple_omp_for_final (stmt, i));
1641 		gimple_omp_for_set_incr (copy, i,
1642 					 gimple_omp_for_incr (stmt, i));
1643 		gimple_omp_for_set_cond (copy, i,
1644 					 gimple_omp_for_cond (stmt, i));
1645 	      }
1646 	  }
1647 	  break;
1648 
1649 	case GIMPLE_OMP_MASTER:
1650 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1651 	  copy = gimple_build_omp_master (s1);
1652 	  break;
1653 
1654 	case GIMPLE_OMP_TASKGROUP:
1655 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1656 	  copy = gimple_build_omp_taskgroup
1657 		   (s1, gimple_omp_taskgroup_clauses (stmt));
1658 	  break;
1659 
1660 	case GIMPLE_OMP_ORDERED:
1661 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1662 	  copy = gimple_build_omp_ordered
1663 		   (s1,
1664 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1665 	  break;
1666 
1667 	case GIMPLE_OMP_SCAN:
1668 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1669 	  copy = gimple_build_omp_scan
1670 		   (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1671 	  break;
1672 
1673 	case GIMPLE_OMP_SECTION:
1674 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1675 	  copy = gimple_build_omp_section (s1);
1676 	  break;
1677 
1678 	case GIMPLE_OMP_SECTIONS:
1679 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1680 	  copy = gimple_build_omp_sections
1681 	           (s1, gimple_omp_sections_clauses (stmt));
1682 	  break;
1683 
1684 	case GIMPLE_OMP_SINGLE:
1685 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1686 	  copy = gimple_build_omp_single
1687 	           (s1, gimple_omp_single_clauses (stmt));
1688 	  break;
1689 
1690 	case GIMPLE_OMP_TARGET:
1691 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1692 	  copy = gimple_build_omp_target
1693 		   (s1, gimple_omp_target_kind (stmt),
1694 		    gimple_omp_target_clauses (stmt));
1695 	  break;
1696 
1697 	case GIMPLE_OMP_TEAMS:
1698 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1699 	  copy = gimple_build_omp_teams
1700 		   (s1, gimple_omp_teams_clauses (stmt));
1701 	  break;
1702 
1703 	case GIMPLE_OMP_CRITICAL:
1704 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1705 	  copy = gimple_build_omp_critical (s1,
1706 					    gimple_omp_critical_name
1707 					      (as_a <gomp_critical *> (stmt)),
1708 					    gimple_omp_critical_clauses
1709 					      (as_a <gomp_critical *> (stmt)));
1710 	  break;
1711 
1712 	case GIMPLE_TRANSACTION:
1713 	  {
1714 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1715 	    gtransaction *new_trans_stmt;
1716 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1717 				   id);
1718 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1719 	    gimple_transaction_set_subcode (new_trans_stmt,
1720 	      gimple_transaction_subcode (old_trans_stmt));
1721 	    gimple_transaction_set_label_norm (new_trans_stmt,
1722 	      gimple_transaction_label_norm (old_trans_stmt));
1723 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1724 	      gimple_transaction_label_uninst (old_trans_stmt));
1725 	    gimple_transaction_set_label_over (new_trans_stmt,
1726 	      gimple_transaction_label_over (old_trans_stmt));
1727 	  }
1728 	  break;
1729 
1730 	default:
1731 	  gcc_unreachable ();
1732 	}
1733     }
1734   else
1735     {
1736       if (gimple_assign_copy_p (stmt)
1737 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1738 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1739 	{
1740 	  /* Here we handle statements that are not completely rewritten.
1741 	     First we detect some inlining-induced bogosities for
1742 	     discarding.  */
1743 
1744 	  /* Some assignments VAR = VAR; don't generate any rtl code
1745 	     and thus don't count as variable modification.  Avoid
1746 	     keeping bogosities like 0 = 0.  */
1747 	  tree decl = gimple_assign_lhs (stmt), value;
1748 	  tree *n;
1749 
1750 	  n = id->decl_map->get (decl);
1751 	  if (n)
1752 	    {
1753 	      value = *n;
1754 	      STRIP_TYPE_NOPS (value);
1755 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1756 		return NULL;
1757 	    }
1758 	}
1759 
1760       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1761 	 in a block that we aren't copying during tree_function_versioning,
1762 	 just drop the clobber stmt.  */
1763       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1764 	{
1765 	  tree lhs = gimple_assign_lhs (stmt);
1766 	  if (TREE_CODE (lhs) == MEM_REF
1767 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1768 	    {
1769 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1770 	      if (gimple_bb (def_stmt)
1771 		  && !bitmap_bit_p (id->blocks_to_copy,
1772 				    gimple_bb (def_stmt)->index))
1773 		return NULL;
1774 	    }
1775 	}
1776 
1777       /* We do not allow CLOBBERs of handled components.  In case
1778 	 the returned value is stored via such a handled component, remove
1779 	 the clobber so the stmt verifier is happy.  */
1780       if (gimple_clobber_p (stmt)
1781 	  && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1782 	{
1783 	  tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1784 	  if (!DECL_P (remapped)
1785 	      && TREE_CODE (remapped) != MEM_REF)
1786 	    return NULL;
1787 	}
1788 
1789       if (gimple_debug_bind_p (stmt))
1790 	{
1791 	  gdebug *copy
1792 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1793 				       gimple_debug_bind_get_value (stmt),
1794 				       stmt);
1795 	  if (id->reset_location)
1796 	    gimple_set_location (copy, input_location);
1797 	  id->debug_stmts.safe_push (copy);
1798 	  gimple_seq_add_stmt (&stmts, copy);
1799 	  return stmts;
1800 	}
1801       if (gimple_debug_source_bind_p (stmt))
1802 	{
1803 	  gdebug *copy = gimple_build_debug_source_bind
1804 	                   (gimple_debug_source_bind_get_var (stmt),
1805 			    gimple_debug_source_bind_get_value (stmt),
1806 			    stmt);
1807 	  if (id->reset_location)
1808 	    gimple_set_location (copy, input_location);
1809 	  id->debug_stmts.safe_push (copy);
1810 	  gimple_seq_add_stmt (&stmts, copy);
1811 	  return stmts;
1812 	}
1813       if (gimple_debug_nonbind_marker_p (stmt))
1814 	{
1815 	  /* If the inlined function has too many debug markers,
1816 	     don't copy them.  */
1817 	  if (id->src_cfun->debug_marker_count
1818 	      > param_max_debug_marker_count)
1819 	    return stmts;
1820 
1821 	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1822 	  if (id->reset_location)
1823 	    gimple_set_location (copy, input_location);
1824 	  id->debug_stmts.safe_push (copy);
1825 	  gimple_seq_add_stmt (&stmts, copy);
1826 	  return stmts;
1827 	}
1828 
1829       /* Create a new deep copy of the statement.  */
1830       copy = gimple_copy (stmt);
1831 
1832       /* Clear flags that need revisiting.  */
1833       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1834         {
1835 	  if (gimple_call_tail_p (call_stmt))
1836 	    gimple_call_set_tail (call_stmt, false);
1837 	  if (gimple_call_from_thunk_p (call_stmt))
1838 	    gimple_call_set_from_thunk (call_stmt, false);
1839 	  if (gimple_call_internal_p (call_stmt))
1840 	    switch (gimple_call_internal_fn (call_stmt))
1841 	      {
1842 	      case IFN_GOMP_SIMD_LANE:
1843 	      case IFN_GOMP_SIMD_VF:
1844 	      case IFN_GOMP_SIMD_LAST_LANE:
1845 	      case IFN_GOMP_SIMD_ORDERED_START:
1846 	      case IFN_GOMP_SIMD_ORDERED_END:
1847 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1848 	        break;
1849 	      default:
1850 		break;
1851 	      }
1852 	}
1853 
1854       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1855 	 RESX and EH_DISPATCH.  */
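      /* For example (region numbers hypothetical), a copied statement
	   _1 = __builtin_eh_pointer (2);
	 referring to region 2 of the source function is rewritten to name
	 the corresponding duplicated region in the destination function, say
	   _1 = __builtin_eh_pointer (7);
	 using the old-to-new mapping recorded in id->eh_map.  */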
1856       if (id->eh_map)
1857 	switch (gimple_code (copy))
1858 	  {
1859 	  case GIMPLE_CALL:
1860 	    {
1861 	      tree r, fndecl = gimple_call_fndecl (copy);
1862 	      if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1863 		switch (DECL_FUNCTION_CODE (fndecl))
1864 		  {
1865 		  case BUILT_IN_EH_COPY_VALUES:
1866 		    r = gimple_call_arg (copy, 1);
1867 		    r = remap_eh_region_tree_nr (r, id);
1868 		    gimple_call_set_arg (copy, 1, r);
1869 		    /* FALLTHRU */
1870 
1871 		  case BUILT_IN_EH_POINTER:
1872 		  case BUILT_IN_EH_FILTER:
1873 		    r = gimple_call_arg (copy, 0);
1874 		    r = remap_eh_region_tree_nr (r, id);
1875 		    gimple_call_set_arg (copy, 0, r);
1876 		    break;
1877 
1878 		  default:
1879 		    break;
1880 		  }
1881 
1882 	      /* Reset alias info if we didn't apply measures to
1883 		 keep it valid over inlining by setting DECL_PT_UID.  */
1884 	      if (!id->src_cfun->gimple_df
1885 		  || !id->src_cfun->gimple_df->ipa_pta)
1886 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1887 	    }
1888 	    break;
1889 
1890 	  case GIMPLE_RESX:
1891 	    {
1892 	      gresx *resx_stmt = as_a <gresx *> (copy);
1893 	      int r = gimple_resx_region (resx_stmt);
1894 	      r = remap_eh_region_nr (r, id);
1895 	      gimple_resx_set_region (resx_stmt, r);
1896 	    }
1897 	    break;
1898 
1899 	  case GIMPLE_EH_DISPATCH:
1900 	    {
1901 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1902 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1903 	      r = remap_eh_region_nr (r, id);
1904 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1905 	    }
1906 	    break;
1907 
1908 	  default:
1909 	    break;
1910 	  }
1911     }
1912 
1913   /* If STMT has a block defined, map it to the newly constructed block.  */
1914   if (tree block = gimple_block (copy))
1915     {
1916       tree *n;
1917       n = id->decl_map->get (block);
1918       gcc_assert (n);
1919       gimple_set_block (copy, *n);
1920     }
1921   if (id->param_body_adjs)
1922     {
1923       gimple_seq extra_stmts = NULL;
1924       id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1925       if (!gimple_seq_empty_p (extra_stmts))
1926 	{
1927 	  memset (&wi, 0, sizeof (wi));
1928 	  wi.info = id;
1929 	  for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1930 	       !gsi_end_p (egsi);
1931 	       gsi_next (&egsi))
1932 	    walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1933 	  gimple_seq_add_seq (&stmts, extra_stmts);
1934 	}
1935     }
1936 
1937   if (id->reset_location)
1938     gimple_set_location (copy, input_location);
1939 
1940   /* Debug statements ought to be rebuilt and not copied.  */
1941   gcc_checking_assert (!is_gimple_debug (copy));
1942 
1943   /* Remap all the operands in COPY.  */
1944   memset (&wi, 0, sizeof (wi));
1945   wi.info = id;
1946   if (skip_first)
1947     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1948   else
1949     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1950 
1951   /* Clear the copied virtual operands.  We are not remapping them here
1952      but are going to recreate them from scratch.  */
1953   if (gimple_has_mem_ops (copy))
1954     {
1955       gimple_set_vdef (copy, NULL_TREE);
1956       gimple_set_vuse (copy, NULL_TREE);
1957     }
1958 
1959   if (cfun->can_throw_non_call_exceptions)
1960     {
1961       /* When inlining a function which does not have non-call exceptions
1962 	 enabled into a function that has (which only happens with
1963 	 always-inline) we have to fix up stmts that cannot throw.  */
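      /* A sketch of the GIMPLE_COND rewrite done below (names hypothetical):
	   if (x_1 > y_2) ...
	 where the comparison itself may trap (e.g. a floating-point compare)
	 becomes
	   _3 = x_1 > y_2;
	   if (_3 != 0) ...
	 so the potentially trapping operation sits in an assignment that can
	 carry EH edges, which a GIMPLE_COND cannot.  */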
1964       if (gcond *cond = dyn_cast <gcond *> (copy))
1965 	if (gimple_could_trap_p (cond))
1966 	  {
1967 	    gassign *cmp
1968 	      = gimple_build_assign (make_ssa_name (boolean_type_node),
1969 				     gimple_cond_code (cond),
1970 				     gimple_cond_lhs (cond),
1971 				     gimple_cond_rhs (cond));
1972 	    gimple_seq_add_stmt (&stmts, cmp);
1973 	    gimple_cond_set_code (cond, NE_EXPR);
1974 	    gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1975 	    gimple_cond_set_rhs (cond, boolean_false_node);
1976 	  }
1977       if (gassign *ass = dyn_cast <gassign *> (copy))
1978 	if ((gimple_assign_rhs_code (ass) == COND_EXPR
1979 	     || gimple_assign_rhs_code (ass) == VEC_COND_EXPR)
1980 	    && gimple_could_trap_p (ass))
1981 	  {
1982 	    tree def = make_ssa_name (TREE_TYPE (gimple_assign_rhs1 (ass)));
1983 	    gassign *cmp = gimple_build_assign (def, gimple_assign_rhs1 (ass));
1984 	    gimple_seq_add_stmt (&stmts, cmp);
1985 	    gimple_assign_set_rhs1 (ass, def);
1986 	  }
1987     }
1988 
1989   gimple_seq_add_stmt (&stmts, copy);
1990   return stmts;
1991 }
1992 
1993 
1994 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
1995    later.  */
1996 
1997 static basic_block
1998 copy_bb (copy_body_data *id, basic_block bb,
1999          profile_count num, profile_count den)
2000 {
2001   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2002   basic_block copy_basic_block;
2003   tree decl;
2004   basic_block prev;
2005 
2006   profile_count::adjust_for_ipa_scaling (&num, &den);
2007 
2008   /* Search for previous copied basic block.  */
2009   prev = bb->prev_bb;
2010   while (!prev->aux)
2011     prev = prev->prev_bb;
2012 
2013   /* create_basic_block() will append every new block to
2014      basic_block_info automatically.  */
2015   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2016   copy_basic_block->count = bb->count.apply_scale (num, den);
2017 
2018   copy_gsi = gsi_start_bb (copy_basic_block);
2019 
2020   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2021     {
2022       gimple_seq stmts;
2023       gimple *stmt = gsi_stmt (gsi);
2024       gimple *orig_stmt = stmt;
2025       gimple_stmt_iterator stmts_gsi;
2026       bool stmt_added = false;
2027 
2028       id->regimplify = false;
2029       stmts = remap_gimple_stmt (stmt, id);
2030 
2031       if (gimple_seq_empty_p (stmts))
2032 	continue;
2033 
2034       seq_gsi = copy_gsi;
2035 
2036       for (stmts_gsi = gsi_start (stmts);
2037 	   !gsi_end_p (stmts_gsi); )
2038 	{
2039 	  stmt = gsi_stmt (stmts_gsi);
2040 
2041 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
2042 	  gsi_next (&stmts_gsi);
2043 
2044 	  if (gimple_nop_p (stmt))
2045 	      continue;
2046 
2047 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2048 					    orig_stmt);
2049 
2050 	  /* With return slot optimization we can end up with
2051 	     non-gimple (foo *)&this->m, fix that here.  */
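	  /* A minimal sketch of the fixup (names hypothetical): a copied
	     statement such as
	       D.2000 = (foo *) &this_1(D)->m;
	     has a non-GIMPLE operand, so the address is forced into a
	     temporary first:
	       _2 = &this_1(D)->m;
	       D.2000 = (foo *) _2;  */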
2052 	  if (is_gimple_assign (stmt)
2053 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2054 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2055 	    {
2056 	      tree new_rhs;
2057 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
2058 						  gimple_assign_rhs1 (stmt),
2059 						  true, NULL, false,
2060 						  GSI_CONTINUE_LINKING);
2061 	      gimple_assign_set_rhs1 (stmt, new_rhs);
2062 	      id->regimplify = false;
2063 	    }
2064 
2065 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2066 
2067 	  if (id->regimplify)
2068 	    gimple_regimplify_operands (stmt, &seq_gsi);
2069 
2070 	  stmt_added = true;
2071 	}
2072 
2073       if (!stmt_added)
2074 	continue;
2075 
2076       /* If copy_basic_block was empty at the start of this iteration,
2077 	 call gsi_start_bb again to get at the newly added statements.  */
2078       if (gsi_end_p (copy_gsi))
2079 	copy_gsi = gsi_start_bb (copy_basic_block);
2080       else
2081 	gsi_next (&copy_gsi);
2082 
2083       /* Process the new statement.  The call to gimple_regimplify_operands
2084 	 possibly turned the statement into multiple statements; we
2085 	 need to process all of them.  */
2086       do
2087 	{
2088 	  tree fn;
2089 	  gcall *call_stmt;
2090 
2091 	  stmt = gsi_stmt (copy_gsi);
2092 	  call_stmt = dyn_cast <gcall *> (stmt);
2093 	  if (call_stmt
2094 	      && gimple_call_va_arg_pack_p (call_stmt)
2095 	      && id->call_stmt
2096 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
2097 	    {
2098 	      /* __builtin_va_arg_pack () should be replaced by
2099 		 all arguments corresponding to ... in the caller.  */
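	      /* Illustrative sketch (declarations hypothetical): inlining
		   static inline int f (int x, ...)
		   { return g (x, __builtin_va_arg_pack ()); }
		 into the call "f (1, a, b, c)" rebuilds the inner call as
		   g (1, a, b, c);
		 i.e. the pack is expanded to the caller's anonymous
		 arguments.  */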
2100 	      tree p;
2101 	      gcall *new_call;
2102 	      vec<tree> argarray;
2103 	      size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2104 	      size_t nargs = nargs_caller;
2105 
2106 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2107 		{
2108 		  /* Avoid crashing on invalid IL that doesn't have a
2109 		     varargs function or that doesn't pass enough arguments.  */
2110 		  if (nargs == 0)
2111 		    break;
2112 		  nargs--;
2113 		}
2114 
2115 	      /* Create the new array of arguments.  */
2116 	      size_t nargs_callee = gimple_call_num_args (call_stmt);
2117 	      size_t n = nargs + nargs_callee;
2118 	      argarray.create (n);
2119 	      argarray.safe_grow_cleared (n);
2120 
2121 	      /* Copy all the arguments before '...'  */
2122 	      if (nargs_callee)
2123 		memcpy (argarray.address (),
2124 			gimple_call_arg_ptr (call_stmt, 0),
2125 			nargs_callee * sizeof (tree));
2126 
2127 	      /* Append the arguments passed in '...'  */
2128 	      if (nargs)
2129 		memcpy (argarray.address () + nargs_callee,
2130 			gimple_call_arg_ptr (id->call_stmt, 0)
2131 			+ (nargs_caller - nargs), nargs * sizeof (tree));
2132 
2133 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2134 						argarray);
2135 
2136 	      argarray.release ();
2137 
2138 	      /* Copy all GIMPLE_CALL flags, location and block, except
2139 		 GF_CALL_VA_ARG_PACK.  */
2140 	      gimple_call_copy_flags (new_call, call_stmt);
2141 	      gimple_call_set_va_arg_pack (new_call, false);
2142 	      gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2143 	      /* location includes block.  */
2144 	      gimple_set_location (new_call, gimple_location (stmt));
2145 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2146 
2147 	      gsi_replace (&copy_gsi, new_call, false);
2148 	      stmt = new_call;
2149 	    }
2150 	  else if (call_stmt
2151 		   && id->call_stmt
2152 		   && (decl = gimple_call_fndecl (stmt))
2153 		   && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2154 	    {
2155 	      /* __builtin_va_arg_pack_len () should be replaced by
2156 		 the number of anonymous arguments.  */
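	      /* For instance (hypothetical wrapper): if the inlined varargs
		 wrapper has one named parameter and the caller invokes it as
		 "wrap (fmt, a, b, c)", __builtin_va_arg_pack_len () is
		 replaced here by the constant 3.  */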
2157 	      size_t nargs = gimple_call_num_args (id->call_stmt);
2158 	      tree count, p;
2159 	      gimple *new_stmt;
2160 
2161 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2162 		nargs--;
2163 
2164 	      if (!gimple_call_lhs (stmt))
2165 		{
2166 		  /* Drop unused calls.  */
2167 		  gsi_remove (&copy_gsi, false);
2168 		  continue;
2169 		}
2170 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2171 		{
2172 		  count = build_int_cst (integer_type_node, nargs);
2173 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2174 		  gsi_replace (&copy_gsi, new_stmt, false);
2175 		  stmt = new_stmt;
2176 		}
2177 	      else if (nargs != 0)
2178 		{
2179 		  tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2180 		  count = build_int_cst (integer_type_node, nargs);
2181 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2182 						  PLUS_EXPR, newlhs, count);
2183 		  gimple_call_set_lhs (stmt, newlhs);
2184 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2185 		}
2186 	    }
2187 	  else if (call_stmt
2188 		   && id->call_stmt
2189 		   && gimple_call_internal_p (stmt)
2190 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2191 	    {
2192 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
2193 	      gsi_remove (&copy_gsi, false);
2194 	      continue;
2195 	    }
2196 
2197 	  /* Statements produced by inlining can be unfolded, especially
2198 	     when we constant propagated some operands.  We can't fold
2199 	     them right now for two reasons:
2200 	     1) folding requires SSA_NAME_DEF_STMTs to be correct,
2201 	     2) we can't change function calls to builtins.
2202 	     So we just mark the statement for later folding.  We mark
2203 	     all new statements, instead of just the statements that have
2204 	     changed by some nontrivial substitution, so even statements
2205 	     made foldable indirectly are updated.  If this turns out to be
2206 	     expensive, copy_body can be told to watch for nontrivial
2207 	     changes.  */
2208 	  if (id->statements_to_fold)
2209 	    id->statements_to_fold->add (stmt);
2210 
2211 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2212 	     callgraph edges and update or duplicate them.  */
2213 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2214 	    {
2215 	      struct cgraph_edge *edge;
2216 
2217 	      switch (id->transform_call_graph_edges)
2218 		{
2219 		case CB_CGE_DUPLICATE:
2220 		  edge = id->src_node->get_edge (orig_stmt);
2221 		  if (edge)
2222 		    {
2223 		      struct cgraph_edge *old_edge = edge;
2224 
2225 		      /* A speculative call consists of multiple edges - an
2226 			 indirect edge and one or more direct edges.
2227 			 Duplicate the whole thing and distribute frequencies
2228 			 accordingly.  */
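		      /* Conceptually (sketch, names hypothetical) a
			 speculative call "fn_3 (...)" carries an indirect
			 edge plus direct edge(s) to the predicted target(s),
			 as if it were
			   if (fn_3 == predicted) predicted (...); else fn_3 (...);
			 All of these edges are cloned below and the copied
			 block's count is split among them in proportion to
			 the original edge counts.  */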
2229 		      if (edge->speculative)
2230 			{
2231 			  int n = 0;
2232 			  profile_count direct_cnt
2233 				 = profile_count::zero ();
2234 
2235 			  /* First figure out the distribution of counts
2236 			     so we can re-scale BB profile accordingly.  */
2237 			  for (cgraph_edge *e = old_edge; e;
2238 			       e = e->next_speculative_call_target ())
2239 			    direct_cnt = direct_cnt + e->count;
2240 
2241 			  cgraph_edge *indirect
2242 				 = old_edge->speculative_call_indirect_edge ();
2243 			  profile_count indir_cnt = indirect->count;
2244 
2245 			  /* Next iterate over all direct edges, clone each one
2246 			     and its corresponding reference and update the profile.  */
2247 			  for (cgraph_edge *e = old_edge;
2248 			       e;
2249 			       e = e->next_speculative_call_target ())
2250 			    {
2251 			      profile_count cnt = e->count;
2252 
2253 			      id->dst_node->clone_reference
2254 				 (e->speculative_call_target_ref (), stmt);
2255 			      edge = e->clone (id->dst_node, call_stmt,
2256 					       gimple_uid (stmt), num, den,
2257 					       true);
2258 			      profile_probability prob
2259 				 = cnt.probability_in (direct_cnt
2260 						       + indir_cnt);
2261 			      edge->count
2262 				 = copy_basic_block->count.apply_probability
2263 					 (prob);
2264 			      n++;
2265 			    }
2266 			  gcc_checking_assert
2267 				 (indirect->num_speculative_call_targets_p ()
2268 				  == n);
2269 
2270 			  /* Duplicate the indirect edge after all direct edges
2271 			     have been cloned.  */
2272 			  indirect = indirect->clone (id->dst_node, call_stmt,
2273 						      gimple_uid (stmt),
2274 						      num, den,
2275 						      true);
2276 
2277 			  profile_probability prob
2278 			     = indir_cnt.probability_in (direct_cnt
2279 							 + indir_cnt);
2280 			  indirect->count
2281 			     = copy_basic_block->count.apply_probability (prob);
2282 			}
2283 		      else
2284 			{
2285 			  edge = edge->clone (id->dst_node, call_stmt,
2286 					      gimple_uid (stmt),
2287 					      num, den,
2288 					      true);
2289 			  edge->count = copy_basic_block->count;
2290 			}
2291 		    }
2292 		  break;
2293 
2294 		case CB_CGE_MOVE_CLONES:
2295 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2296 								call_stmt);
2297 		  edge = id->dst_node->get_edge (stmt);
2298 		  break;
2299 
2300 		case CB_CGE_MOVE:
2301 		  edge = id->dst_node->get_edge (orig_stmt);
2302 		  if (edge)
2303 		    edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2304 		  break;
2305 
2306 		default:
2307 		  gcc_unreachable ();
2308 		}
2309 
2310 	      /* Constant propagation on arguments done during inlining
2311 		 may create a new direct call.  Produce an edge for it.  */
2312 	      if ((!edge
2313 		   || (edge->indirect_inlining_edge
2314 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2315 		  && id->dst_node->definition
2316 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2317 		{
2318 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2319 
2320 		  /* We have a missing edge in the callgraph.  This can happen
2321 		     when previous inlining turned an indirect call into a
2322 		     direct call by constant propagating arguments or we are
2323 		     producing a dead clone (for further cloning).  In all
2324 		     other cases we hit a bug (incorrect node sharing is the
2325 		     most common reason for missing edges).  */
2326 		  gcc_assert (!dest->definition
2327 			      || dest->address_taken
2328 		  	      || !id->src_node->definition
2329 			      || !id->dst_node->definition);
2330 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2331 		    id->dst_node->create_edge_including_clones
2332 		      (dest, orig_stmt, call_stmt, bb->count,
2333 		       CIF_ORIGINALLY_INDIRECT_CALL);
2334 		  else
2335 		    id->dst_node->create_edge (dest, call_stmt,
2336 					bb->count)->inline_failed
2337 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2338 		  if (dump_file)
2339 		    {
2340 		      fprintf (dump_file, "Created new direct edge to %s\n",
2341 			       dest->dump_name ());
2342 		    }
2343 		}
2344 
2345 	      notice_special_calls (as_a <gcall *> (stmt));
2346 	    }
2347 
2348 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2349 				      id->eh_map, id->eh_lp_nr);
2350 
2351 	  gsi_next (&copy_gsi);
2352 	}
2353       while (!gsi_end_p (copy_gsi));
2354 
2355       copy_gsi = gsi_last_bb (copy_basic_block);
2356     }
2357 
2358   return copy_basic_block;
2359 }
2360 
2361 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2362    form is quite easy, since the dominator relationship for the old basic
2363    blocks does not change.
2364 
2365    There is, however, an exception where inlining might change the dominator
2366    relation across EH edges from basic blocks within inlined functions to
2367    landing pads in the function we inline into.
2368 
2369    The function fills in PHI_RESULTs of such PHI nodes if they refer
2370    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2371    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2372    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2373    set, and this means that there will be no overlapping live ranges
2374    for the underlying symbol.
2375 
2376    This might change in the future if we allow redirecting of EH edges and
2377    we might then want to change the way we build the CFG pre-inlining to
2378    include all the possible edges.  */
2379 static void
2380 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2381 				  bool can_throw, bool nonlocal_goto)
2382 {
2383   edge e;
2384   edge_iterator ei;
2385 
2386   FOR_EACH_EDGE (e, ei, bb->succs)
2387     if (!e->dest->aux
2388 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2389       {
2390 	gphi *phi;
2391 	gphi_iterator si;
2392 
2393 	if (!nonlocal_goto)
2394 	  gcc_assert (e->flags & EDGE_EH);
2395 
2396 	if (!can_throw)
2397 	  gcc_assert (!(e->flags & EDGE_EH));
2398 
2399 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2400 	  {
2401 	    edge re;
2402 
2403 	    phi = si.phi ();
2404 
2405 	    /* For abnormal goto/call edges the receiver can be the
2406 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2407 
2408 	    gcc_assert ((e->flags & EDGE_EH)
2409 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2410 
2411 	    re = find_edge (ret_bb, e->dest);
2412 	    gcc_checking_assert (re);
2413 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2414 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2415 
2416 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2417 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2418 	  }
2419       }
2420 }
2421 
2422 /* Insert clobbers for automatic variables of inlined ID->src_fn
2423    function at the start of basic block ID->eh_landing_pad_dest.  */
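/* For example (sketch): for each such variable VAR of the inlined function
   that is still live across an EH edge into the landing pad, a statement
     VAR ={v} {CLOBBER};
   is inserted at the start of that block, telling later passes that the
   inlined variable's storage is dead once the exception is raised.  */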
2424 
2425 static void
2426 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2427 {
2428   tree var;
2429   basic_block bb = id->eh_landing_pad_dest;
2430   live_vars_map *vars = NULL;
2431   unsigned int cnt = 0;
2432   unsigned int i;
2433   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2434     if (VAR_P (var)
2435 	&& !DECL_HARD_REGISTER (var)
2436 	&& !TREE_THIS_VOLATILE (var)
2437 	&& !DECL_HAS_VALUE_EXPR_P (var)
2438 	&& !is_gimple_reg (var)
2439 	&& auto_var_in_fn_p (var, id->src_fn)
2440 	&& !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2441       {
2442 	tree *t = id->decl_map->get (var);
2443 	if (!t)
2444 	  continue;
2445 	tree new_var = *t;
2446 	if (VAR_P (new_var)
2447 	    && !DECL_HARD_REGISTER (new_var)
2448 	    && !TREE_THIS_VOLATILE (new_var)
2449 	    && !DECL_HAS_VALUE_EXPR_P (new_var)
2450 	    && !is_gimple_reg (new_var)
2451 	    && auto_var_in_fn_p (new_var, id->dst_fn))
2452 	  {
2453 	    if (vars == NULL)
2454 	      vars = new live_vars_map;
2455             vars->put (DECL_UID (var), cnt++);
2456 	  }
2457       }
2458   if (vars == NULL)
2459     return;
2460 
2461   vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2462   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2463     if (VAR_P (var))
2464       {
2465 	edge e;
2466 	edge_iterator ei;
2467 	bool needed = false;
2468 	unsigned int *v = vars->get (DECL_UID (var));
2469 	if (v == NULL)
2470 	  continue;
2471 	FOR_EACH_EDGE (e, ei, bb->preds)
2472 	  if ((e->flags & EDGE_EH) != 0
2473 	      && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2474 	    {
2475 	      basic_block src_bb = (basic_block) e->src->aux;
2476 
2477 	      if (bitmap_bit_p (&live[src_bb->index], *v))
2478 		{
2479 		  needed = true;
2480 		  break;
2481 		}
2482 	    }
2483 	if (needed)
2484 	  {
2485 	    tree new_var = *id->decl_map->get (var);
2486 	    gimple_stmt_iterator gsi = gsi_after_labels (bb);
2487 	    tree clobber = build_clobber (TREE_TYPE (new_var));
2488 	    gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2489 	    gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2490 	  }
2491       }
2492   destroy_live_vars (live);
2493   delete vars;
2494 }
2495 
2496 /* Copy edges from BB into its copy constructed earlier, scale profile
2497    accordingly.  Edges will be taken care of later.  Assume aux
2498    pointers to point to the copies of each BB.  Return true if any
2499    debug stmts are left after a statement that must end the basic block.  */
2500 
2501 static bool
2502 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2503 		   basic_block ret_bb, basic_block abnormal_goto_dest,
2504 		   copy_body_data *id)
2505 {
2506   basic_block new_bb = (basic_block) bb->aux;
2507   edge_iterator ei;
2508   edge old_edge;
2509   gimple_stmt_iterator si;
2510   bool need_debug_cleanup = false;
2511 
2512   /* Use the indices from the original blocks to create edges for the
2513      new ones.  */
2514   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2515     if (!(old_edge->flags & EDGE_EH))
2516       {
2517 	edge new_edge;
2518 	int flags = old_edge->flags;
2519 	location_t locus = old_edge->goto_locus;
2520 
2521 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2522 	if (old_edge->dest->index == EXIT_BLOCK
2523 	    && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2524 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2525 	  flags |= EDGE_FALLTHRU;
2526 
2527 	new_edge
2528 	  = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2529 	new_edge->probability = old_edge->probability;
2530 	if (!id->reset_location)
2531 	  new_edge->goto_locus = remap_location (locus, id);
2532       }
2533 
2534   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2535     return false;
2536 
2537   /* When doing function splitting, we must decrease the count of the return
2538      block which was previously reachable from blocks we did not copy.  */
2539   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2540     FOR_EACH_EDGE (old_edge, ei, bb->preds)
2541       if (old_edge->src->index != ENTRY_BLOCK
2542 	  && !old_edge->src->aux)
2543 	new_bb->count -= old_edge->count ().apply_scale (num, den);
2544 
2545   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2546     {
2547       gimple *copy_stmt;
2548       bool can_throw, nonlocal_goto;
2549 
2550       copy_stmt = gsi_stmt (si);
2551       if (!is_gimple_debug (copy_stmt))
2552 	update_stmt (copy_stmt);
2553 
2554       /* Do this before the possible split_block.  */
2555       gsi_next (&si);
2556 
2557       /* If this tree could throw an exception, there are two
2558          cases where we need to add abnormal edge(s): the
2559          tree wasn't in a region and there is a "current
2560          region" in the caller; or the original tree had
2561          EH edges.  In both cases split the block after the tree,
2562          and add abnormal edge(s) as needed; we need both
2563          those from the callee and the caller.
2564          We check whether the copy can throw, because the const
2565          propagation can change an INDIRECT_REF which throws
2566          into a COMPONENT_REF which doesn't.  If the copy
2567          can throw, the original could also throw.  */
2568       can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2569       nonlocal_goto
2570 	= (stmt_can_make_abnormal_goto (copy_stmt)
2571 	   && !computed_goto_p (copy_stmt));
2572 
2573       if (can_throw || nonlocal_goto)
2574 	{
2575 	  if (!gsi_end_p (si))
2576 	    {
2577 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2578 		gsi_next (&si);
2579 	      if (gsi_end_p (si))
2580 		need_debug_cleanup = true;
2581 	    }
2582 	  if (!gsi_end_p (si))
2583 	    /* Note that bb's predecessor edges aren't necessarily
2584 	       right at this point; split_block doesn't care.  */
2585 	    {
2586 	      edge e = split_block (new_bb, copy_stmt);
2587 
2588 	      new_bb = e->dest;
2589 	      new_bb->aux = e->src->aux;
2590 	      si = gsi_start_bb (new_bb);
2591 	    }
2592 	}
2593 
2594       bool update_probs = false;
2595 
2596       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2597 	{
2598 	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2599 	  update_probs = true;
2600 	}
2601       else if (can_throw)
2602 	{
2603 	  make_eh_edges (copy_stmt);
2604 	  update_probs = true;
2605 	}
2606 
2607       /* EH edges may not match old edges.  Copy as much as possible.  */
2608       if (update_probs)
2609 	{
2610           edge e;
2611           edge_iterator ei;
2612 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2613 
2614           FOR_EACH_EDGE (old_edge, ei, bb->succs)
2615             if ((old_edge->flags & EDGE_EH)
2616 		&& (e = find_edge (copy_stmt_bb,
2617 				   (basic_block) old_edge->dest->aux))
2618 		&& (e->flags & EDGE_EH))
2619 	      e->probability = old_edge->probability;
2620 
2621           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2622 	    if (e->flags & EDGE_EH)
2623 	      {
2624 		if (!e->probability.initialized_p ())
2625 		  e->probability = profile_probability::never ();
2626 		if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2627 		  {
2628 		    if (id->eh_landing_pad_dest == NULL)
2629 		      id->eh_landing_pad_dest = e->dest;
2630 		    else
2631 		      gcc_assert (id->eh_landing_pad_dest == e->dest);
2632 		  }
2633 	      }
2634         }
2635 
2636 
2637       /* If the call we inline cannot make an abnormal goto, do not add
2638          additional abnormal edges but only retain those already present
2639 	 in the original function body.  */
2640       if (abnormal_goto_dest == NULL)
2641 	nonlocal_goto = false;
2642       if (nonlocal_goto)
2643 	{
2644 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2645 
2646 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2647 	    nonlocal_goto = false;
2648 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2649 	     in OpenMP regions which aren't allowed to be left abnormally.
2650 	     So, no need to add abnormal edge in that case.  */
2651 	  else if (is_gimple_call (copy_stmt)
2652 		   && gimple_call_internal_p (copy_stmt)
2653 		   && (gimple_call_internal_fn (copy_stmt)
2654 		       == IFN_ABNORMAL_DISPATCHER)
2655 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2656 	    nonlocal_goto = false;
2657 	  else
2658 	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2659 				   EDGE_ABNORMAL);
2660 	}
2661 
2662       if ((can_throw || nonlocal_goto)
2663 	  && gimple_in_ssa_p (cfun))
2664 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2665 					  can_throw, nonlocal_goto);
2666     }
2667   return need_debug_cleanup;
2668 }
2669 
2670 /* Copy the PHIs.  All blocks and edges are copied, some blocks
2671    were possibly split and new outgoing EH edges inserted.
2672    BB points to the block of the original function and AUX pointers link
2673    the original and newly copied blocks.  */
2674 
2675 static void
2676 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2677 {
2678   basic_block const new_bb = (basic_block) bb->aux;
2679   edge_iterator ei;
2680   gphi *phi;
2681   gphi_iterator si;
2682   edge new_edge;
2683   bool inserted = false;
2684 
2685   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2686     {
2687       tree res, new_res;
2688       gphi *new_phi;
2689 
2690       phi = si.phi ();
2691       res = PHI_RESULT (phi);
2692       new_res = res;
2693       if (!virtual_operand_p (res))
2694 	{
2695 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2696 	  if (EDGE_COUNT (new_bb->preds) == 0)
2697 	    {
2698 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2699 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2700 	    }
2701 	  else
2702 	    {
2703 	      new_phi = create_phi_node (new_res, new_bb);
2704 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2705 		{
2706 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2707 					     bb);
2708 		  tree arg;
2709 		  tree new_arg;
2710 		  edge_iterator ei2;
2711 		  location_t locus;
2712 
2713 		  /* When doing partial cloning, we allow PHIs on the entry
2714 		     block as long as all the arguments are the same.
2715 		     Find any input edge to get the argument to copy.  */
2716 		  if (!old_edge)
2717 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2718 		      if (!old_edge->src->aux)
2719 			break;
2720 
2721 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2722 		  new_arg = arg;
2723 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2724 		  gcc_assert (new_arg);
2725 		  /* With return slot optimization we can end up with
2726 		     non-gimple (foo *)&this->m, fix that here.  */
2727 		  if (TREE_CODE (new_arg) != SSA_NAME
2728 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2729 		      && !is_gimple_val (new_arg))
2730 		    {
2731 		      gimple_seq stmts = NULL;
2732 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2733 						      NULL);
2734 		      gsi_insert_seq_on_edge (new_edge, stmts);
2735 		      inserted = true;
2736 		    }
2737 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2738 		  if (id->reset_location)
2739 		    locus = input_location;
2740 		  else
2741 		    locus = remap_location (locus, id);
2742 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2743 		}
2744 	    }
2745 	}
2746     }
2747 
2748   /* Commit the delayed edge insertions.  */
2749   if (inserted)
2750     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2751       gsi_commit_one_edge_insert (new_edge, NULL);
2752 }
2753 
2754 
2755 /* Wrapper for remap_decl so it can be used as a callback.  */
2756 
2757 static tree
2758 remap_decl_1 (tree decl, void *data)
2759 {
2760   return remap_decl (decl, (copy_body_data *) data);
2761 }
2762 
2763 /* Build struct function and associated data structures for the new clone
2764    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2765    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2766 
2767 static void
2768 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2769 {
2770   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2771 
2772   /* Register specific tree functions.  */
2773   gimple_register_cfg_hooks ();
2774 
2775   /* Get clean struct function.  */
2776   push_struct_function (new_fndecl, true);
2777   targetm.target_option.relayout_function (new_fndecl);
2778 
2779   /* We will rebuild these, so just sanity check that they are empty.  */
2780   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2781   gcc_assert (cfun->local_decls == NULL);
2782   gcc_assert (cfun->cfg == NULL);
2783   gcc_assert (cfun->decl == new_fndecl);
2784 
2785   /* Copy items we preserve during cloning.  */
2786   cfun->static_chain_decl = src_cfun->static_chain_decl;
2787   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2788   cfun->function_end_locus = src_cfun->function_end_locus;
2789   cfun->curr_properties = src_cfun->curr_properties;
2790   cfun->last_verified = src_cfun->last_verified;
2791   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2792   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2793   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2794   cfun->calls_eh_return = src_cfun->calls_eh_return;
2795   cfun->stdarg = src_cfun->stdarg;
2796   cfun->after_inlining = src_cfun->after_inlining;
2797   cfun->can_throw_non_call_exceptions
2798     = src_cfun->can_throw_non_call_exceptions;
2799   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2800   cfun->returns_struct = src_cfun->returns_struct;
2801   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2802 
2803   init_empty_tree_cfg ();
2804 
2805   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2806 
2807   profile_count num = count;
2808   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2809   profile_count::adjust_for_ipa_scaling (&num, &den);
2810 
2811   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2812     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2813 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2814   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2815     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2816 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2817   if (src_cfun->eh)
2818     init_eh_for_function ();
2819 
2820   if (src_cfun->gimple_df)
2821     {
2822       init_tree_ssa (cfun);
2823       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2824       if (cfun->gimple_df->in_ssa_p)
2825 	init_ssa_operands (cfun);
2826     }
2827 }
2828 
2829 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2830    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2831    successor has multiple predecessors, reset the values of the moved
2832    debug stmts, otherwise keep them.  */
2833 
2834 static void
2835 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2836 {
2837   edge e;
2838   edge_iterator ei;
2839   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2840 
2841   if (gsi_end_p (si)
2842       || gsi_one_before_end_p (si)
2843       || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2844 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2845     return;
2846 
2847   FOR_EACH_EDGE (e, ei, new_bb->succs)
2848     {
2849       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2850       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2851       while (is_gimple_debug (gsi_stmt (ssi)))
2852 	{
2853 	  gimple *stmt = gsi_stmt (ssi);
2854 	  gdebug *new_stmt;
2855 	  tree var;
2856 	  tree value;
2857 
2858 	  /* For the last edge move the debug stmts instead of copying
2859 	     them.  */
2860 	  if (ei_one_before_end_p (ei))
2861 	    {
2862 	      si = ssi;
2863 	      gsi_prev (&ssi);
2864 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2865 		{
2866 		  gimple_debug_bind_reset_value (stmt);
2867 		  gimple_set_location (stmt, UNKNOWN_LOCATION);
2868 		}
2869 	      gsi_remove (&si, false);
2870 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2871 	      continue;
2872 	    }
2873 
2874 	  if (gimple_debug_bind_p (stmt))
2875 	    {
2876 	      var = gimple_debug_bind_get_var (stmt);
2877 	      if (single_pred_p (e->dest))
2878 		{
2879 		  value = gimple_debug_bind_get_value (stmt);
2880 		  value = unshare_expr (value);
2881 		  new_stmt = gimple_build_debug_bind (var, value, stmt);
2882 		}
2883 	      else
2884 		new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2885 	    }
2886 	  else if (gimple_debug_source_bind_p (stmt))
2887 	    {
2888 	      var = gimple_debug_source_bind_get_var (stmt);
2889 	      value = gimple_debug_source_bind_get_value (stmt);
2890 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2891 	    }
2892 	  else if (gimple_debug_nonbind_marker_p (stmt))
2893 	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2894 	  else
2895 	    gcc_unreachable ();
2896 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2897 	  id->debug_stmts.safe_push (new_stmt);
2898 	  gsi_prev (&ssi);
2899 	}
2900     }
2901 }
2902 
2903 /* Make a copy of the sub-loops of SRC_PARENT and place them
2904    as siblings of DEST_PARENT.  */
2905 
2906 static void
2907 copy_loops (copy_body_data *id,
2908 	    class loop *dest_parent, class loop *src_parent)
2909 {
2910   class loop *src_loop = src_parent->inner;
2911   while (src_loop)
2912     {
2913       if (!id->blocks_to_copy
2914 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2915 	{
2916 	  class loop *dest_loop = alloc_loop ();
2917 
2918 	  /* Assign the new loop its header and latch and associate
2919 	     those with the new loop.  */
2920 	  dest_loop->header = (basic_block)src_loop->header->aux;
2921 	  dest_loop->header->loop_father = dest_loop;
2922 	  if (src_loop->latch != NULL)
2923 	    {
2924 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2925 	      dest_loop->latch->loop_father = dest_loop;
2926 	    }
2927 
2928 	  /* Copy loop meta-data.  */
2929 	  copy_loop_info (src_loop, dest_loop);
2930 	  if (dest_loop->unroll)
2931 	    cfun->has_unroll = true;
2932 	  if (dest_loop->force_vectorize)
2933 	    cfun->has_force_vectorize_loops = true;
2934 	  if (id->src_cfun->last_clique != 0)
2935 	    dest_loop->owned_clique
2936 	      = remap_dependence_clique (id,
2937 					 src_loop->owned_clique
2938 					 ? src_loop->owned_clique : 1);
2939 
2940 	  /* Finally place it into the loop array and the loop tree.  */
2941 	  place_new_loop (cfun, dest_loop);
2942 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2943 
2944 	  if (src_loop->simduid)
2945 	    {
2946 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2947 	      cfun->has_simduid_loops = true;
2948 	    }
2949 
2950 	  /* Recurse.  */
2951 	  copy_loops (id, dest_loop, src_loop);
2952 	}
2953       src_loop = src_loop->next;
2954     }
2955 }
2956 
2957 /* Call redirect_call_stmt_to_callee on all calls in BB.  */
2958 
2959 void
2960 redirect_all_calls (copy_body_data * id, basic_block bb)
2961 {
2962   gimple_stmt_iterator si;
2963   gimple *last = last_stmt (bb);
2964   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2965     {
2966       gimple *stmt = gsi_stmt (si);
2967       if (is_gimple_call (stmt))
2968 	{
2969 	  tree old_lhs = gimple_call_lhs (stmt);
2970 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2971 	  if (edge)
2972 	    {
2973 	      gimple *new_stmt
2974 		= cgraph_edge::redirect_call_stmt_to_callee (edge);
2975 	      /* If the IPA-SRA transformation, run as part of edge redirection,
2976 		 removed the LHS because it is unused, save it to
2977 		 killed_new_ssa_names so that we can prune it from debug
2978 		 statements.  */
2979 	      if (old_lhs
2980 		  && TREE_CODE (old_lhs) == SSA_NAME
2981 		  && !gimple_call_lhs (new_stmt))
2982 		{
2983 		  if (!id->killed_new_ssa_names)
2984 		    id->killed_new_ssa_names = new hash_set<tree> (16);
2985 		  id->killed_new_ssa_names->add (old_lhs);
2986 		}
2987 
2988 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2989 		gimple_purge_dead_eh_edges (bb);
2990 	    }
2991 	}
2992     }
2993 }
2994 
2995 /* Make a copy of the body of FN so that it can be inserted inline in
2996    another function.  Walks FN via CFG, returns new fndecl.  */
2997 
2998 static tree
2999 copy_cfg_body (copy_body_data * id,
3000 	       basic_block entry_block_map, basic_block exit_block_map,
3001 	       basic_block new_entry)
3002 {
3003   tree callee_fndecl = id->src_fn;
3004   /* Original cfun for the callee, doesn't change.  */
3005   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3006   struct function *cfun_to_copy;
3007   basic_block bb;
3008   tree new_fndecl = NULL;
3009   bool need_debug_cleanup = false;
3010   int last;
3011   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3012   profile_count num = entry_block_map->count;
3013 
3014   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3015 
3016   /* Register specific tree functions.  */
3017   gimple_register_cfg_hooks ();
3018 
3019   /* If we are inlining just a region of the function, make sure to connect
3020      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
3021      be part of a loop, we must compute the frequency and probability of
3022      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3023      probabilities of edges incoming from the nonduplicated region.  */
3024   if (new_entry)
3025     {
3026       edge e;
3027       edge_iterator ei;
3028       den = profile_count::zero ();
3029 
3030       FOR_EACH_EDGE (e, ei, new_entry->preds)
3031 	if (!e->src->aux)
3032 	  den += e->count ();
3033       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3034     }
3035 
3036   profile_count::adjust_for_ipa_scaling (&num, &den);
3037 
3038   /* Must have a CFG here at this point.  */
3039   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3040 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
3041 
3042 
3043   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3044   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3045   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3046   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3047 
3048   /* Duplicate any exception-handling regions.  */
3049   if (cfun->eh)
3050     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3051 				       remap_decl_1, id);
3052 
3053   /* Use aux pointers to map the original blocks to copy.  */
3054   FOR_EACH_BB_FN (bb, cfun_to_copy)
3055     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3056       {
3057 	basic_block new_bb = copy_bb (id, bb, num, den);
3058 	bb->aux = new_bb;
3059 	new_bb->aux = bb;
3060 	new_bb->loop_father = entry_block_map->loop_father;
3061       }
3062 
3063   last = last_basic_block_for_fn (cfun);
3064 
3065   /* Now that we've duplicated the blocks, duplicate their edges.  */
3066   basic_block abnormal_goto_dest = NULL;
3067   if (id->call_stmt
3068       && stmt_can_make_abnormal_goto (id->call_stmt))
3069     {
3070       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3071 
3072       bb = gimple_bb (id->call_stmt);
3073       gsi_next (&gsi);
3074       if (gsi_end_p (gsi))
3075 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3076     }
3077   FOR_ALL_BB_FN (bb, cfun_to_copy)
3078     if (!id->blocks_to_copy
3079 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3080       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3081 					       abnormal_goto_dest, id);
3082 
3083   if (id->eh_landing_pad_dest)
3084     {
3085       add_clobbers_to_eh_landing_pad (id);
3086       id->eh_landing_pad_dest = NULL;
3087     }
3088 
3089   if (new_entry)
3090     {
3091       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3092 			  EDGE_FALLTHRU);
3093       e->probability = profile_probability::always ();
3094     }
3095 
3096   /* Duplicate the loop tree, if available and wanted.  */
3097   if (loops_for_fn (src_cfun) != NULL
3098       && current_loops != NULL)
3099     {
3100       copy_loops (id, entry_block_map->loop_father,
3101 		  get_loop (src_cfun, 0));
3102       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
3103       loops_state_set (LOOPS_NEED_FIXUP);
3104     }
3105 
3106   /* If the loop tree in the source function needed fixup, mark the
3107      destination loop tree for fixup, too.  */
3108   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3109     loops_state_set (LOOPS_NEED_FIXUP);
3110 
3111   if (gimple_in_ssa_p (cfun))
3112     FOR_ALL_BB_FN (bb, cfun_to_copy)
3113       if (!id->blocks_to_copy
3114 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3115 	copy_phis_for_bb (bb, id);
3116 
3117   FOR_ALL_BB_FN (bb, cfun_to_copy)
3118     if (bb->aux)
3119       {
3120 	if (need_debug_cleanup
3121 	    && bb->index != ENTRY_BLOCK
3122 	    && bb->index != EXIT_BLOCK)
3123 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3124 	/* Update call edge destinations.  This cannot be done before loop
3125 	   info is updated, because we may split basic blocks.  */
3126 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3127 	    && bb->index != ENTRY_BLOCK
3128 	    && bb->index != EXIT_BLOCK)
3129 	  redirect_all_calls (id, (basic_block)bb->aux);
3130 	((basic_block)bb->aux)->aux = NULL;
3131 	bb->aux = NULL;
3132       }
3133 
3134   /* Zero out AUX fields of newly created blocks during EH edge
3135      insertion.  */
3136   for (; last < last_basic_block_for_fn (cfun); last++)
3137     {
3138       if (need_debug_cleanup)
3139 	maybe_move_debug_stmts_to_successors (id,
3140 					      BASIC_BLOCK_FOR_FN (cfun, last));
3141       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3142       /* Update call edge destinations.  This cannot be done before loop
3143 	 info is updated, because we may split basic blocks.  */
3144       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3145 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3146     }
3147   entry_block_map->aux = NULL;
3148   exit_block_map->aux = NULL;
3149 
3150   if (id->eh_map)
3151     {
3152       delete id->eh_map;
3153       id->eh_map = NULL;
3154     }
3155   if (id->dependence_map)
3156     {
3157       delete id->dependence_map;
3158       id->dependence_map = NULL;
3159     }
3160 
3161   return new_fndecl;
3162 }
3163 
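/* A hedged, illustrative example of the NUM/DEN scaling done above (the
   numbers are made up, not taken from a real profile): if the source
   function's entry block was trained to execute 1000 times (DEN) and the
   entry of the copy executes 250 times (NUM), a copied block whose original
   count was 400 receives roughly 400 * 250 / 1000 = 100, so the relative
   hotness of blocks inside the copied region is preserved.  */
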
3164 /* Copy the debug STMT using ID.  We deal with these statements in a
3165    special way: if any variable in their VALUE expression wasn't
3166    remapped yet, we won't remap it, because that would get decl uids
3167    out of sync, causing codegen differences between -g and -g0.  If
3168    this arises, we drop the VALUE expression altogether.  */
3169 
3170 static void
3171 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3172 {
3173   tree t, *n;
3174   struct walk_stmt_info wi;
3175 
3176   if (tree block = gimple_block (stmt))
3177     {
3178       n = id->decl_map->get (block);
3179       gimple_set_block (stmt, n ? *n : id->block);
3180     }
3181 
3182   if (gimple_debug_nonbind_marker_p (stmt))
3183     return;
3184 
3185   /* Remap all the operands in COPY.  */
3186   memset (&wi, 0, sizeof (wi));
3187   wi.info = id;
3188 
3189   processing_debug_stmt = 1;
3190 
3191   if (gimple_debug_source_bind_p (stmt))
3192     t = gimple_debug_source_bind_get_var (stmt);
3193   else if (gimple_debug_bind_p (stmt))
3194     t = gimple_debug_bind_get_var (stmt);
3195   else
3196     gcc_unreachable ();
3197 
3198   if (TREE_CODE (t) == PARM_DECL && id->debug_map
3199       && (n = id->debug_map->get (t)))
3200     {
3201       gcc_assert (VAR_P (*n));
3202       t = *n;
3203     }
3204   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3205     /* T is a non-localized variable.  */;
3206   else
3207     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3208 
3209   if (gimple_debug_bind_p (stmt))
3210     {
3211       gimple_debug_bind_set_var (stmt, t);
3212 
3213       if (gimple_debug_bind_has_value_p (stmt))
3214 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3215 		   remap_gimple_op_r, &wi, NULL);
3216 
3217       /* Punt if any decl couldn't be remapped.  */
3218       if (processing_debug_stmt < 0)
3219 	gimple_debug_bind_reset_value (stmt);
3220     }
3221   else if (gimple_debug_source_bind_p (stmt))
3222     {
3223       gimple_debug_source_bind_set_var (stmt, t);
3224       /* When inlining and source bind refers to one of the optimized
3225 	 away parameters, change the source bind into normal debug bind
3226 	 referring to the corresponding DEBUG_EXPR_DECL that should have
3227 	 been bound before the call stmt.  */
3228       t = gimple_debug_source_bind_get_value (stmt);
3229       if (t != NULL_TREE
3230 	  && TREE_CODE (t) == PARM_DECL
3231 	  && id->call_stmt)
3232 	{
3233 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3234 	  unsigned int i;
3235 	  if (debug_args != NULL)
3236 	    {
3237 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3238 		if ((**debug_args)[i] == DECL_ORIGIN (t)
3239 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3240 		  {
3241 		    t = (**debug_args)[i + 1];
3242 		    stmt->subcode = GIMPLE_DEBUG_BIND;
3243 		    gimple_debug_bind_set_value (stmt, t);
3244 		    break;
3245 		  }
3246 	    }
3247 	}
3248       if (gimple_debug_source_bind_p (stmt))
3249 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3250 		   remap_gimple_op_r, &wi, NULL);
3251     }
3252 
3253   processing_debug_stmt = 0;
3254 
3255   update_stmt (stmt);
3256 }
3257 
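/* A schematic, made-up illustration of the source-bind rewriting above.
   Suppose a parameter X of the inlined function was optimized away, so its
   body contains a source bind such as

     # DEBUG y s=> x

   If a DEBUG_EXPR_DECL for X was recorded in the function's debug args and
   bound just before the call statement, the statement becomes an ordinary
   bind

     # DEBUG y => D#1

   and the debugger can still show a value for Y after inlining.  */
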
3258 /* Process deferred debug stmts.  In order to give values better odds
3259    of being successfully remapped, we delay the processing of debug
3260    stmts until all other stmts that might require remapping are
3261    processed.  */
3262 
3263 static void
3264 copy_debug_stmts (copy_body_data *id)
3265 {
3266   size_t i;
3267   gdebug *stmt;
3268 
3269   if (!id->debug_stmts.exists ())
3270     return;
3271 
3272   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3273     copy_debug_stmt (stmt, id);
3274 
3275   id->debug_stmts.release ();
3276 }
3277 
3278 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3279    another function.  */
3280 
3281 static tree
3282 copy_tree_body (copy_body_data *id)
3283 {
3284   tree fndecl = id->src_fn;
3285   tree body = DECL_SAVED_TREE (fndecl);
3286 
3287   walk_tree (&body, copy_tree_body_r, id, NULL);
3288 
3289   return body;
3290 }
3291 
3292 /* Make a copy of the body of FN so that it can be inserted inline in
3293    another function.  */
3294 
3295 static tree
3296 copy_body (copy_body_data *id,
3297 	   basic_block entry_block_map, basic_block exit_block_map,
3298 	   basic_block new_entry)
3299 {
3300   tree fndecl = id->src_fn;
3301   tree body;
3302 
3303   /* If this body has a CFG, walk CFG and copy.  */
3304   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3305   body = copy_cfg_body (id, entry_block_map, exit_block_map,
3306 			new_entry);
3307   copy_debug_stmts (id);
3308   delete id->killed_new_ssa_names;
3309   id->killed_new_ssa_names = NULL;
3310 
3311   return body;
3312 }
3313 
3314 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3315    defined in function FN, or of a data member thereof.  */
3316 
3317 static bool
3318 self_inlining_addr_expr (tree value, tree fn)
3319 {
3320   tree var;
3321 
3322   if (TREE_CODE (value) != ADDR_EXPR)
3323     return false;
3324 
3325   var = get_base_address (TREE_OPERAND (value, 0));
3326 
3327   return var && auto_var_in_fn_p (var, fn);
3328 }
3329 
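/* A hypothetical GNU C example of what this predicate detects (the
   identifiers are invented).  During recursive inlining of

     static inline int depth (struct node *n, int *best)
     {
       int local = 0;
       if (!best)
	 return depth (n, &local);
       ...
     }

   the recursive call passes &LOCAL, an ADDR_EXPR of an automatic variable
   of the very function being inlined; such a value must go through the
   decl map rather than being propagated as if it were a caller invariant.  */
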
3330 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3331    lexical block and line number information from base_stmt, if given,
3332    or from the last stmt of the block otherwise.  */
3333 
3334 static gimple *
3335 insert_init_debug_bind (copy_body_data *id,
3336 			basic_block bb, tree var, tree value,
3337 			gimple *base_stmt)
3338 {
3339   gimple *note;
3340   gimple_stmt_iterator gsi;
3341   tree tracked_var;
3342 
3343   if (!gimple_in_ssa_p (id->src_cfun))
3344     return NULL;
3345 
3346   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3347     return NULL;
3348 
3349   tracked_var = target_for_debug_bind (var);
3350   if (!tracked_var)
3351     return NULL;
3352 
3353   if (bb)
3354     {
3355       gsi = gsi_last_bb (bb);
3356       if (!base_stmt && !gsi_end_p (gsi))
3357 	base_stmt = gsi_stmt (gsi);
3358     }
3359 
3360   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3361 
3362   if (bb)
3363     {
3364       if (!gsi_end_p (gsi))
3365 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3366       else
3367 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3368     }
3369 
3370   return note;
3371 }
3372 
3373 static void
3374 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3375 {
3376   /* If VAR represents a zero-sized variable, it's possible that the
3377      assignment statement may result in no gimple statements.  */
3378   if (init_stmt)
3379     {
3380       gimple_stmt_iterator si = gsi_last_bb (bb);
3381 
3382       /* We can end up with init statements that store to a non-register
3383          from a rhs with a conversion.  Handle that here by forcing the
3384 	 rhs into a temporary.  gimple_regimplify_operands is not
3385 	 prepared to do this for us.  */
3386       if (!is_gimple_debug (init_stmt)
3387 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3388 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3389 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3390 	{
3391 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3392 			     gimple_expr_type (init_stmt),
3393 			     gimple_assign_rhs1 (init_stmt));
3394 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3395 					  GSI_NEW_STMT);
3396 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3397 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3398 	}
3399       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3400       if (!is_gimple_debug (init_stmt))
3401 	{
3402 	  gimple_regimplify_operands (init_stmt, &si);
3403 
3404 	  tree def = gimple_assign_lhs (init_stmt);
3405 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3406 	}
3407     }
3408 }
3409 
3410 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3411    if need be (which should only be necessary for invalid programs).  Attempt
3412    to convert VALUE to TYPE and return the result if possible; otherwise fall
3413    back to a VIEW_CONVERT_EXPR or a zero constant of the given type.  */
3414 
3415 tree
3416 force_value_to_type (tree type, tree value)
3417 {
3418   /* If we can match up types by promotion/demotion do so.  */
3419   if (fold_convertible_p (type, value))
3420     return fold_convert (type, value);
3421 
3422   /* ???  For valid programs we should not end up here.
3423      Still if we end up with truly mismatched types here, fall back
3424      to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3425      GIMPLE to the following passes.  */
3426   if (!is_gimple_reg_type (TREE_TYPE (value))
3427 	   || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3428     return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3429   else
3430     return build_zero_cst (type);
3431 }
3432 
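/* A sketch of the kind of invalid call this is meant to survive (purely
   hypothetical; the point is only that later passes must not see invalid
   GIMPLE):

     struct big { int a[4]; };
     int callee ();
     int callee (struct big b) { return b.a[0]; }
     int caller (void) { return callee (1); }

   Here an int actual is passed where a struct formal is expected.  The
   constant 1 cannot be fold-converted to struct big, and since int is a
   register type whose size differs from struct big, a zero constant of
   struct big is substituted.  */
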
3433 /* Initialize parameter P with VALUE.  If needed, produce init statement
3434    at the end of BB.  When BB is NULL, we return init statement to be
3435    output later.  */
3436 static gimple *
3437 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3438 		     basic_block bb, tree *vars)
3439 {
3440   gimple *init_stmt = NULL;
3441   tree var;
3442   tree rhs = value;
3443   tree def = (gimple_in_ssa_p (cfun)
3444 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3445 
3446   if (value
3447       && value != error_mark_node
3448       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3449     rhs = force_value_to_type (TREE_TYPE (p), value);
3450 
3451   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3452      here since the type of this decl must be visible to the calling
3453      function.  */
3454   var = copy_decl_to_var (p, id);
3455 
3456   /* Declare this new variable.  */
3457   DECL_CHAIN (var) = *vars;
3458   *vars = var;
3459 
3460   /* Make gimplifier happy about this variable.  */
3461   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3462 
3463   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3464      we would not need to create a new variable here at all, if it
3465      weren't for debug info.  Still, we can just use the argument
3466      value.  */
3467   if (TREE_READONLY (p)
3468       && !TREE_ADDRESSABLE (p)
3469       && value && !TREE_SIDE_EFFECTS (value)
3470       && !def)
3471     {
3472       /* We may produce non-gimple trees by adding NOPs or introduce
3473 	 invalid sharing when the operand is not really constant.
3474 	 It is not a big deal to prohibit constant propagation here, as
3475 	 we will constant propagate in the DOM1 pass anyway.  */
3476       if (is_gimple_min_invariant (value)
3477 	  && useless_type_conversion_p (TREE_TYPE (p),
3478 						 TREE_TYPE (value))
3479 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3480 	     the base variable isn't a local variable of the inlined
3481 	     function, e.g., when doing recursive inlining, direct or
3482 	     mutually-recursive or whatever, which is why we don't
3483 	     just test whether fn == current_function_decl.  */
3484 	  && ! self_inlining_addr_expr (value, fn))
3485 	{
3486 	  insert_decl_map (id, p, value);
3487 	  insert_debug_decl_map (id, p, var);
3488 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3489 	}
3490     }
3491 
3492   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3493      that way, when the PARM_DECL is encountered, it will be
3494      automatically replaced by the VAR_DECL.  */
3495   insert_decl_map (id, p, var);
3496 
3497   /* Even if P was TREE_READONLY, the new VAR should not be.
3498      In the original code, we would have constructed a
3499      temporary, and then the function body would have never
3500      changed the value of P.  However, now, we will be
3501      constructing VAR directly.  The constructor body may
3502      change its value multiple times as it is being
3503      constructed.  Therefore, it must not be TREE_READONLY;
3504      the back-end assumes that TREE_READONLY variable is
3505      assigned to only once.  */
3506   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3507     TREE_READONLY (var) = 0;
3508 
3509   /* If there is no setup required and we are in SSA, take the easy route
3510      replacing all SSA names representing the function parameter by the
3511      SSA name passed to the function.
3512 
3513      We need to construct map for the variable anyway as it might be used
3514      in different SSA names when parameter is set in function.
3515 
3516      Do replacement at -O0 for const arguments replaced by constant.
3517      This is important for builtin_constant_p and other constructs requiring
3518      a constant argument to be visible in the inlined function body.  */
3519   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3520       && (optimize
3521           || (TREE_READONLY (p)
3522 	      && is_gimple_min_invariant (rhs)))
3523       && (TREE_CODE (rhs) == SSA_NAME
3524 	  || is_gimple_min_invariant (rhs))
3525       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3526     {
3527       insert_decl_map (id, def, rhs);
3528       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3529     }
3530 
3531   /* If the value of the argument is never used, don't care about initializing
3532      it.  */
3533   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3534     {
3535       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3536       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3537     }
3538 
3539   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3540      the argument to the proper type in case it was promoted.  */
3541   if (value)
3542     {
3543       if (rhs == error_mark_node)
3544 	{
3545 	  insert_decl_map (id, p, var);
3546 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3547 	}
3548 
3549       STRIP_USELESS_TYPE_CONVERSION (rhs);
3550 
3551       /* If we are in SSA form properly remap the default definition
3552          or assign to a dummy SSA name if the parameter is unused and
3553 	 we are not optimizing.  */
3554       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3555 	{
3556 	  if (def)
3557 	    {
3558 	      def = remap_ssa_name (def, id);
3559 	      init_stmt = gimple_build_assign (def, rhs);
3560 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3561 	      set_ssa_default_def (cfun, var, NULL);
3562 	    }
3563 	  else if (!optimize)
3564 	    {
3565 	      def = make_ssa_name (var);
3566 	      init_stmt = gimple_build_assign (def, rhs);
3567 	    }
3568 	}
3569       else
3570         init_stmt = gimple_build_assign (var, rhs);
3571 
3572       if (bb && init_stmt)
3573         insert_init_stmt (id, bb, init_stmt);
3574     }
3575   return init_stmt;
3576 }
3577 
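/* An illustrative, made-up example of what setup_one_parameter produces.
   Inlining

     static inline int square (int x) { return x * x; }

   at a call site "y = square (a + 1);" replaces the PARM_DECL X by a new
   VAR_DECL and emits an init statement, roughly

     int x.1;
     x.1 = a + 1;
     y = x.1 * x.1;

   whereas for "y = square (5);" under optimization the default definition
   of X can be mapped directly to the constant 5, and only a debug bind is
   emitted.  */
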
3578 /* Generate code to initialize the parameters of the function at the
3579    top of the stack in ID from the GIMPLE_CALL STMT.  */
3580 
3581 static void
3582 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3583 			       tree fn, basic_block bb)
3584 {
3585   tree parms;
3586   size_t i;
3587   tree p;
3588   tree vars = NULL_TREE;
3589   tree static_chain = gimple_call_chain (stmt);
3590 
3591   /* Figure out what the parameters are.  */
3592   parms = DECL_ARGUMENTS (fn);
3593 
3594   /* Loop through the parameter declarations, replacing each with an
3595      equivalent VAR_DECL, appropriately initialized.  */
3596   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3597     {
3598       tree val;
3599       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3600       setup_one_parameter (id, p, val, fn, bb, &vars);
3601     }
3602   /* After remapping parameters remap their types.  This has to be done
3603      in a second loop over all parameters to appropriately remap
3604      variable sized arrays when the size is specified in a
3605      parameter following the array.  */
3606   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3607     {
3608       tree *varp = id->decl_map->get (p);
3609       if (varp && VAR_P (*varp))
3610 	{
3611 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3612 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3613 	  tree var = *varp;
3614 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3615 	  /* Also remap the default definition if it was remapped
3616 	     to the default definition of the parameter replacement
3617 	     by the parameter setup.  */
3618 	  if (def)
3619 	    {
3620 	      tree *defp = id->decl_map->get (def);
3621 	      if (defp
3622 		  && TREE_CODE (*defp) == SSA_NAME
3623 		  && SSA_NAME_VAR (*defp) == var)
3624 		TREE_TYPE (*defp) = TREE_TYPE (var);
3625 	    }
3626 	}
3627     }
3628 
3629   /* Initialize the static chain.  */
3630   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3631   gcc_assert (fn != current_function_decl);
3632   if (p)
3633     {
3634       /* No static chain?  Seems like a bug in tree-nested.c.  */
3635       gcc_assert (static_chain);
3636 
3637       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3638     }
3639 
3640   declare_inline_vars (id->block, vars);
3641 }
3642 
3643 
3644 /* Declare a return variable to replace the RESULT_DECL for the
3645    function we are calling.  An appropriate DECL_STMT is returned.
3646    The USE_STMT is filled to contain a use of the declaration to
3647    indicate the return value of the function.
3648 
3649    RETURN_SLOT, if non-null, is the place where to store the result.  It
3650    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3651    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3652 
3653    The return value is a (possibly null) value that holds the result
3654    as seen by the caller.  */
3655 
3656 static tree
3657 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3658 			 basic_block entry_bb)
3659 {
3660   tree callee = id->src_fn;
3661   tree result = DECL_RESULT (callee);
3662   tree callee_type = TREE_TYPE (result);
3663   tree caller_type;
3664   tree var, use;
3665 
3666   /* Handle type-mismatches in the function declaration return type
3667      vs. the call expression.  */
3668   if (modify_dest)
3669     caller_type = TREE_TYPE (modify_dest);
3670   else if (return_slot)
3671     caller_type = TREE_TYPE (return_slot);
3672   else /* No LHS on the call.  */
3673     caller_type = TREE_TYPE (TREE_TYPE (callee));
3674 
3675   /* We don't need to do anything for functions that don't return anything.  */
3676   if (VOID_TYPE_P (callee_type))
3677     return NULL_TREE;
3678 
3679   /* If there was a return slot, then the return value is the
3680      dereferenced address of that object.  */
3681   if (return_slot)
3682     {
3683       /* The front end shouldn't have used both return_slot and
3684 	 a modify expression.  */
3685       gcc_assert (!modify_dest);
3686       if (DECL_BY_REFERENCE (result))
3687 	{
3688 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3689 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3690 
3691 	  /* We are going to construct *&return_slot and we can't do that
3692 	     for variables believed to be not addressable.
3693 
3694 	     FIXME: This check possibly can match, because values returned
3695 	     via return slot optimization are not believed to have address
3696 	     taken by alias analysis.  */
3697 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3698 	  var = return_slot_addr;
3699 	  mark_addressable (return_slot);
3700 	}
3701       else
3702 	{
3703 	  var = return_slot;
3704 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3705 	  if (TREE_ADDRESSABLE (result))
3706 	    mark_addressable (var);
3707 	}
3708       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3709            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3710 	  && !DECL_GIMPLE_REG_P (result)
3711 	  && DECL_P (var))
3712 	DECL_GIMPLE_REG_P (var) = 0;
3713 
3714       if (!useless_type_conversion_p (callee_type, caller_type))
3715 	var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3716 
3717       use = NULL;
3718       goto done;
3719     }
3720 
3721   /* All types requiring non-trivial constructors should have been handled.  */
3722   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3723 
3724   /* Attempt to avoid creating a new temporary variable.  */
3725   if (modify_dest
3726       && TREE_CODE (modify_dest) != SSA_NAME)
3727     {
3728       bool use_it = false;
3729 
3730       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3731       if (!useless_type_conversion_p (callee_type, caller_type))
3732 	use_it = false;
3733 
3734       /* ??? If we're assigning to a variable sized type, then we must
3735 	 reuse the destination variable, because we've no good way to
3736 	 create variable sized temporaries at this point.  */
3737       else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3738 	use_it = true;
3739 
3740       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3741 	 reuse it as the result of the call directly.  Don't do this if
3742 	 it would promote MODIFY_DEST to addressable.  */
3743       else if (TREE_ADDRESSABLE (result))
3744 	use_it = false;
3745       else
3746 	{
3747 	  tree base_m = get_base_address (modify_dest);
3748 
3749 	  /* If the base isn't a decl, then it's a pointer, and we don't
3750 	     know where that's going to go.  */
3751 	  if (!DECL_P (base_m))
3752 	    use_it = false;
3753 	  else if (is_global_var (base_m))
3754 	    use_it = false;
3755 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3756 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3757 		   && !DECL_GIMPLE_REG_P (result)
3758 		   && DECL_GIMPLE_REG_P (base_m))
3759 	    use_it = false;
3760 	  else if (!TREE_ADDRESSABLE (base_m))
3761 	    use_it = true;
3762 	}
3763 
3764       if (use_it)
3765 	{
3766 	  var = modify_dest;
3767 	  use = NULL;
3768 	  goto done;
3769 	}
3770     }
3771 
3772   gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3773 
3774   var = copy_result_decl_to_var (result, id);
3775   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3776 
3777   /* Do not have the rest of GCC warn about this variable as it should
3778      not be visible to the user.  */
3779   TREE_NO_WARNING (var) = 1;
3780 
3781   declare_inline_vars (id->block, var);
3782 
3783   /* Build the use expr.  If the return type of the function was
3784      promoted, convert it back to the expected type.  */
3785   use = var;
3786   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3787     {
3788       /* If we can match up types by promotion/demotion do so.  */
3789       if (fold_convertible_p (caller_type, var))
3790 	use = fold_convert (caller_type, var);
3791       else
3792 	{
3793 	  /* ???  For valid programs we should not end up here.
3794 	     Still if we end up with truly mismatched types here, fall back
3795 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3796 	     passes.  */
3797 	  /* Prevent var from being written into SSA form.  */
3798 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3799 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3800 	    DECL_GIMPLE_REG_P (var) = false;
3801 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3802 	    TREE_ADDRESSABLE (var) = true;
3803 	  use = fold_build2 (MEM_REF, caller_type,
3804 			     build_fold_addr_expr (var),
3805 			     build_int_cst (ptr_type_node, 0));
3806 	}
3807     }
3808 
3809   STRIP_USELESS_TYPE_CONVERSION (use);
3810 
3811   if (DECL_BY_REFERENCE (result))
3812     {
3813       TREE_ADDRESSABLE (var) = 1;
3814       var = build_fold_addr_expr (var);
3815     }
3816 
3817  done:
3818   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3819      way, when the RESULT_DECL is encountered, it will be
3820      automatically replaced by the VAR_DECL.
3821 
3822      When returning by reference, ensure that RESULT_DECL remaps to
3823      gimple_val.  */
3824   if (DECL_BY_REFERENCE (result)
3825       && !is_gimple_val (var))
3826     {
3827       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3828       insert_decl_map (id, result, temp);
3829       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3830 	 its default_def SSA_NAME.  */
3831       if (gimple_in_ssa_p (id->src_cfun)
3832 	  && is_gimple_reg (result))
3833 	{
3834 	  temp = make_ssa_name (temp);
3835 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3836 	}
3837       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3838     }
3839   else
3840     insert_decl_map (id, result, var);
3841 
3842   /* Remember this so we can ignore it in remap_decls.  */
3843   id->retvar = var;
3844   return use;
3845 }
3846 
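/* A rough, source-level sketch of the two outcomes above (the temporary's
   name is invented).  For

     static inline struct pair make (int a) { struct pair p = { a, a }; return p; }

   a call "q = make (3);" can usually reuse Q itself for the RESULT_DECL,
   since there is no type promotion and Q need not become addressable; when
   reuse is not safe, a fresh caller variable such as

     struct pair retval.2;

   is declared instead and Q is assigned from it after the inlined body.  */
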
3847 /* Determine if the function can be copied.  If so return NULL.  If
3848    not return a string describing the reason for failure.  */
3849 
3850 const char *
3851 copy_forbidden (struct function *fun)
3852 {
3853   const char *reason = fun->cannot_be_copied_reason;
3854 
3855   /* Only examine the function once.  */
3856   if (fun->cannot_be_copied_set)
3857     return reason;
3858 
3859   /* We cannot copy a function that receives a non-local goto
3860      because we cannot remap the destination label used in the
3861      function that is performing the non-local goto.  */
3862   /* ??? Actually, this should be possible, if we work at it.
3863      No doubt there's just a handful of places that simply
3864      assume it doesn't happen and don't substitute properly.  */
3865   if (fun->has_nonlocal_label)
3866     {
3867       reason = G_("function %q+F can never be copied "
3868 		  "because it receives a non-local goto");
3869       goto fail;
3870     }
3871 
3872   if (fun->has_forced_label_in_static)
3873     {
3874       reason = G_("function %q+F can never be copied because it saves "
3875 		  "address of local label in a static variable");
3876       goto fail;
3877     }
3878 
3879  fail:
3880   fun->cannot_be_copied_reason = reason;
3881   fun->cannot_be_copied_set = true;
3882   return reason;
3883 }
3884 
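/* A hedged GNU C example of the first restriction (identifiers invented):

     int outer (int n)
     {
       int inner (int i) { if (i < 0) goto bail; return i; }
       if (inner (n) > 10)
	 return 1;
     bail:
       return -1;
     }

   OUTER receives a non-local goto from the nested function INNER, so
   copying OUTER (for inlining, cloning or versioning) is refused: the
   destination label could not be remapped.  */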
3885 
3886 static const char *inline_forbidden_reason;
3887 
3888 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3889    iff a function cannot be inlined.  Also sets the reason why. */
3890 
3891 static tree
3892 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3893 			 struct walk_stmt_info *wip)
3894 {
3895   tree fn = (tree) wip->info;
3896   tree t;
3897   gimple *stmt = gsi_stmt (*gsi);
3898 
3899   switch (gimple_code (stmt))
3900     {
3901     case GIMPLE_CALL:
3902       /* Refuse to inline an alloca call unless the user explicitly forced
3903 	 it, as this may change the program's memory overhead drastically
3904 	 when the function using alloca is called in a loop.  In the GCC
3905 	 present in SPEC2000, inlining into schedule_block caused it to
3906 	 require 2GB of RAM instead of 256MB.  Don't do so for alloca calls
3907 	 emitted for VLA objects, as those can't cause unbounded growth
3908 	 (they're always wrapped inside stack_save/stack_restore regions).  */
3909       if (gimple_maybe_alloca_call_p (stmt)
3910 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3911 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3912 	{
3913 	  inline_forbidden_reason
3914 	    = G_("function %q+F can never be inlined because it uses "
3915 		 "alloca (override using the always_inline attribute)");
3916 	  *handled_ops_p = true;
3917 	  return fn;
3918 	}
3919 
3920       t = gimple_call_fndecl (stmt);
3921       if (t == NULL_TREE)
3922 	break;
3923 
3924       /* We cannot inline functions that call setjmp.  */
3925       if (setjmp_call_p (t))
3926 	{
3927 	  inline_forbidden_reason
3928 	    = G_("function %q+F can never be inlined because it uses setjmp");
3929 	  *handled_ops_p = true;
3930 	  return t;
3931 	}
3932 
3933       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3934 	switch (DECL_FUNCTION_CODE (t))
3935 	  {
3936 	    /* We cannot inline functions that take a variable number of
3937 	       arguments.  */
3938 	  case BUILT_IN_VA_START:
3939 	  case BUILT_IN_NEXT_ARG:
3940 	  case BUILT_IN_VA_END:
3941 	    inline_forbidden_reason
3942 	      = G_("function %q+F can never be inlined because it "
3943 		   "uses variable argument lists");
3944 	    *handled_ops_p = true;
3945 	    return t;
3946 
3947 	  case BUILT_IN_LONGJMP:
3948 	    /* We can't inline functions that call __builtin_longjmp at
3949 	       all.  The non-local goto machinery really requires the
3950 	       destination be in a different function.  If we allow the
3951 	       function calling __builtin_longjmp to be inlined into the
3952 	       function calling __builtin_setjmp, Things will Go Awry.  */
3953 	    inline_forbidden_reason
3954 	      = G_("function %q+F can never be inlined because "
3955 		   "it uses setjmp-longjmp exception handling");
3956 	    *handled_ops_p = true;
3957 	    return t;
3958 
3959 	  case BUILT_IN_NONLOCAL_GOTO:
3960 	    /* Similarly.  */
3961 	    inline_forbidden_reason
3962 	      = G_("function %q+F can never be inlined because "
3963 		   "it uses non-local goto");
3964 	    *handled_ops_p = true;
3965 	    return t;
3966 
3967 	  case BUILT_IN_RETURN:
3968 	  case BUILT_IN_APPLY_ARGS:
3969 	    /* If a __builtin_apply_args caller would be inlined,
3970 	       it would be saving arguments of the function it has
3971 	       been inlined into.  Similarly __builtin_return would
3972 	       return from the function the inline has been inlined into.  */
3973 	    inline_forbidden_reason
3974 	      = G_("function %q+F can never be inlined because "
3975 		   "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3976 	    *handled_ops_p = true;
3977 	    return t;
3978 
3979 	  default:
3980 	    break;
3981 	  }
3982       break;
3983 
3984     case GIMPLE_GOTO:
3985       t = gimple_goto_dest (stmt);
3986 
3987       /* We will not inline a function which uses computed goto.  The
3988 	 addresses of its local labels, which may be tucked into
3989 	 global storage, are of course not constant across
3990 	 instantiations, which causes unexpected behavior.  */
3991       if (TREE_CODE (t) != LABEL_DECL)
3992 	{
3993 	  inline_forbidden_reason
3994 	    = G_("function %q+F can never be inlined "
3995 		 "because it contains a computed goto");
3996 	  *handled_ops_p = true;
3997 	  return t;
3998 	}
3999       break;
4000 
4001     default:
4002       break;
4003     }
4004 
4005   *handled_ops_p = false;
4006   return NULL_TREE;
4007 }
4008 
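/* A small, hypothetical example of the most common rejection above:

     void fill (char **slots, int n, int len)
     {
       for (int i = 0; i < n; i++)
	 slots[i] = __builtin_alloca (len);
     }

   FILL is never inlined unless it is marked always_inline, because inlining
   it into a caller that invokes it repeatedly could make the caller's stack
   usage grow without bound; an alloca emitted internally for a VLA is
   exempt, as noted in the comment above.  */
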
4009 /* Return true if FNDECL is a function that cannot be inlined into
4010    another one.  */
4011 
4012 static bool
4013 inline_forbidden_p (tree fndecl)
4014 {
4015   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4016   struct walk_stmt_info wi;
4017   basic_block bb;
4018   bool forbidden_p = false;
4019 
4020   /* First check for shared reasons not to copy the code.  */
4021   inline_forbidden_reason = copy_forbidden (fun);
4022   if (inline_forbidden_reason != NULL)
4023     return true;
4024 
4025   /* Next, walk the statements of the function looking for
4026      constructs we can't handle, or that are non-optimal for inlining.  */
4027   hash_set<tree> visited_nodes;
4028   memset (&wi, 0, sizeof (wi));
4029   wi.info = (void *) fndecl;
4030   wi.pset = &visited_nodes;
4031 
4032   FOR_EACH_BB_FN (bb, fun)
4033     {
4034       gimple *ret;
4035       gimple_seq seq = bb_seq (bb);
4036       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4037       forbidden_p = (ret != NULL);
4038       if (forbidden_p)
4039 	break;
4040     }
4041 
4042   return forbidden_p;
4043 }
4044 
4045 /* Return false if the function FNDECL cannot be inlined on account of its
4046    attributes, true otherwise.  */
4047 static bool
4048 function_attribute_inlinable_p (const_tree fndecl)
4049 {
4050   if (targetm.attribute_table)
4051     {
4052       const_tree a;
4053 
4054       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4055 	{
4056 	  const_tree name = get_attribute_name (a);
4057 	  int i;
4058 
4059 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4060 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
4061 	      return targetm.function_attribute_inlinable_p (fndecl);
4062 	}
4063     }
4064 
4065   return true;
4066 }
4067 
4068 /* Returns nonzero if FN is a function that does not have any
4069    fundamental inline blocking properties.  */
4070 
4071 bool
4072 tree_inlinable_function_p (tree fn)
4073 {
4074   bool inlinable = true;
4075   bool do_warning;
4076   tree always_inline;
4077 
4078   /* If we've already decided this function shouldn't be inlined,
4079      there's no need to check again.  */
4080   if (DECL_UNINLINABLE (fn))
4081     return false;
4082 
4083   /* We only warn for functions declared `inline' by the user.  */
4084   do_warning = (opt_for_fn (fn, warn_inline)
4085 		&& DECL_DECLARED_INLINE_P (fn)
4086 		&& !DECL_NO_INLINE_WARNING_P (fn)
4087 		&& !DECL_IN_SYSTEM_HEADER (fn));
4088 
4089   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4090 
4091   if (flag_no_inline
4092       && always_inline == NULL)
4093     {
4094       if (do_warning)
4095 	warning (OPT_Winline, "function %q+F can never be inlined because it "
4096 		 "is suppressed using %<-fno-inline%>", fn);
4097       inlinable = false;
4098     }
4099 
4100   else if (!function_attribute_inlinable_p (fn))
4101     {
4102       if (do_warning)
4103         warning (OPT_Winline, "function %q+F can never be inlined because it "
4104                  "uses attributes conflicting with inlining", fn);
4105       inlinable = false;
4106     }
4107 
4108   else if (inline_forbidden_p (fn))
4109     {
4110       /* See if we should warn about uninlinable functions.  Previously,
4111 	 some of these warnings would be issued while trying to expand
4112 	 the function inline, but that would cause multiple warnings
4113 	 about functions that would for example call alloca.  But since
4114 	 this a property of the function, just one warning is enough.
4115 	 As a bonus we can now give more details about the reason why a
4116 	 function is not inlinable.  */
4117       if (always_inline)
4118 	error (inline_forbidden_reason, fn);
4119       else if (do_warning)
4120 	warning (OPT_Winline, inline_forbidden_reason, fn);
4121 
4122       inlinable = false;
4123     }
4124 
4125   /* Squirrel away the result so that we don't have to check again.  */
4126   DECL_UNINLINABLE (fn) = !inlinable;
4127 
4128   return inlinable;
4129 }
4130 
4131 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
4132    word size, take a possible memcpy call into account, and return the
4133    cost based on whether we optimize for size or speed according to SPEED_P.  */
4134 
4135 int
4136 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4137 {
4138   HOST_WIDE_INT size;
4139 
4140   gcc_assert (!VOID_TYPE_P (type));
4141 
4142   if (TREE_CODE (type) == VECTOR_TYPE)
4143     {
4144       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4145       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4146       int orig_mode_size
4147 	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4148       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4149       return ((orig_mode_size + simd_mode_size - 1)
4150 	      / simd_mode_size);
4151     }
4152 
4153   size = int_size_in_bytes (type);
4154 
4155   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4156     /* Cost of a memcpy call, 3 arguments and the call.  */
4157     return 4;
4158   else
4159     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4160 }
4161 
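/* A worked example for the scalar path above, under the assumption (both
   values are target-dependent) that MOVE_MAX_PIECES is 16 and
   MOVE_RATIO (speed_p) is 4: a 40-byte struct costs
   (40 + 16 - 1) / 16 = 3 units, while a 200-byte struct exceeds
   16 * 4 = 64 bytes and is charged the flat memcpy cost of 4.  */
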
4162 /* Returns cost of operation CODE, according to WEIGHTS  */
4163 
4164 static int
4165 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4166 			tree op1 ATTRIBUTE_UNUSED, tree op2)
4167 {
4168   switch (code)
4169     {
4170     /* These are "free" conversions, or their presumed cost
4171        is folded into other operations.  */
4172     case RANGE_EXPR:
4173     CASE_CONVERT:
4174     case COMPLEX_EXPR:
4175     case PAREN_EXPR:
4176     case VIEW_CONVERT_EXPR:
4177       return 0;
4178 
4179     /* Assign cost of 1 to usual operations.
4180        ??? We may consider mapping RTL costs to this.  */
4181     case COND_EXPR:
4182     case VEC_COND_EXPR:
4183     case VEC_PERM_EXPR:
4184 
4185     case PLUS_EXPR:
4186     case POINTER_PLUS_EXPR:
4187     case POINTER_DIFF_EXPR:
4188     case MINUS_EXPR:
4189     case MULT_EXPR:
4190     case MULT_HIGHPART_EXPR:
4191 
4192     case ADDR_SPACE_CONVERT_EXPR:
4193     case FIXED_CONVERT_EXPR:
4194     case FIX_TRUNC_EXPR:
4195 
4196     case NEGATE_EXPR:
4197     case FLOAT_EXPR:
4198     case MIN_EXPR:
4199     case MAX_EXPR:
4200     case ABS_EXPR:
4201     case ABSU_EXPR:
4202 
4203     case LSHIFT_EXPR:
4204     case RSHIFT_EXPR:
4205     case LROTATE_EXPR:
4206     case RROTATE_EXPR:
4207 
4208     case BIT_IOR_EXPR:
4209     case BIT_XOR_EXPR:
4210     case BIT_AND_EXPR:
4211     case BIT_NOT_EXPR:
4212 
4213     case TRUTH_ANDIF_EXPR:
4214     case TRUTH_ORIF_EXPR:
4215     case TRUTH_AND_EXPR:
4216     case TRUTH_OR_EXPR:
4217     case TRUTH_XOR_EXPR:
4218     case TRUTH_NOT_EXPR:
4219 
4220     case LT_EXPR:
4221     case LE_EXPR:
4222     case GT_EXPR:
4223     case GE_EXPR:
4224     case EQ_EXPR:
4225     case NE_EXPR:
4226     case ORDERED_EXPR:
4227     case UNORDERED_EXPR:
4228 
4229     case UNLT_EXPR:
4230     case UNLE_EXPR:
4231     case UNGT_EXPR:
4232     case UNGE_EXPR:
4233     case UNEQ_EXPR:
4234     case LTGT_EXPR:
4235 
4236     case CONJ_EXPR:
4237 
4238     case PREDECREMENT_EXPR:
4239     case PREINCREMENT_EXPR:
4240     case POSTDECREMENT_EXPR:
4241     case POSTINCREMENT_EXPR:
4242 
4243     case REALIGN_LOAD_EXPR:
4244 
4245     case WIDEN_SUM_EXPR:
4246     case WIDEN_MULT_EXPR:
4247     case DOT_PROD_EXPR:
4248     case SAD_EXPR:
4249     case WIDEN_MULT_PLUS_EXPR:
4250     case WIDEN_MULT_MINUS_EXPR:
4251     case WIDEN_LSHIFT_EXPR:
4252 
4253     case VEC_WIDEN_MULT_HI_EXPR:
4254     case VEC_WIDEN_MULT_LO_EXPR:
4255     case VEC_WIDEN_MULT_EVEN_EXPR:
4256     case VEC_WIDEN_MULT_ODD_EXPR:
4257     case VEC_UNPACK_HI_EXPR:
4258     case VEC_UNPACK_LO_EXPR:
4259     case VEC_UNPACK_FLOAT_HI_EXPR:
4260     case VEC_UNPACK_FLOAT_LO_EXPR:
4261     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4262     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4263     case VEC_PACK_TRUNC_EXPR:
4264     case VEC_PACK_SAT_EXPR:
4265     case VEC_PACK_FIX_TRUNC_EXPR:
4266     case VEC_PACK_FLOAT_EXPR:
4267     case VEC_WIDEN_LSHIFT_HI_EXPR:
4268     case VEC_WIDEN_LSHIFT_LO_EXPR:
4269     case VEC_DUPLICATE_EXPR:
4270     case VEC_SERIES_EXPR:
4271 
4272       return 1;
4273 
4274     /* A few special cases of expensive operations.  This is useful
4275        to avoid inlining functions that have too many of these.  */
4276     case TRUNC_DIV_EXPR:
4277     case CEIL_DIV_EXPR:
4278     case FLOOR_DIV_EXPR:
4279     case ROUND_DIV_EXPR:
4280     case EXACT_DIV_EXPR:
4281     case TRUNC_MOD_EXPR:
4282     case CEIL_MOD_EXPR:
4283     case FLOOR_MOD_EXPR:
4284     case ROUND_MOD_EXPR:
4285     case RDIV_EXPR:
4286       if (TREE_CODE (op2) != INTEGER_CST)
4287         return weights->div_mod_cost;
4288       return 1;
4289 
4290     /* Bit-field insertion needs several shift and mask operations.  */
4291     case BIT_INSERT_EXPR:
4292       return 3;
4293 
4294     default:
4295       /* We expect a copy assignment with no operator.  */
4296       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4297       return 0;
4298     }
4299 }
4300 
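/* For instance, with these rules a division "x / y" where Y is not a
   compile-time constant is charged WEIGHTS->div_mod_cost, while "x / 8"
   and "x + y" are each charged 1, and a plain conversion such as
   "(long) x" is free.  */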
4301 
4302 /* Estimate number of instructions that will be created by expanding
4303    the statements in the statement sequence STMTS.
4304    WEIGHTS contains weights attributed to various constructs.  */
4305 
4306 int
4307 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4308 {
4309   int cost;
4310   gimple_stmt_iterator gsi;
4311 
4312   cost = 0;
4313   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4314     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4315 
4316   return cost;
4317 }
4318 
4319 
4320 /* Estimate number of instructions that will be created by expanding STMT.
4321    WEIGHTS contains weights attributed to various constructs.  */
4322 
4323 int
4324 estimate_num_insns (gimple *stmt, eni_weights *weights)
4325 {
4326   unsigned cost, i;
4327   enum gimple_code code = gimple_code (stmt);
4328   tree lhs;
4329   tree rhs;
4330 
4331   switch (code)
4332     {
4333     case GIMPLE_ASSIGN:
4334       /* Try to estimate the cost of assignments.  We have two cases to
4335 	 deal with:
4336 	 1) Simple assignments to registers;
4337 	 2) Stores to things that must live in memory.  This includes
4338 	    "normal" stores to scalars, but also assignments of large
4339 	    structures, or constructors of big arrays;
4340 
4341 	 Let us look at these two cases, assuming we have "a = b + C":
4342 	 <GIMPLE_ASSIGN <var_decl "a">
4343 	        <plus_expr <var_decl "b"> <constant C>>
4344 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4345 	 any target, because "a" usually ends up in a real register.  Hence
4346 	 the only cost of this expression comes from the PLUS_EXPR, and we
4347 	 can ignore the GIMPLE_ASSIGN.
4348 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4349 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4350 	 of moving something into "a", which we compute using the function
4351 	 estimate_move_cost.  */
4352       if (gimple_clobber_p (stmt))
4353 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4354 
4355       lhs = gimple_assign_lhs (stmt);
4356       rhs = gimple_assign_rhs1 (stmt);
4357 
4358       cost = 0;
4359 
4360       /* Account for the cost of moving to / from memory.  */
4361       if (gimple_store_p (stmt))
4362 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4363       if (gimple_assign_load_p (stmt))
4364 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4365 
4366       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4367       				      gimple_assign_rhs1 (stmt),
4368 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4369 				      == GIMPLE_BINARY_RHS
4370 				      ? gimple_assign_rhs2 (stmt) : NULL);
4371       break;
4372 
4373     case GIMPLE_COND:
4374       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4375       				         gimple_op (stmt, 0),
4376 				         gimple_op (stmt, 1));
4377       break;
4378 
4379     case GIMPLE_SWITCH:
4380       {
4381 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4382 	/* Take into account cost of the switch + guess 2 conditional jumps for
4383 	   each case label.
4384 
4385 	   TODO: once the switch expansion logic is sufficiently separated, we can
4386 	   do a better job of estimating the cost of the switch.  */
4387 	if (weights->time_based)
4388 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4389 	else
4390 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4391       }
4392       break;
4393 
4394     case GIMPLE_CALL:
4395       {
4396 	tree decl;
4397 
4398 	if (gimple_call_internal_p (stmt))
4399 	  return 0;
4400 	else if ((decl = gimple_call_fndecl (stmt))
4401 		 && fndecl_built_in_p (decl))
4402 	  {
4403 	    /* Do not special case builtins where we see the body.
4404 	       This just confuses the inliner.  */
4405 	    struct cgraph_node *node;
4406 	    if (!(node = cgraph_node::get (decl))
4407 		|| node->definition)
4408 	      ;
4409 	    /* For builtins that are likely expanded to nothing or
4410 	       inlined, do not account operand costs.  */
4411 	    else if (is_simple_builtin (decl))
4412 	      return 0;
4413 	    else if (is_inexpensive_builtin (decl))
4414 	      return weights->target_builtin_call_cost;
4415 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4416 	      {
4417 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4418 		   specialize the cheap expansion we do here.
4419 		   ???  This asks for a more general solution.  */
4420 		switch (DECL_FUNCTION_CODE (decl))
4421 		  {
4422 		    case BUILT_IN_POW:
4423 		    case BUILT_IN_POWF:
4424 		    case BUILT_IN_POWL:
4425 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4426 			  && (real_equal
4427 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4428 			       &dconst2)))
4429 			return estimate_operator_cost
4430 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4431 			     gimple_call_arg (stmt, 0));
4432 		      break;
4433 
4434 		    default:
4435 		      break;
4436 		  }
4437 	      }
4438 	  }
4439 
4440 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4441 	if (gimple_call_lhs (stmt))
4442 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4443 				      weights->time_based);
4444 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4445 	  {
4446 	    tree arg = gimple_call_arg (stmt, i);
4447 	    cost += estimate_move_cost (TREE_TYPE (arg),
4448 					weights->time_based);
4449 	  }
4450 	break;
4451       }
4452 
4453     case GIMPLE_RETURN:
4454       return weights->return_cost;
4455 
4456     case GIMPLE_GOTO:
4457     case GIMPLE_LABEL:
4458     case GIMPLE_NOP:
4459     case GIMPLE_PHI:
4460     case GIMPLE_PREDICT:
4461     case GIMPLE_DEBUG:
4462       return 0;
4463 
4464     case GIMPLE_ASM:
4465       {
4466 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4467 	/* 1000 means infinity. This avoids overflows later
4468 	   with very long asm statements.  */
4469 	if (count > 1000)
4470 	  count = 1000;
4471 	/* If this asm is asm inline, count anything as minimum size.  */
4472 	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4473 	  count = MIN (1, count);
4474 	return MAX (1, count);
4475       }
4476 
4477     case GIMPLE_RESX:
4478       /* This is either going to be an external function call with one
4479 	 argument, or two register copy statements plus a goto.  */
4480       return 2;
4481 
4482     case GIMPLE_EH_DISPATCH:
4483       /* ??? This is going to turn into a switch statement.  Ideally
4484 	 we'd have a look at the eh region and estimate the number of
4485 	 edges involved.  */
4486       return 10;
4487 
4488     case GIMPLE_BIND:
4489       return estimate_num_insns_seq (
4490 	       gimple_bind_body (as_a <gbind *> (stmt)),
4491 	       weights);
4492 
4493     case GIMPLE_EH_FILTER:
4494       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4495 
4496     case GIMPLE_CATCH:
4497       return estimate_num_insns_seq (gimple_catch_handler (
4498 				       as_a <gcatch *> (stmt)),
4499 				     weights);
4500 
4501     case GIMPLE_TRY:
4502       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4503               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4504 
4505     /* OMP directives are generally very expensive.  */
4506 
4507     case GIMPLE_OMP_RETURN:
4508     case GIMPLE_OMP_SECTIONS_SWITCH:
4509     case GIMPLE_OMP_ATOMIC_STORE:
4510     case GIMPLE_OMP_CONTINUE:
4511       /* ...except these, which are cheap.  */
4512       return 0;
4513 
4514     case GIMPLE_OMP_ATOMIC_LOAD:
4515       return weights->omp_cost;
4516 
4517     case GIMPLE_OMP_FOR:
4518       return (weights->omp_cost
4519               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4520               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4521 
4522     case GIMPLE_OMP_PARALLEL:
4523     case GIMPLE_OMP_TASK:
4524     case GIMPLE_OMP_CRITICAL:
4525     case GIMPLE_OMP_MASTER:
4526     case GIMPLE_OMP_TASKGROUP:
4527     case GIMPLE_OMP_ORDERED:
4528     case GIMPLE_OMP_SCAN:
4529     case GIMPLE_OMP_SECTION:
4530     case GIMPLE_OMP_SECTIONS:
4531     case GIMPLE_OMP_SINGLE:
4532     case GIMPLE_OMP_TARGET:
4533     case GIMPLE_OMP_TEAMS:
4534       return (weights->omp_cost
4535               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4536 
4537     case GIMPLE_TRANSACTION:
4538       return (weights->tm_cost
4539 	      + estimate_num_insns_seq (gimple_transaction_body (
4540 					  as_a <gtransaction *> (stmt)),
4541 					weights));
4542 
4543     default:
4544       gcc_unreachable ();
4545     }
4546 
4547   return cost;
4548 }
4549 
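/* A worked example of the GIMPLE_ASSIGN case above, assuming the default
   eni_size_weights set up in init_inline_once below: for "a = b + c" where
   A is a GIMPLE register, only the PLUS_EXPR is charged, so the cost is 1;
   if A lives in memory, estimate_move_cost of A's type is added, giving a
   typical cost of 1 + 1 = 2 for an int store.  */
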
4550 /* Estimate number of instructions that will be created by expanding
4551    function FNDECL.  WEIGHTS contains weights attributed to various
4552    constructs.  */
4553 
4554 int
4555 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4556 {
4557   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4558   gimple_stmt_iterator bsi;
4559   basic_block bb;
4560   int n = 0;
4561 
4562   gcc_assert (my_function && my_function->cfg);
4563   FOR_EACH_BB_FN (bb, my_function)
4564     {
4565       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4566 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4567     }
4568 
4569   return n;
4570 }
4571 
4572 
4573 /* Initializes weights used by estimate_num_insns.  */
4574 
4575 void
4576 init_inline_once (void)
4577 {
4578   eni_size_weights.call_cost = 1;
4579   eni_size_weights.indirect_call_cost = 3;
4580   eni_size_weights.target_builtin_call_cost = 1;
4581   eni_size_weights.div_mod_cost = 1;
4582   eni_size_weights.omp_cost = 40;
4583   eni_size_weights.tm_cost = 10;
4584   eni_size_weights.time_based = false;
4585   eni_size_weights.return_cost = 1;
4586 
4587   /* Estimating time for call is difficult, since we have no idea what the
4588      called function does.  In the current uses of eni_time_weights,
4589      underestimating the cost does less harm than overestimating it, so
4590      we choose a rather small value here.  */
4591   eni_time_weights.call_cost = 10;
4592   eni_time_weights.indirect_call_cost = 15;
4593   eni_time_weights.target_builtin_call_cost = 1;
4594   eni_time_weights.div_mod_cost = 10;
4595   eni_time_weights.omp_cost = 40;
4596   eni_time_weights.tm_cost = 40;
4597   eni_time_weights.time_based = true;
4598   eni_time_weights.return_cost = 2;
4599 }
4600 
4601 
4602 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4603 
4604 static void
4605 prepend_lexical_block (tree current_block, tree new_block)
4606 {
4607   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4608   BLOCK_SUBBLOCKS (current_block) = new_block;
4609   BLOCK_SUPERCONTEXT (new_block) = current_block;
4610 }
4611 
4612 /* Add local variables from CALLEE to CALLER.  */
4613 
4614 static inline void
4615 add_local_variables (struct function *callee, struct function *caller,
4616 		     copy_body_data *id)
4617 {
4618   tree var;
4619   unsigned ix;
4620 
4621   FOR_EACH_LOCAL_DECL (callee, ix, var)
4622     if (!can_be_nonlocal (var, id))
4623       {
4624         tree new_var = remap_decl (var, id);
4625 
4626         /* Remap debug-expressions.  */
4627 	if (VAR_P (new_var)
4628 	    && DECL_HAS_DEBUG_EXPR_P (var)
4629 	    && new_var != var)
4630 	  {
4631 	    tree tem = DECL_DEBUG_EXPR (var);
4632 	    bool old_regimplify = id->regimplify;
4633 	    id->remapping_type_depth++;
4634 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4635 	    id->remapping_type_depth--;
4636 	    id->regimplify = old_regimplify;
4637 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4638 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4639 	  }
4640 	add_local_decl (caller, new_var);
4641       }
4642 }
4643 
4644 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4645    have brought in or introduced any debug stmts for SRCVAR.  */
4646 
4647 static inline void
4648 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4649 {
4650   tree *remappedvarp = id->decl_map->get (srcvar);
4651 
4652   if (!remappedvarp)
4653     return;
4654 
4655   if (!VAR_P (*remappedvarp))
4656     return;
4657 
4658   if (*remappedvarp == id->retvar)
4659     return;
4660 
4661   tree tvar = target_for_debug_bind (*remappedvarp);
4662   if (!tvar)
4663     return;
4664 
4665   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4666 					  id->call_stmt);
4667   gimple_seq_add_stmt (bindings, stmt);
4668 }
4669 
4670 /* For each inlined variable for which we may have debug bind stmts,
4671    add before GSI a final debug stmt resetting it, marking the end of
4672    its life, so that var-tracking knows it doesn't have to compute
4673    further locations for it.  */
4674 
4675 static inline void
4676 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4677 {
4678   tree var;
4679   unsigned ix;
4680   gimple_seq bindings = NULL;
4681 
4682   if (!gimple_in_ssa_p (id->src_cfun))
4683     return;
4684 
4685   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4686     return;
4687 
4688   for (var = DECL_ARGUMENTS (id->src_fn);
4689        var; var = DECL_CHAIN (var))
4690     reset_debug_binding (id, var, &bindings);
4691 
4692   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4693     reset_debug_binding (id, var, &bindings);
4694 
4695   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4696 }
4697 
4698 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4699 
4700 static bool
4701 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4702 		    bitmap to_purge)
4703 {
4704   tree use_retvar;
4705   tree fn;
4706   hash_map<tree, tree> *dst;
4707   hash_map<tree, tree> *st = NULL;
4708   tree return_slot;
4709   tree modify_dest;
4710   struct cgraph_edge *cg_edge;
4711   cgraph_inline_failed_t reason;
4712   basic_block return_block;
4713   edge e;
4714   gimple_stmt_iterator gsi, stmt_gsi;
4715   bool successfully_inlined = false;
4716   bool purge_dead_abnormal_edges;
4717   gcall *call_stmt;
4718   unsigned int prop_mask, src_properties;
4719   struct function *dst_cfun;
4720   tree simduid;
4721   use_operand_p use;
4722   gimple *simtenter_stmt = NULL;
4723   vec<tree> *simtvars_save;
4724 
4725   /* The gimplifier uses input_location in too many places, such as
4726      internal_get_tmp_var ().  */
4727   location_t saved_location = input_location;
4728   input_location = gimple_location (stmt);
4729 
4730   /* From here on, we're only interested in CALL_EXPRs.  */
4731   call_stmt = dyn_cast <gcall *> (stmt);
4732   if (!call_stmt)
4733     goto egress;
4734 
4735   cg_edge = id->dst_node->get_edge (stmt);
4736   gcc_checking_assert (cg_edge);
4737   /* First, see if we can figure out what function is being called.
4738      If we cannot, then there is no hope of inlining the function.  */
4739   if (cg_edge->indirect_unknown_callee)
4740     goto egress;
4741   fn = cg_edge->callee->decl;
4742   gcc_checking_assert (fn);
4743 
4744   /* If FN is a declaration of a function in a nested scope that was
4745      globally declared inline, we don't set its DECL_INITIAL.
4746      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4747      C++ front-end uses it for cdtors to refer to their internal
4748      declarations, which are not real functions.  Fortunately those
4749      don't have trees to be saved, so we can tell by checking their
4750      gimple_body.  */
4751   if (!DECL_INITIAL (fn)
4752       && DECL_ABSTRACT_ORIGIN (fn)
4753       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4754     fn = DECL_ABSTRACT_ORIGIN (fn);
4755 
4756   /* Don't try to inline functions that are not well-suited to inlining.  */
4757   if (cg_edge->inline_failed)
4758     {
4759       reason = cg_edge->inline_failed;
4760       /* If this call was originally indirect, we do not want to emit any
4761 	 inlining related warnings or sorry messages because there are no
4762 	 guarantees regarding those.  */
4763       if (cg_edge->indirect_inlining_edge)
4764 	goto egress;
4765 
4766       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4767           /* For extern inline functions that get redefined we have always
4768 	     silently ignored the always_inline flag.  Better behavior would
4769 	     be to keep both bodies and use the extern inline body
4770 	     for inlining, but we can't do that because front ends overwrite
4771 	     the body.  */
4772 	  && !cg_edge->callee->redefined_extern_inline
4773 	  /* During early inline pass, report only when optimization is
4774 	     not turned on.  */
4775 	  && (symtab->global_info_ready
4776 	      || !optimize
4777 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4778 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4779 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4780 	{
4781 	  error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4782 		 cgraph_inline_failed_string (reason));
4783 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4784 	    inform (gimple_location (stmt), "called from here");
4785 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4786 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4787                    "called from this function");
4788 	}
4789       else if (opt_for_fn (fn, warn_inline)
4790 	       && DECL_DECLARED_INLINE_P (fn)
4791 	       && !DECL_NO_INLINE_WARNING_P (fn)
4792 	       && !DECL_IN_SYSTEM_HEADER (fn)
4793 	       && reason != CIF_UNSPECIFIED
4794 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4795 	       /* Do not warn about not inlined recursive calls.  */
4796 	       && !cg_edge->recursive_p ()
4797 	       /* Avoid warnings during early inline pass. */
4798 	       && symtab->global_info_ready)
4799 	{
4800 	  auto_diagnostic_group d;
4801 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4802 		       fn, _(cgraph_inline_failed_string (reason))))
4803 	    {
4804 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4805 		inform (gimple_location (stmt), "called from here");
4806 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4807 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4808                        "called from this function");
4809 	    }
4810 	}
4811       goto egress;
4812     }
4813   id->src_node = cg_edge->callee;
4814 
4815   /* If callee is thunk, all we need is to adjust the THIS pointer
4816      and redirect to function being thunked.  */
4817   if (id->src_node->thunk.thunk_p)
4818     {
4819       cgraph_edge *edge;
4820       tree virtual_offset = NULL;
4821       profile_count count = cg_edge->count;
4822       tree op;
4823       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4824 
4825       cgraph_edge::remove (cg_edge);
4826       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4827 		   		           gimple_uid (stmt),
4828 				   	   profile_count::one (),
4829 					   profile_count::one (),
4830 				           true);
4831       edge->count = count;
4832       if (id->src_node->thunk.virtual_offset_p)
4833         virtual_offset = size_int (id->src_node->thunk.virtual_value);
4834       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4835 			      NULL);
4836       gsi_insert_before (&iter, gimple_build_assign (op,
4837 						    gimple_call_arg (stmt, 0)),
4838 			 GSI_NEW_STMT);
4839       gcc_assert (id->src_node->thunk.this_adjusting);
4840       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4841 			 virtual_offset, id->src_node->thunk.indirect_offset);
4842 
4843       gimple_call_set_arg (stmt, 0, op);
4844       gimple_call_set_fndecl (stmt, edge->callee->decl);
4845       update_stmt (stmt);
4846       id->src_node->remove ();
4847       expand_call_inline (bb, stmt, id, to_purge);
4848       maybe_remove_unused_call_args (cfun, stmt);
4849       return true;
4850     }
4851   fn = cg_edge->callee->decl;
4852   cg_edge->callee->get_untransformed_body ();
4853 
4854   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4855     cg_edge->callee->verify ();
4856 
4857   /* We will be inlining this callee.  */
4858   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4859 
4860   /* Update the callers EH personality.  */
4861   if (DECL_FUNCTION_PERSONALITY (fn))
4862     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4863       = DECL_FUNCTION_PERSONALITY (fn);
4864 
4865   /* Split the block before the GIMPLE_CALL.  */
4866   stmt_gsi = gsi_for_stmt (stmt);
4867   gsi_prev (&stmt_gsi);
4868   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4869   bb = e->src;
4870   return_block = e->dest;
4871   remove_edge (e);
4872 
4873   /* If the GIMPLE_CALL was the last statement of BB, it may have
4874      been the source of abnormal edges.  In this case, schedule
4875      the removal of dead abnormal edges.  */
4876   gsi = gsi_start_bb (return_block);
4877   gsi_next (&gsi);
4878   purge_dead_abnormal_edges = gsi_end_p (gsi);
4879 
4880   stmt_gsi = gsi_start_bb (return_block);
4881 
4882   /* Build a block containing code to initialize the arguments, the
4883      actual inline expansion of the body, and a label for the return
4884      statements within the function to jump to.  The type of the
4885      statement expression is the return type of the function call.
4886      ???  If the call does not have an associated block then we will
4887      remap all callee blocks to NULL, effectively dropping most of
4888      its debug information.  This should only happen for calls to
4889      artificial decls inserted by the compiler itself.  We need to
4890      either link the inlined blocks into the caller block tree or
4891      not refer to them in any way to not break GC for locations.  */
4892   if (tree block = gimple_block (stmt))
4893     {
4894       /* We do want to assign a BLOCK_SOURCE_LOCATION other than UNKNOWN_LOCATION
4895          so that inlined_function_outer_scope_p returns true for this BLOCK.  */
4896       location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4897       if (loc == UNKNOWN_LOCATION)
4898 	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4899       if (loc == UNKNOWN_LOCATION)
4900 	loc = BUILTINS_LOCATION;
4901       id->block = make_node (BLOCK);
4902       BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4903       BLOCK_SOURCE_LOCATION (id->block) = loc;
4904       prepend_lexical_block (block, id->block);
4905     }
4906 
4907   /* Local declarations will be replaced by their equivalents in this map.  */
4908   st = id->decl_map;
4909   id->decl_map = new hash_map<tree, tree>;
4910   dst = id->debug_map;
4911   id->debug_map = NULL;
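  /* When stack slots may be shared, note how many basic blocks the caller
     currently has; the body-copying code uses this so that clobbers for the
     callee's locals can also be added to EH landing pads, keeping their
     slots reusable on exception paths.  */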
4912   if (flag_stack_reuse != SR_NONE)
4913     id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4914 
4915   /* Record the function we are about to inline.  */
4916   id->src_fn = fn;
4917   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4918   id->reset_location = DECL_IGNORED_P (fn);
4919   id->call_stmt = call_stmt;
4920 
4921   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4922      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4923   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4924   simtvars_save = id->dst_simt_vars;
4925   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4926       && (simduid = bb->loop_father->simduid) != NULL_TREE
4927       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4928       && single_imm_use (simduid, &use, &simtenter_stmt)
4929       && is_gimple_call (simtenter_stmt)
4930       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4931     vec_alloc (id->dst_simt_vars, 0);
4932   else
4933     id->dst_simt_vars = NULL;
4934 
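  /* If the callee has no profile data, the combined body cannot be treated
     as profiled either.  */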
4935   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4936     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4937 
4938   /* If the src function contains an IFN_VA_ARG, then so will the dst
4939      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4940   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4941   src_properties = id->src_cfun->curr_properties & prop_mask;
4942   if (src_properties != prop_mask)
4943     dst_cfun->curr_properties &= src_properties | ~prop_mask;
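  /* A callee that calls __builtin_eh_return makes the caller do so too.  */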
4944   dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4945 
4946   gcc_assert (!id->src_cfun->after_inlining);
4947 
4948   id->entry_bb = bb;
4949   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4950     {
4951       gimple_stmt_iterator si = gsi_last_bb (bb);
4952       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4953       						   NOT_TAKEN),
4954 			GSI_NEW_STMT);
4955     }
4956   initialize_inlined_parameters (id, stmt, fn, bb);
4957   if (debug_nonbind_markers_p && debug_inline_points && id->block
4958       && inlined_function_outer_scope_p (id->block))
4959     {
4960       gimple_stmt_iterator si = gsi_last_bb (bb);
4961       gsi_insert_after (&si, gimple_build_debug_inline_entry
4962 			(id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4963 			GSI_NEW_STMT);
4964     }
4965 
4966   if (DECL_INITIAL (fn))
4967     {
4968       if (gimple_block (stmt))
4969 	{
4970 	  tree *var;
4971 
4972 	  prepend_lexical_block (id->block,
4973 				 remap_blocks (DECL_INITIAL (fn), id));
4974 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4975 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4976 				   == NULL_TREE));
4977 	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4978 	     otherwise, in DWARF, DW_TAG_formal_parameter entries will not be
4979 	     children of DW_TAG_inlined_subroutine but of a DW_TAG_lexical_block
4980 	     under it.  The parameters can then be evaluated in the debugger,
4981 	     but don't show up in backtraces.  */
4982 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4983 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4984 	      {
4985 		tree v = *var;
4986 		*var = TREE_CHAIN (v);
4987 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4988 		BLOCK_VARS (id->block) = v;
4989 	      }
4990 	    else
4991 	      var = &TREE_CHAIN (*var);
4992 	}
4993       else
4994 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4995     }
4996 
4997   /* Return statements in the function body will be replaced by jumps
4998      to the RET_LABEL.  */
4999   gcc_assert (DECL_INITIAL (fn));
5000   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5001 
5002   /* Find the LHS to which the result of this call is assigned.  */
5003   return_slot = NULL;
5004   if (gimple_call_lhs (stmt))
5005     {
5006       modify_dest = gimple_call_lhs (stmt);
5007 
5008       /* The function which we are inlining might not return a value,
5009 	 in which case we should issue a warning that the function
5010 	 does not return a value.  In that case the optimizers will
5011 	 see that the variable to which the value is assigned was not
5012 	 initialized.  We do not want to issue a warning about that
5013 	 uninitialized variable.  */
5014       if (DECL_P (modify_dest))
5015 	TREE_NO_WARNING (modify_dest) = 1;
5016 
5017       if (gimple_call_return_slot_opt_p (call_stmt))
5018 	{
5019 	  return_slot = modify_dest;
5020 	  modify_dest = NULL;
5021 	}
5022     }
5023   else
5024     modify_dest = NULL;
5025 
5026   /* If we are inlining a call to the C++ operator new, we don't want
5027      to use type based alias analysis on the return value.  Otherwise
5028      we may get confused if the compiler sees that the inlined new
5029      function returns a pointer which was just deleted.  See bug
5030      33407.  */
5031   if (DECL_IS_OPERATOR_NEW_P (fn))
5032     {
5033       return_slot = NULL;
5034       modify_dest = NULL;
5035     }
5036 
5037   /* Declare the return variable for the function.  */
5038   use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5039 
5040   /* Add local vars in this inlined callee to caller.  */
5041   add_local_variables (id->src_cfun, cfun, id);
5042 
5043   if (id->src_node->clone.performed_splits)
5044     {
5045       /* Any calls from the inlined function will be turned into calls from the
5046 	 function we inline into.  We must preserve the notes about how to split
5047 	 parameters so that such calls can be redirected/updated.  */
5048       unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
5049       for (unsigned i = 0; i < len; i++)
5050 	{
5051 	  ipa_param_performed_split ps
5052 	    = (*id->src_node->clone.performed_splits)[i];
5053 	  ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5054 	  vec_safe_push (id->dst_node->clone.performed_splits, ps);
5055 	}
5056 
5057       if (flag_checking)
5058 	{
5059 	  len = vec_safe_length (id->dst_node->clone.performed_splits);
5060 	  for (unsigned i = 0; i < len; i++)
5061 	    {
5062 	      ipa_param_performed_split *ps1
5063 		= &(*id->dst_node->clone.performed_splits)[i];
5064 	      for (unsigned j = i + 1; j < len; j++)
5065 		{
5066 		  ipa_param_performed_split *ps2
5067 		    = &(*id->dst_node->clone.performed_splits)[j];
5068 		  gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5069 			      || ps1->unit_offset != ps2->unit_offset);
5070 		}
5071 	    }
5072 	}
5073     }
5074 
5075   if (dump_enabled_p ())
5076     {
5077       char buf[128];
5078       snprintf (buf, sizeof(buf), "%4.2f",
5079 		cg_edge->sreal_frequency ().to_double ());
5080       dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5081 		       call_stmt,
5082 		       "Inlining %C to %C with frequency %s\n",
5083 		       id->src_node, id->dst_node, buf);
5084       if (dump_file && (dump_flags & TDF_DETAILS))
5085 	{
5086 	  id->src_node->dump (dump_file);
5087 	  id->dst_node->dump (dump_file);
5088 	}
5089     }
5090 
5091   /* This is it.  Duplicate the callee body.  Assume callee is
5092      pre-gimplified.  Note that we must not alter the caller
5093      function in any way before this point, as this CALL_EXPR may be
5094      a self-referential call; if we're calling ourselves, we need to
5095      duplicate our body before altering anything.  */
5096   copy_body (id, bb, return_block, NULL);
5097 
5098   reset_debug_bindings (id, stmt_gsi);
5099 
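  /* When stack slots may be shared, clobber the variables created for the
     callee's (non-volatile) parameters; their lifetime ends here, which
     allows later passes to reuse their stack slots.  */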
5100   if (flag_stack_reuse != SR_NONE)
5101     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5102       if (!TREE_THIS_VOLATILE (p))
5103 	{
5104 	  tree *varp = id->decl_map->get (p);
5105 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5106 	    {
5107 	      tree clobber = build_clobber (TREE_TYPE (*varp));
5108 	      gimple *clobber_stmt;
5109 	      clobber_stmt = gimple_build_assign (*varp, clobber);
5110 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
5111 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5112 	    }
5113 	}
5114 
5115   /* Reset the escaped solution.  */
5116   if (cfun->gimple_df)
5117     pt_solution_reset (&cfun->gimple_df->escaped);
5118 
5119   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
5120   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5121     {
5122       size_t nargs = gimple_call_num_args (simtenter_stmt);
5123       vec<tree> *vars = id->dst_simt_vars;
5124       auto_vec<tree> newargs (nargs + vars->length ());
5125       for (size_t i = 0; i < nargs; i++)
5126 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5127       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5128 	{
5129 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5130 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5131 	}
5132       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5133       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5134       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5135       gsi_replace (&gsi, g, false);
5136     }
5137   vec_free (id->dst_simt_vars);
5138   id->dst_simt_vars = simtvars_save;
5139 
5140   /* Clean up.  */
5141   if (id->debug_map)
5142     {
5143       delete id->debug_map;
5144       id->debug_map = dst;
5145     }
5146   delete id->decl_map;
5147   id->decl_map = st;
5148 
5149   /* Unlink the call's virtual operands before replacing it.  */
5150   unlink_stmt_vdef (stmt);
5151   if (gimple_vdef (stmt)
5152       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5153     release_ssa_name (gimple_vdef (stmt));
5154 
5155   /* If the inlined function returns a result that we care about,
5156      substitute the GIMPLE_CALL with an assignment of the return
5157      variable to the LHS of the call.  That is, if STMT was
5158      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
5159   if (use_retvar && gimple_call_lhs (stmt))
5160     {
5161       gimple *old_stmt = stmt;
5162       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5163       gimple_set_location (stmt, gimple_location (old_stmt));
5164       gsi_replace (&stmt_gsi, stmt, false);
5165       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5166       /* Append a clobber for id->retvar if easily possible.  */
5167       if (flag_stack_reuse != SR_NONE
5168 	  && id->retvar
5169 	  && VAR_P (id->retvar)
5170 	  && id->retvar != return_slot
5171 	  && id->retvar != modify_dest
5172 	  && !TREE_THIS_VOLATILE (id->retvar)
5173 	  && !is_gimple_reg (id->retvar)
5174 	  && !stmt_ends_bb_p (stmt))
5175 	{
5176 	  tree clobber = build_clobber (TREE_TYPE (id->retvar));
5177 	  gimple *clobber_stmt;
5178 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
5179 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5180 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5181 	}
5182     }
5183   else
5184     {
5185       /* Handle the case of inlining a function with no return
5186 	 statement, which causes the return value to become undefined.  */
5187       if (gimple_call_lhs (stmt)
5188 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5189 	{
5190 	  tree name = gimple_call_lhs (stmt);
5191 	  tree var = SSA_NAME_VAR (name);
5192 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
5193 
5194 	  if (def)
5195 	    {
5196 	      /* If the variable is used undefined, make this name
5197 		 undefined via a move.  */
5198 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5199 	      gsi_replace (&stmt_gsi, stmt, true);
5200 	    }
5201 	  else
5202 	    {
5203 	      if (!var)
5204 		{
5205 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5206 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5207 		}
5208 	      /* Otherwise make this variable undefined.  */
5209 	      gsi_remove (&stmt_gsi, true);
5210 	      set_ssa_default_def (cfun, var, name);
5211 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5212 	    }
5213 	}
5214       /* Replace with a clobber for id->retvar.  */
5215       else if (flag_stack_reuse != SR_NONE
5216 	       && id->retvar
5217 	       && VAR_P (id->retvar)
5218 	       && id->retvar != return_slot
5219 	       && id->retvar != modify_dest
5220 	       && !TREE_THIS_VOLATILE (id->retvar)
5221 	       && !is_gimple_reg (id->retvar))
5222 	{
5223 	  tree clobber = build_clobber (TREE_TYPE (id->retvar));
5224 	  gimple *clobber_stmt;
5225 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
5226 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
5227 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
5228 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5229 	}
5230       else
5231 	gsi_remove (&stmt_gsi, true);
5232     }
5233 
5234   if (purge_dead_abnormal_edges)
5235     bitmap_set_bit (to_purge, return_block->index);
5236 
5237   /* If the value of the new expression is ignored, that's OK.  We
5238      don't warn about this for CALL_EXPRs, so we shouldn't warn about
5239      the equivalent inlined version either.  */
5240   if (is_gimple_assign (stmt))
5241     {
5242       gcc_assert (gimple_assign_single_p (stmt)
5243 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5244       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5245     }
5246 
5247   id->add_clobbers_to_eh_landing_pads = 0;
5248 
5249   /* Output the inlining info for this abstract function, since it has been
5250      inlined.  If we don't do this now, we can lose the information about the
5251      variables in the function when the blocks get blown away as soon as we
5252      remove the cgraph node.  */
5253   if (gimple_block (stmt))
5254     (*debug_hooks->outlining_inline_function) (fn);
5255 
5256   /* Update callgraph if needed.  */
5257   cg_edge->callee->remove ();
5258 
5259   id->block = NULL_TREE;
5260   id->retvar = NULL_TREE;
5261   successfully_inlined = true;
5262 
5263  egress:
5264   input_location = saved_location;
5265   return successfully_inlined;
5266 }
5267 
5268 /* Expand call statements found in basic block BB.
5269    We can only have CALL_EXPRs as the "toplevel" tree code or nested
5270    in a MODIFY_EXPR.  */
5271 
5272 static bool
5273 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5274 			    bitmap to_purge)
5275 {
5276   gimple_stmt_iterator gsi;
5277   bool inlined = false;
5278 
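  /* Walk the statements backwards and advance the iterator before expanding
     each call, so that splitting the block during inlining does not
     invalidate the iterator.  */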
5279   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5280     {
5281       gimple *stmt = gsi_stmt (gsi);
5282       gsi_prev (&gsi);
5283 
5284       if (is_gimple_call (stmt)
5285 	  && !gimple_call_internal_p (stmt))
5286 	inlined |= expand_call_inline (bb, stmt, id, to_purge);
5287     }
5288 
5289   return inlined;
5290 }
5291 
5292 
5293 /* Walk all basic blocks created after FIRST and try to fold every statement
5294    in the STATEMENTS pointer set.  */
5295 
5296 static void
5297 fold_marked_statements (int first, hash_set<gimple *> *statements)
5298 {
5299   auto_bitmap to_purge;
5300 
5301   auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5302   auto_sbitmap visited (last_basic_block_for_fn (cfun));
5303   bitmap_clear (visited);
5304 
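  /* Do a depth-first walk over the blocks reachable from the entry block;
     only statements recorded in STATEMENTS and located in blocks created
     after FIRST are folded.  */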
5305   stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5306   while (!stack.is_empty ())
5307     {
5308       /* Look at the edge on the top of the stack.  */
5309       edge e = stack.pop ();
5310       basic_block dest = e->dest;
5311 
5312       if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5313 	  || bitmap_bit_p (visited, dest->index))
5314 	continue;
5315 
5316       bitmap_set_bit (visited, dest->index);
5317 
5318       if (dest->index >= first)
5319 	for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5320 	     !gsi_end_p (gsi); gsi_next (&gsi))
5321 	  {
5322 	    if (!statements->contains (gsi_stmt (gsi)))
5323 	      continue;
5324 
5325 	    gimple *old_stmt = gsi_stmt (gsi);
5326 	    tree old_decl = (is_gimple_call (old_stmt)
5327 			     ? gimple_call_fndecl (old_stmt) : 0);
5328 	    if (old_decl && fndecl_built_in_p (old_decl))
5329 	      {
5330 		/* Folding builtins can create multiple instructions,
5331 		   we need to look at all of them.  */
5332 		gimple_stmt_iterator i2 = gsi;
5333 		gsi_prev (&i2);
5334 		if (fold_stmt (&gsi))
5335 		  {
5336 		    gimple *new_stmt;
5337 		    /* If a builtin at the end of a bb folded into nothing,
5338 		       the following loop won't work.  */
5339 		    if (gsi_end_p (gsi))
5340 		      {
5341 			cgraph_update_edges_for_call_stmt (old_stmt,
5342 							   old_decl, NULL);
5343 			break;
5344 		      }
5345 		    if (gsi_end_p (i2))
5346 		      i2 = gsi_start_bb (dest);
5347 		    else
5348 		      gsi_next (&i2);
5349 		    while (1)
5350 		      {
5351 			new_stmt = gsi_stmt (i2);
5352 			update_stmt (new_stmt);
5353 			cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5354 							   new_stmt);
5355 
5356 			if (new_stmt == gsi_stmt (gsi))
5357 			  {
5358 			    /* It is okay to check only the very last
5359 			       of these statements.  If it is a throwing
5360 			       statement nothing will change.  If it isn't,
5361 			       this can remove EH edges.  The only way that
5362 			       could be wrong is if some intermediate stmts
5363 			       throw but the last one doesn't; that would
5364 			       mean we'd have to split the block, which we
5365 			       can't do here and would lose anyway.  And as
5366 			       builtins probably never throw, this is all
5367 			       moot anyway.  */
5368 			    if (maybe_clean_or_replace_eh_stmt (old_stmt,
5369 								new_stmt))
5370 			      bitmap_set_bit (to_purge, dest->index);
5371 			    break;
5372 			  }
5373 			gsi_next (&i2);
5374 		      }
5375 		  }
5376 	      }
5377 	    else if (fold_stmt (&gsi))
5378 	      {
5379 		/* Re-read the statement from GSI as fold_stmt() may
5380 		   have changed it.  */
5381 		gimple *new_stmt = gsi_stmt (gsi);
5382 		update_stmt (new_stmt);
5383 
5384 		if (is_gimple_call (old_stmt)
5385 		    || is_gimple_call (new_stmt))
5386 		  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5387 						     new_stmt);
5388 
5389 		if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5390 		  bitmap_set_bit (to_purge, dest->index);
5391 	      }
5392 	  }
5393 
5394       if (EDGE_COUNT (dest->succs) > 0)
5395 	{
5396 	  /* Avoid warnings emitted from folding statements that
5397 	     became unreachable because of inlined function parameter
5398 	     propagation.  */
5399 	  e = find_taken_edge (dest, NULL_TREE);
5400 	  if (e)
5401 	    stack.quick_push (e);
5402 	  else
5403 	    {
5404 	      edge_iterator ei;
5405 	      FOR_EACH_EDGE (e, ei, dest->succs)
5406 		stack.safe_push (e);
5407 	    }
5408 	}
5409     }
5410 
5411   gimple_purge_all_dead_eh_edges (to_purge);
5412 }
5413 
5414 /* Expand calls to inline functions in the body of FN.  */
5415 
5416 unsigned int
5417 optimize_inline_calls (tree fn)
5418 {
5419   copy_body_data id;
5420   basic_block bb;
5421   int last = n_basic_blocks_for_fn (cfun);
5422   bool inlined_p = false;
5423 
5424   /* Clear out ID.  */
5425   memset (&id, 0, sizeof (id));
5426 
5427   id.src_node = id.dst_node = cgraph_node::get (fn);
5428   gcc_assert (id.dst_node->definition);
5429   id.dst_fn = fn;
5430   /* Or any functions that aren't finished yet.  */
5431   if (current_function_decl)
5432     id.dst_fn = current_function_decl;
5433 
5434   id.copy_decl = copy_decl_maybe_to_var;
5435   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5436   id.transform_new_cfg = false;
5437   id.transform_return_to_modify = true;
5438   id.transform_parameter = true;
5439   id.transform_lang_insert_block = NULL;
5440   id.statements_to_fold = new hash_set<gimple *>;
5441 
5442   push_gimplify_context ();
5443 
5444   /* We make no attempts to keep dominance info up-to-date.  */
5445   free_dominance_info (CDI_DOMINATORS);
5446   free_dominance_info (CDI_POST_DOMINATORS);
5447 
5448   /* Register specific gimple functions.  */
5449   gimple_register_cfg_hooks ();
5450 
5451   /* Reach the trees by walking over the CFG, and note the
5452      enclosing basic-blocks in the call edges.  */
5453   /* We walk the blocks going forward, because inlined function bodies
5454      will split id->current_basic_block, and the new blocks will
5455      follow it; we'll trudge through them, processing their CALL_EXPRs
5456      along the way.  */
5457   auto_bitmap to_purge;
5458   FOR_EACH_BB_FN (bb, cfun)
5459     inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5460 
5461   pop_gimplify_context (NULL);
5462 
5463   if (flag_checking)
5464     {
5465       struct cgraph_edge *e;
5466 
5467       id.dst_node->verify ();
5468 
5469       /* Double check that we inlined everything we are supposed to inline.  */
5470       for (e = id.dst_node->callees; e; e = e->next_callee)
5471 	gcc_assert (e->inline_failed);
5472     }
5473 
5474   /* If we didn't inline into the function there is nothing to do.  */
5475   if (!inlined_p)
5476     {
5477       delete id.statements_to_fold;
5478       return 0;
5479     }
5480 
5481   /* Fold queued statements.  */
5482   update_max_bb_count ();
5483   fold_marked_statements (last, id.statements_to_fold);
5484   delete id.statements_to_fold;
5485 
5486   /* Finally purge EH and abnormal edges from the call stmts we inlined.
5487      We need to do this after fold_marked_statements since that may walk
5488      the SSA use-def chain.  */
5489   unsigned i;
5490   bitmap_iterator bi;
5491   EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5492     {
5493       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5494       if (bb)
5495 	{
5496 	  gimple_purge_dead_eh_edges (bb);
5497 	  gimple_purge_dead_abnormal_call_edges (bb);
5498 	}
5499     }
5500 
5501   gcc_assert (!id.debug_stmts.exists ());
5502 
5503   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5504   number_blocks (fn);
5505 
5506   delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5507   id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5508 
5509   if (flag_checking)
5510     id.dst_node->verify ();
5511 
5512   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5513      not possible yet - the IPA passes might make various functions not
5514      throw, and they don't care to proactively update local EH info.  This is
5515      done later in the fixup_cfg pass, which also executes the verification.  */
5516   return (TODO_update_ssa
5517 	  | TODO_cleanup_cfg
5518 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5519 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5520 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5521 	     ? TODO_rebuild_frequencies : 0));
5522 }
5523 
5524 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5525 
5526 tree
5527 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5528 {
5529   enum tree_code code = TREE_CODE (*tp);
5530   enum tree_code_class cl = TREE_CODE_CLASS (code);
5531 
5532   /* We make copies of most nodes.  */
5533   if (IS_EXPR_CODE_CLASS (cl)
5534       || code == TREE_LIST
5535       || code == TREE_VEC
5536       || code == TYPE_DECL
5537       || code == OMP_CLAUSE)
5538     {
5539       /* Because the chain gets clobbered when we make a copy, we save it
5540 	 here.  */
5541       tree chain = NULL_TREE, new_tree;
5542 
5543       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5544 	chain = TREE_CHAIN (*tp);
5545 
5546       /* Copy the node.  */
5547       new_tree = copy_node (*tp);
5548 
5549       *tp = new_tree;
5550 
5551       /* Now, restore the chain, if appropriate.  That will cause
5552 	 walk_tree to walk into the chain as well.  */
5553       if (code == PARM_DECL
5554 	  || code == TREE_LIST
5555 	  || code == OMP_CLAUSE)
5556 	TREE_CHAIN (*tp) = chain;
5557 
5558       /* For now, we don't update BLOCKs when we make copies.  So, we
5559 	 have to nullify all BIND_EXPRs.  */
5560       if (TREE_CODE (*tp) == BIND_EXPR)
5561 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5562     }
5563   else if (code == CONSTRUCTOR)
5564     {
5565       /* CONSTRUCTOR nodes need special handling because
5566          we need to duplicate the vector of elements.  */
5567       tree new_tree;
5568 
5569       new_tree = copy_node (*tp);
5570       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5571       *tp = new_tree;
5572     }
5573   else if (code == STATEMENT_LIST)
5574     /* We used to just abort on STATEMENT_LIST, but we can run into them
5575        with statement-expressions (c++/40975).  */
5576     copy_statement_list (tp);
5577   else if (TREE_CODE_CLASS (code) == tcc_type)
5578     *walk_subtrees = 0;
5579   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5580     *walk_subtrees = 0;
5581   else if (TREE_CODE_CLASS (code) == tcc_constant)
5582     *walk_subtrees = 0;
5583   return NULL_TREE;
5584 }
5585 
5586 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5587    information indicating to what new SAVE_EXPR this one should be mapped,
5588    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5589    the function into which the copy will be placed.  */
5590 
5591 static void
5592 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5593 {
5594   tree *n;
5595   tree t;
5596 
5597   /* See if we already encountered this SAVE_EXPR.  */
5598   n = st->get (*tp);
5599 
5600   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5601   if (!n)
5602     {
5603       t = copy_node (*tp);
5604 
5605       /* Remember this SAVE_EXPR.  */
5606       st->put (*tp, t);
5607       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5608       st->put (t, t);
5609     }
5610   else
5611     {
5612       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5613       *walk_subtrees = 0;
5614       t = *n;
5615     }
5616 
5617   /* Replace this SAVE_EXPR with the copy.  */
5618   *tp = t;
5619 }
5620 
5621 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5622    label, copies the declaration and enters it in the splay_tree in DATA (which
5623    is really a 'copy_body_data *').  */
5624 
5625 static tree
5626 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5627 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5628 		        struct walk_stmt_info *wi)
5629 {
5630   copy_body_data *id = (copy_body_data *) wi->info;
5631   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5632 
5633   if (stmt)
5634     {
5635       tree decl = gimple_label_label (stmt);
5636 
5637       /* Copy the decl and remember the copy.  */
5638       insert_decl_map (id, decl, id->copy_decl (decl, id));
5639     }
5640 
5641   return NULL_TREE;
5642 }
5643 
5644 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5645 						  struct walk_stmt_info *wi);
5646 
5647 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5648    Using the decl map pointed to by ST (really ID->decl_map),
5649    remaps all local declarations to appropriate replacements in gimple
5650    operands.  */
5651 
5652 static tree
5653 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5654 {
5655   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5656   copy_body_data *id = (copy_body_data *) wi->info;
5657   hash_map<tree, tree> *st = id->decl_map;
5658   tree *n;
5659   tree expr = *tp;
5660 
5661   /* For recursive invocations this is no longer the LHS itself.  */
5662   bool is_lhs = wi->is_lhs;
5663   wi->is_lhs = false;
5664 
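  /* SSA names are remapped directly; if this operand is the LHS, the
     remapped name's defining statement becomes the current statement.  */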
5665   if (TREE_CODE (expr) == SSA_NAME)
5666     {
5667       *tp = remap_ssa_name (*tp, id);
5668       *walk_subtrees = 0;
5669       if (is_lhs)
5670 	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5671     }
5672   /* Only a local declaration (variable or label).  */
5673   else if ((VAR_P (expr) && !TREE_STATIC (expr))
5674 	   || TREE_CODE (expr) == LABEL_DECL)
5675     {
5676       /* Lookup the declaration.  */
5677       n = st->get (expr);
5678 
5679       /* If it's there, remap it.  */
5680       if (n)
5681 	*tp = *n;
5682       *walk_subtrees = 0;
5683     }
5684   else if (TREE_CODE (expr) == STATEMENT_LIST
5685 	   || TREE_CODE (expr) == BIND_EXPR
5686 	   || TREE_CODE (expr) == SAVE_EXPR)
5687     gcc_unreachable ();
5688   else if (TREE_CODE (expr) == TARGET_EXPR)
5689     {
5690       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5691          It's OK for this to happen if it was part of a subtree that
5692          isn't immediately expanded, such as operand 2 of another
5693          TARGET_EXPR.  */
5694       if (!TREE_OPERAND (expr, 1))
5695 	{
5696 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5697 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5698 	}
5699     }
5700   else if (TREE_CODE (expr) == OMP_CLAUSE)
5701     {
5702       /* Before the omplower pass completes, some OMP clauses can contain
5703 	 sequences that are neither copied by gimple_seq_copy nor walked by
5704 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5705 	 in those situations, we have to copy and process them explicitly.  */
5706 
5707       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5708 	{
5709 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5710 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5711 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5712 	}
5713       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5714 	{
5715 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5716 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5717 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5718 	}
5719       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5720 	{
5721 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5722 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5723 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5724 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5725 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5726 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5727 	}
5728     }
5729 
5730   /* Keep iterating.  */
5731   return NULL_TREE;
5732 }
5733 
5734 
5735 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5736    Using the decl map in ID (really ID->decl_map),
5737    remaps all local declarations to appropriate replacements in gimple
5738    statements.  */
5739 
5740 static tree
5741 replace_locals_stmt (gimple_stmt_iterator *gsip,
5742 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5743 		     struct walk_stmt_info *wi)
5744 {
5745   copy_body_data *id = (copy_body_data *) wi->info;
5746   gimple *gs = gsi_stmt (*gsip);
5747 
5748   if (gbind *stmt = dyn_cast <gbind *> (gs))
5749     {
5750       tree block = gimple_bind_block (stmt);
5751 
5752       if (block)
5753 	{
5754 	  remap_block (&block, id);
5755 	  gimple_bind_set_block (stmt, block);
5756 	}
5757 
5758       /* This will remap a lot of the same decls again, but this should be
5759 	 harmless.  */
5760       if (gimple_bind_vars (stmt))
5761 	{
5762 	  tree old_var, decls = gimple_bind_vars (stmt);
5763 
5764 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5765 	    if (!can_be_nonlocal (old_var, id)
5766 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5767 	      remap_decl (old_var, id);
5768 
5769 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5770 	  id->prevent_decl_creation_for_types = true;
5771 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5772 	  id->prevent_decl_creation_for_types = false;
5773 	}
5774     }
5775 
5776   /* Keep iterating.  */
5777   return NULL_TREE;
5778 }
5779 
5780 /* Create a copy of SEQ and remap all decls in it.  */
5781 
5782 static gimple_seq
5783 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5784 {
5785   if (!seq)
5786     return NULL;
5787 
5788   /* If there are any labels in OMP sequences, they can only be referred to in
5789      the sequence itself, and therefore we can do both steps here.  */
5790   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5791   gimple_seq copy = gimple_seq_copy (seq);
5792   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5793   return copy;
5794 }
5795 
5796 /* Copies everything in SEQ and replaces variables and labels local to
5797    current_function_decl.  */
5798 
5799 gimple_seq
5800 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5801 {
5802   copy_body_data id;
5803   struct walk_stmt_info wi;
5804   gimple_seq copy;
5805 
5806   /* There's nothing to do for NULL_TREE.  */
5807   if (seq == NULL)
5808     return seq;
5809 
5810   /* Set up ID.  */
5811   memset (&id, 0, sizeof (id));
5812   id.src_fn = current_function_decl;
5813   id.dst_fn = current_function_decl;
5814   id.src_cfun = cfun;
5815   id.decl_map = new hash_map<tree, tree>;
5816   id.debug_map = NULL;
5817 
5818   id.copy_decl = copy_decl_no_change;
5819   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5820   id.transform_new_cfg = false;
5821   id.transform_return_to_modify = false;
5822   id.transform_parameter = false;
5823   id.transform_lang_insert_block = NULL;
5824 
5825   /* Walk the tree once to find local labels.  */
5826   memset (&wi, 0, sizeof (wi));
5827   hash_set<tree> visited;
5828   wi.info = &id;
5829   wi.pset = &visited;
5830   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5831 
5832   copy = gimple_seq_copy (seq);
5833 
5834   /* Walk the copy, remapping decls.  */
5835   memset (&wi, 0, sizeof (wi));
5836   wi.info = &id;
5837   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5838 
5839   /* Clean up.  */
5840   delete id.decl_map;
5841   if (id.debug_map)
5842     delete id.debug_map;
5843   if (id.dependence_map)
5844     {
5845       delete id.dependence_map;
5846       id.dependence_map = NULL;
5847     }
5848 
5849   return copy;
5850 }
5851 
5852 
5853 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5854 
5855 static tree
5856 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5857 {
5858   if (*tp == data)
5859     return (tree) data;
5860   else
5861     return NULL;
5862 }
5863 
5864 DEBUG_FUNCTION bool
5865 debug_find_tree (tree top, tree search)
5866 {
5867   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5868 }
5869 
5870 
5871 /* Declare the variables created by the inliner.  Add all the variables in
5872    VARS to BIND_EXPR.  */
5873 
5874 static void
5875 declare_inline_vars (tree block, tree vars)
5876 {
5877   tree t;
5878   for (t = vars; t; t = DECL_CHAIN (t))
5879     {
5880       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5881       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5882       add_local_decl (cfun, t);
5883     }
5884 
5885   if (block)
5886     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5887 }
5888 
5889 /* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
5890    but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
5891    VAR_DECL translation.  */
5892 
5893 tree
5894 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5895 {
5896   /* Don't generate debug information for the copy if we wouldn't have
5897      generated it for the original either.  */
5898   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5899   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5900 
5901   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5902      declaration inspired this copy.  */
5903   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5904 
5905   /* The new variable/label has no RTL, yet.  */
5906   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5907       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5908     SET_DECL_RTL (copy, 0);
5909   /* For vector typed decls make sure to update DECL_MODE according
5910      to the new function context.  */
5911   if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5912     SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5913 
5914   /* These args would always appear unused, if not for this.  */
5915   TREE_USED (copy) = 1;
5916 
5917   /* Set the context for the new declaration.  */
5918   if (!DECL_CONTEXT (decl))
5919     /* Globals stay global.  */
5920     ;
5921   else if (DECL_CONTEXT (decl) != id->src_fn)
5922     /* Things that weren't in the scope of the function we're inlining
5923        from aren't in the scope we're inlining to, either.  */
5924     ;
5925   else if (TREE_STATIC (decl))
5926     /* Function-scoped static variables should stay in the original
5927        function.  */
5928     ;
5929   else
5930     {
5931       /* Ordinary automatic local variables are now in the scope of the
5932 	 new function.  */
5933       DECL_CONTEXT (copy) = id->dst_fn;
5934       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5935 	{
5936 	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5937 	    DECL_ATTRIBUTES (copy)
5938 	      = tree_cons (get_identifier ("omp simt private"), NULL,
5939 			   DECL_ATTRIBUTES (copy));
5940 	  id->dst_simt_vars->safe_push (copy);
5941 	}
5942     }
5943 
5944   return copy;
5945 }
5946 
5947 /* Create a new VAR_DECL that is identical in all respects to DECL except that
5948    DECL can be either a VAR_DECL, a PARM_DECL or a RESULT_DECL.  The original
5949    DECL must come from ID->src_fn and the copy will be part of ID->dst_fn.  */
5950 
5951 tree
5952 copy_decl_to_var (tree decl, copy_body_data *id)
5953 {
5954   tree copy, type;
5955 
5956   gcc_assert (TREE_CODE (decl) == PARM_DECL
5957 	      || TREE_CODE (decl) == RESULT_DECL);
5958 
5959   type = TREE_TYPE (decl);
5960 
5961   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5962 		     VAR_DECL, DECL_NAME (decl), type);
5963   if (DECL_PT_UID_SET_P (decl))
5964     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5965   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5966   TREE_READONLY (copy) = TREE_READONLY (decl);
5967   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5968   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5969   DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5970 
5971   return copy_decl_for_dup_finish (id, decl, copy);
5972 }
5973 
5974 /* Like copy_decl_to_var, but create a return slot object instead of a
5975    pointer variable for return by invisible reference.  */
5976 
5977 static tree
5978 copy_result_decl_to_var (tree decl, copy_body_data *id)
5979 {
5980   tree copy, type;
5981 
5982   gcc_assert (TREE_CODE (decl) == PARM_DECL
5983 	      || TREE_CODE (decl) == RESULT_DECL);
5984 
5985   type = TREE_TYPE (decl);
5986   if (DECL_BY_REFERENCE (decl))
5987     type = TREE_TYPE (type);
5988 
5989   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5990 		     VAR_DECL, DECL_NAME (decl), type);
5991   if (DECL_PT_UID_SET_P (decl))
5992     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5993   TREE_READONLY (copy) = TREE_READONLY (decl);
5994   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5995   if (!DECL_BY_REFERENCE (decl))
5996     {
5997       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5998       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5999     }
6000 
6001   return copy_decl_for_dup_finish (id, decl, copy);
6002 }
6003 
6004 tree
6005 copy_decl_no_change (tree decl, copy_body_data *id)
6006 {
6007   tree copy;
6008 
6009   copy = copy_node (decl);
6010 
6011   /* The COPY is not abstract; it will be generated in DST_FN.  */
6012   DECL_ABSTRACT_P (copy) = false;
6013   lang_hooks.dup_lang_specific_decl (copy);
6014 
6015   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6016      been taken; it's for internal bookkeeping in expand_goto_internal.  */
6017   if (TREE_CODE (copy) == LABEL_DECL)
6018     {
6019       TREE_ADDRESSABLE (copy) = 0;
6020       LABEL_DECL_UID (copy) = -1;
6021     }
6022 
6023   return copy_decl_for_dup_finish (id, decl, copy);
6024 }
6025 
6026 static tree
6027 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6028 {
6029   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6030     return copy_decl_to_var (decl, id);
6031   else
6032     return copy_decl_no_change (decl, id);
6033 }
6034 
6035 /* Return a copy of the function's argument tree without any modifications.  */
6036 
6037 static tree
6038 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6039 {
6040   tree arg, *parg;
6041   tree new_parm = NULL;
6042 
6043   parg = &new_parm;
6044   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6045     {
6046       tree new_tree = remap_decl (arg, id);
6047       if (TREE_CODE (new_tree) != PARM_DECL)
6048 	new_tree = id->copy_decl (arg, id);
6049       lang_hooks.dup_lang_specific_decl (new_tree);
6050       *parg = new_tree;
6051       parg = &DECL_CHAIN (new_tree);
6052     }
6053   return new_parm;
6054 }
6055 
6056 /* Return a copy of the function's static chain.  */
6057 static tree
6058 copy_static_chain (tree static_chain, copy_body_data * id)
6059 {
6060   tree *chain_copy, *pvar;
6061 
6062   chain_copy = &static_chain;
6063   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6064     {
6065       tree new_tree = remap_decl (*pvar, id);
6066       lang_hooks.dup_lang_specific_decl (new_tree);
6067       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6068       *pvar = new_tree;
6069     }
6070   return static_chain;
6071 }
6072 
6073 /* Return true if the function is allowed to be versioned.
6074    This is a guard for the versioning functionality.  */
6075 
6076 bool
6077 tree_versionable_function_p (tree fndecl)
6078 {
6079   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6080 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6081 }
6082 
6083 /* Update clone info after duplication.  */
6084 
6085 static void
6086 update_clone_info (copy_body_data * id)
6087 {
6088   vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6089     = id->dst_node->clone.performed_splits;
6090   if (cur_performed_splits)
6091     {
6092       unsigned len = cur_performed_splits->length ();
6093       for (unsigned i = 0; i < len; i++)
6094 	{
6095 	  ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6096 	  ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6097 	}
6098     }
6099 
6100   struct cgraph_node *node;
6101   if (!id->dst_node->clones)
6102     return;
6103   for (node = id->dst_node->clones; node != id->dst_node;)
6104     {
6105       /* First update replace maps to match the new body.  */
6106       if (node->clone.tree_map)
6107         {
6108 	  unsigned int i;
6109           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
6110 	    {
6111 	      struct ipa_replace_map *replace_info;
6112 	      replace_info = (*node->clone.tree_map)[i];
6113 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6114 	    }
6115 	}
6116       if (node->clone.performed_splits)
6117 	{
6118 	  unsigned len = vec_safe_length (node->clone.performed_splits);
6119 	  for (unsigned i = 0; i < len; i++)
6120 	    {
6121 	      ipa_param_performed_split *ps
6122 		= &(*node->clone.performed_splits)[i];
6123 	      ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6124 	    }
6125 	}
6126       if (unsigned len = vec_safe_length (cur_performed_splits))
6127 	{
6128 	  /* We do not want to add the current performed splits when we are saving
6129 	     a copy of the function body for later use during inlining; that would
6130 	     just duplicate all entries.  So let's have a look whether anything
6131 	     referring to the first dummy_decl is present.  */
6132 	  unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6133 	  ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6134 	  for (unsigned i = 0; i < dst_len; i++)
6135 	    if ((*node->clone.performed_splits)[i].dummy_decl
6136 		== first->dummy_decl)
6137 	      {
6138 		len = 0;
6139 		break;
6140 	      }
6141 
6142 	  for (unsigned i = 0; i < len; i++)
6143 	    vec_safe_push (node->clone.performed_splits,
6144 			   (*cur_performed_splits)[i]);
6145 	  if (flag_checking)
6146 	    {
6147 	      for (unsigned i = 0; i < dst_len; i++)
6148 		{
6149 		  ipa_param_performed_split *ps1
6150 		    = &(*node->clone.performed_splits)[i];
6151 		  for (unsigned j = i + 1; j < dst_len; j++)
6152 		    {
6153 		      ipa_param_performed_split *ps2
6154 			= &(*node->clone.performed_splits)[j];
6155 		      gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6156 				  || ps1->unit_offset != ps2->unit_offset);
6157 		    }
6158 		}
6159 	    }
6160 	}
6161 
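      /* Walk the clone tree in preorder: visit a node's clones first, then
	 its siblings, backtracking through clone_of once a subtree is
	 done.  */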
6162       if (node->clones)
6163 	node = node->clones;
6164       else if (node->next_sibling_clone)
6165 	node = node->next_sibling_clone;
6166       else
6167 	{
6168 	  while (node != id->dst_node && !node->next_sibling_clone)
6169 	    node = node->clone_of;
6170 	  if (node != id->dst_node)
6171 	    node = node->next_sibling_clone;
6172 	}
6173     }
6174 }
6175 
6176 /* Create a copy of a function's tree.
6177    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6178    of the original function and the new copied function
6179    respectively.  In case we want to replace a DECL
6180    tree with another tree while duplicating the function's
6181    body, TREE_MAP represents the mapping between these
6182    trees. If UPDATE_CLONES is set, the call_stmt fields
6183    of edges of clones of the function will be updated.
6184 
6185    If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype
6186    (i.e. the function parameters and return value) should be modified.
6187    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6188    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6189 */
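/* Illustrative sketch only: assuming cgraph nodes already exist for both
   decls, a caller wanting a plain copy with no parameter substitutions and
   no prototype changes could invoke

       tree_function_versioning (old_decl, new_decl,
				 NULL, NULL, false, NULL, NULL);

   Real callers such as cgraph_node::create_version_clone_with_body pass
   the clone's tree_map and param_adjustments instead.  */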
6190 void
6191 tree_function_versioning (tree old_decl, tree new_decl,
6192 			  vec<ipa_replace_map *, va_gc> *tree_map,
6193 			  ipa_param_adjustments *param_adjustments,
6194 			  bool update_clones, bitmap blocks_to_copy,
6195 			  basic_block new_entry)
6196 {
6197   struct cgraph_node *old_version_node;
6198   struct cgraph_node *new_version_node;
6199   copy_body_data id;
6200   tree p;
6201   unsigned i;
6202   struct ipa_replace_map *replace_info;
6203   basic_block old_entry_block, bb;
6204   auto_vec<gimple *, 10> init_stmts;
6205   tree vars = NULL_TREE;
6206 
6207   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6208 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
6209   DECL_POSSIBLY_INLINED (old_decl) = 1;
6210 
6211   old_version_node = cgraph_node::get (old_decl);
6212   gcc_checking_assert (old_version_node);
6213   new_version_node = cgraph_node::get (new_decl);
6214   gcc_checking_assert (new_version_node);
6215 
6216   /* Copy over debug args.  */
6217   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6218     {
6219       vec<tree, va_gc> **new_debug_args, **old_debug_args;
6220       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6221       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6222       old_debug_args = decl_debug_args_lookup (old_decl);
6223       if (old_debug_args)
6224 	{
6225 	  new_debug_args = decl_debug_args_insert (new_decl);
6226 	  *new_debug_args = vec_safe_copy (*old_debug_args);
6227 	}
6228     }
6229 
6230   /* Output the inlining info for this abstract function, since it has been
6231      inlined.  If we don't do this now, we can lose the information about the
6232      variables in the function when the blocks get blown away as soon as we
6233      remove the cgraph node.  */
6234   (*debug_hooks->outlining_inline_function) (old_decl);
6235 
6236   DECL_ARTIFICIAL (new_decl) = 1;
6237   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6238   if (DECL_ORIGIN (old_decl) == old_decl)
6239     old_version_node->used_as_abstract_origin = true;
6240   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6241 
6242   /* Prepare the data structures for the tree copy.  */
6243   memset (&id, 0, sizeof (id));
6244 
6245   /* Generate a new name for the new version. */
6246   id.statements_to_fold = new hash_set<gimple *>;
6247 
6248   id.decl_map = new hash_map<tree, tree>;
6249   id.debug_map = NULL;
6250   id.src_fn = old_decl;
6251   id.dst_fn = new_decl;
6252   id.src_node = old_version_node;
6253   id.dst_node = new_version_node;
6254   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6255   id.blocks_to_copy = blocks_to_copy;
6256 
6257   id.copy_decl = copy_decl_no_change;
6258   id.transform_call_graph_edges
6259     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6260   id.transform_new_cfg = true;
6261   id.transform_return_to_modify = false;
6262   id.transform_parameter = false;
6263   id.transform_lang_insert_block = NULL;
6264 
6265   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (old_decl));
6266   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6267   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6268   initialize_cfun (new_decl, old_decl,
6269 		   new_entry ? new_entry->count : old_entry_block->count);
6270   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6271     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6272       = id.src_cfun->gimple_df->ipa_pta;
6273 
6274   /* Copy the function's static chain.  */
6275   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6276   if (p)
6277     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6278       = copy_static_chain (p, &id);
6279 
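  /* If the source function is itself a clone with an already-adjusted
     signature, replace-map parameter numbers still refer to the original
     signature; new_param_indices maps those original indices to positions
     in old_decl's current DECL_ARGUMENTS chain.  */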
6280   auto_vec<int, 16> new_param_indices;
6281   ipa_param_adjustments *old_param_adjustments
6282     = old_version_node->clone.param_adjustments;
6283   if (old_param_adjustments)
6284     old_param_adjustments->get_updated_indices (&new_param_indices);
6285 
6286   /* If there's a tree_map, prepare for substitution.  */
6287   if (tree_map)
6288     for (i = 0; i < tree_map->length (); i++)
6289       {
6290 	gimple *init;
6291 	replace_info = (*tree_map)[i];
6292 
6293 	int p = replace_info->parm_num;
6294 	if (old_param_adjustments)
6295 	  p = new_param_indices[p];
6296 
6297 	tree parm;
6298 	tree req_type, new_type;
6299 
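	/* Advance along the old argument chain to the parameter with
	   index P; P is decremented once per DECL_CHAIN step.  */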
6300 	for (parm = DECL_ARGUMENTS (old_decl); p;
6301 	     parm = DECL_CHAIN (parm))
6302 	  p--;
6303 	tree old_tree = parm;
6304 	req_type = TREE_TYPE (parm);
6305 	new_type = TREE_TYPE (replace_info->new_tree);
6306 	if (!useless_type_conversion_p (req_type, new_type))
6307 	  {
6308 	    if (fold_convertible_p (req_type, replace_info->new_tree))
6309 	      replace_info->new_tree
6310 		= fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
6311 	    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6312 	      replace_info->new_tree
6313 		= fold_build1 (VIEW_CONVERT_EXPR, req_type,
6314 			       replace_info->new_tree);
6315 	    else
6316 	      {
6317 		if (dump_file)
6318 		  {
6319 		    fprintf (dump_file, "    const ");
6320 		    print_generic_expr (dump_file,
6321 					replace_info->new_tree);
6322 		    fprintf (dump_file,
6323 			     "  can't be converted to param ");
6324 		    print_generic_expr (dump_file, parm);
6325 		    fprintf (dump_file, "\n");
6326 		  }
6327 		old_tree = NULL;
6328 	      }
6329 	  }
6330 
6331 	if (old_tree)
6332 	  {
6333 	    init = setup_one_parameter (&id, old_tree, replace_info->new_tree,
6334 					id.src_fn, NULL, &vars);
6335 	    if (init)
6336 	      init_stmts.safe_push (init);
6337 	  }
6338       }
6339 
6340   ipa_param_body_adjustments *param_body_adjs = NULL;
6341   if (param_adjustments)
6342     {
6343       param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6344 							new_decl, old_decl,
6345 							&id, &vars, tree_map);
6346       id.param_body_adjs = param_body_adjs;
6347       DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6348     }
6349   else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6350     DECL_ARGUMENTS (new_decl)
6351       = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6352 
6353   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6354   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6355 
6356   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6357 
6358   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6359     /* Add local vars.  */
6360     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6361 
6362   if (DECL_RESULT (old_decl) == NULL_TREE)
6363     ;
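  /* If PARAM_ADJUSTMENTS asks for the return value to be removed, map the
     old RESULT_DECL to a replacement VAR_DECL and give the new function a
     void RESULT_DECL instead.  */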
6364   else if (param_adjustments && param_adjustments->m_skip_return
6365 	   && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6366     {
6367       tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6368 						   &id);
6369       declare_inline_vars (NULL, resdecl_repl);
6370       insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6371 
6372       DECL_RESULT (new_decl)
6373 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6374 		      RESULT_DECL, NULL_TREE, void_type_node);
6375       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6376       DECL_IS_MALLOC (new_decl) = false;
6377       cfun->returns_struct = 0;
6378       cfun->returns_pcc_struct = 0;
6379     }
6380   else
6381     {
6382       tree old_name;
6383       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6384       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6385       if (gimple_in_ssa_p (id.src_cfun)
6386 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6387 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6388 	{
6389 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6390 	  insert_decl_map (&id, old_name, new_name);
6391 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6392 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6393 	}
6394     }
6395 
6396   /* Set up the destination function's loop tree.  */
6397   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6398     {
6399       cfun->curr_properties &= ~PROP_loops;
6400       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6401       cfun->curr_properties |= PROP_loops;
6402     }
6403 
6404   /* Copy the function's body.  */
6405   copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6406 	     new_entry);
6407 
6408   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6409   number_blocks (new_decl);
6410 
6411   /* We want to create the BB unconditionally, so that the addition of
6412      debug stmts doesn't affect BB count, which may in the end cause
6413      codegen differences.  */
6414   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6415   while (init_stmts.length ())
6416     insert_init_stmt (&id, bb, init_stmts.pop ());
6417   update_clone_info (&id);
6418 
6419   /* Remap the nonlocal_goto_save_area, if any.  */
6420   if (cfun->nonlocal_goto_save_area)
6421     {
6422       struct walk_stmt_info wi;
6423 
6424       memset (&wi, 0, sizeof (wi));
6425       wi.info = &id;
6426       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6427     }
6428 
6429   /* Clean up.  */
6430   delete id.decl_map;
6431   if (id.debug_map)
6432     delete id.debug_map;
6433   free_dominance_info (CDI_DOMINATORS);
6434   free_dominance_info (CDI_POST_DOMINATORS);
6435 
6436   update_max_bb_count ();
6437   fold_marked_statements (0, id.statements_to_fold);
6438   delete id.statements_to_fold;
6439   delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6440   if (id.dst_node->definition)
6441     cgraph_edge::rebuild_references ();
6442   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6443     {
6444       calculate_dominance_info (CDI_DOMINATORS);
6445       fix_loop_structure (NULL);
6446     }
6447   update_ssa (TODO_update_ssa);
6448 
6449   /* After partial cloning we need to rescale frequencies so that they are
6450      within the proper range in the cloned function.  */
6451   if (new_entry)
6452     {
6453       struct cgraph_edge *e;
6454       rebuild_frequencies ();
6455 
6456       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6457       for (e = new_version_node->callees; e; e = e->next_callee)
6458 	{
6459 	  basic_block bb = gimple_bb (e->call_stmt);
6460 	  e->count = bb->count;
6461 	}
6462       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6463 	{
6464 	  basic_block bb = gimple_bb (e->call_stmt);
6465 	  e->count = bb->count;
6466 	}
6467     }
6468 
6469   if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6470     {
6471       vec<tree, va_gc> **debug_args = NULL;
6472       unsigned int len = 0;
6473       unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6474 
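      /* For every parameter whose value is no longer available in the
	 clone, create a DEBUG_EXPR_DECL and record the (original parm,
	 debug decl) pair in the new function's debug args vector.  */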
6475       for (i = 0; i < reset_len; i++)
6476 	{
6477 	  tree parm = param_body_adjs->m_reset_debug_decls[i];
6478 	  gcc_assert (is_gimple_reg (parm));
6479 	  tree ddecl;
6480 
6481 	  if (debug_args == NULL)
6482 	    {
6483 	      debug_args = decl_debug_args_insert (new_decl);
6484 	      len = vec_safe_length (*debug_args);
6485 	    }
6486 	  ddecl = make_node (DEBUG_EXPR_DECL);
6487 	  DECL_ARTIFICIAL (ddecl) = 1;
6488 	  TREE_TYPE (ddecl) = TREE_TYPE (parm);
6489 	  SET_DECL_MODE (ddecl, DECL_MODE (parm));
6490 	  vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6491 	  vec_safe_push (*debug_args, ddecl);
6492 	}
6493       if (debug_args != NULL)
6494 	{
6495 	  /* On the callee side, add
6496 	     DEBUG D#Y s=> parm
6497 	     DEBUG var => D#Y
6498 	     stmts to the first bb where var is a VAR_DECL created for the
6499 	     optimized away parameter in DECL_INITIAL block.  This hints
6500 	     in the debug info that var (whose DECL_ORIGIN is the parm
6501 	     PARM_DECL) is optimized away, but could be looked up at the
6502 	     call site as value of D#X there.  */
6503 	  tree vexpr;
6504 	  gimple_stmt_iterator cgsi
6505 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6506 	  gimple *def_temp;
6507 	  tree var = vars;
6508 	  i = vec_safe_length (*debug_args);
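	  /* Walk the (origin parm, debug decl) pairs just appended, from
	     the end of the vector back down to the pre-existing entries,
	     matching each origin to the VAR_DECL created for it among VARS
	     and emitting the bind/source-bind statements.  */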
6509 	  do
6510 	    {
6511 	      i -= 2;
6512 	      while (var != NULL_TREE
6513 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6514 		var = TREE_CHAIN (var);
6515 	      if (var == NULL_TREE)
6516 		break;
6517 	      vexpr = make_node (DEBUG_EXPR_DECL);
6518 	      tree parm = (**debug_args)[i];
6519 	      DECL_ARTIFICIAL (vexpr) = 1;
6520 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6521 	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
6522 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6523 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6524 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6525 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6526 	    }
6527 	  while (i > len);
6528 	}
6529     }
6530   delete param_body_adjs;
6531   free_dominance_info (CDI_DOMINATORS);
6532   free_dominance_info (CDI_POST_DOMINATORS);
6533 
6534   gcc_assert (!id.debug_stmts.exists ());
6535   pop_cfun ();
6536   return;
6537 }
6538 
6539 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6540    the callee and return the inlined body on success.  */
6541 
6542 tree
6543 maybe_inline_call_in_expr (tree exp)
6544 {
6545   tree fn = get_callee_fndecl (exp);
6546 
6547   /* We can only try to inline "const" functions.  */
6548   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6549     {
6550       call_expr_arg_iterator iter;
6551       copy_body_data id;
6552       tree param, arg, t;
6553       hash_map<tree, tree> decl_map;
6554 
6555       /* Remap the parameters.  */
6556       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6557 	   param;
6558 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6559 	decl_map.put (param, arg);
6560 
6561       memset (&id, 0, sizeof (id));
6562       id.src_fn = fn;
6563       id.dst_fn = current_function_decl;
6564       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6565       id.decl_map = &decl_map;
6566 
6567       id.copy_decl = copy_decl_no_change;
6568       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6569       id.transform_new_cfg = false;
6570       id.transform_return_to_modify = true;
6571       id.transform_parameter = true;
6572       id.transform_lang_insert_block = NULL;
6573 
6574       /* Make sure not to unshare trees behind the front-end's back
6575 	 since front-end specific mechanisms may rely on sharing.  */
6576       id.regimplify = false;
6577       id.do_not_unshare = true;
6578 
6579       /* We're not inside any EH region.  */
6580       id.eh_lp_nr = 0;
6581 
6582       t = copy_tree_body (&id);
6583 
6584       /* We can only return something suitable for use in a GENERIC
6585 	 expression tree.  */
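      /* Because transform_return_to_modify was set, the body's RETURN_EXPR
	 was turned into a MODIFY_EXPR; its second operand is the computed
	 return value.  */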
6586       if (TREE_CODE (t) == MODIFY_EXPR)
6587 	return TREE_OPERAND (t, 1);
6588     }
6589 
6590   return NULL_TREE;
6591 }
6592 
6593 /* Duplicate a type, fields and all.  */
6594 
6595 tree
6596 build_duplicate_type (tree type)
6597 {
6598   struct copy_body_data id;
6599 
6600   memset (&id, 0, sizeof (id));
6601   id.src_fn = current_function_decl;
6602   id.dst_fn = current_function_decl;
6603   id.src_cfun = cfun;
6604   id.decl_map = new hash_map<tree, tree>;
6605   id.debug_map = NULL;
6606   id.copy_decl = copy_decl_no_change;
6607 
6608   type = remap_type_1 (type, &id);
6609 
6610   delete id.decl_map;
6611   if (id.debug_map)
6612     delete id.debug_map;
6613 
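  /* The copy is a distinct type in its own right; make it its own
     canonical type rather than sharing the original's.  */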
6614   TYPE_CANONICAL (type) = type;
6615 
6616   return type;
6617 }
6618 
6619 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6620    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6621    evaluation.  */
6622 
6623 tree
6624 copy_fn (tree fn, tree& parms, tree& result)
6625 {
6626   copy_body_data id;
6627   tree param;
6628   hash_map<tree, tree> decl_map;
6629 
6630   tree *p = &parms;
6631   *p = NULL_TREE;
6632 
6633   memset (&id, 0, sizeof (id));
6634   id.src_fn = fn;
6635   id.dst_fn = current_function_decl;
6636   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6637   id.decl_map = &decl_map;
6638 
6639   id.copy_decl = copy_decl_no_change;
6640   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6641   id.transform_new_cfg = false;
6642   id.transform_return_to_modify = false;
6643   id.transform_parameter = true;
6644   id.transform_lang_insert_block = NULL;
6645 
6646   /* Make sure not to unshare trees behind the front-end's back
6647      since front-end specific mechanisms may rely on sharing.  */
6648   id.regimplify = false;
6649   id.do_not_unshare = true;
6650   id.do_not_fold = true;
6651 
6652   /* We're not inside any EH region.  */
6653   id.eh_lp_nr = 0;
6654 
6655   /* Remap the parameters and result and return them to the caller.  */
6656   for (param = DECL_ARGUMENTS (fn);
6657        param;
6658        param = DECL_CHAIN (param))
6659     {
6660       *p = remap_decl (param, &id);
6661       p = &DECL_CHAIN (*p);
6662     }
6663 
6664   if (DECL_RESULT (fn))
6665     result = remap_decl (DECL_RESULT (fn), &id);
6666   else
6667     result = NULL_TREE;
6668 
6669   return copy_tree_body (&id);
6670 }
6671