xref: /netbsd-src/external/gpl3/gcc/dist/gcc/tree-inline.cc (revision 0a3071956a3a9fdebdbf7f338cf2d439b45fc728)
1 /* Tree inlining.
2    Copyright (C) 2001-2022 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 #include "alloc-pool.h"
65 #include "symbol-summary.h"
66 #include "symtab-thunks.h"
67 #include "symtab-clones.h"
68 #include "asan.h"
69 
70 /* I'm not real happy about this, but we need to handle gimple and
71    non-gimple trees.  */
72 
73 /* Inlining, Cloning, Versioning, Parallelization
74 
75    Inlining: a function body is duplicated, but the PARM_DECLs are
76    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
77    MODIFY_EXPRs that store to a dedicated returned-value variable.
78    The duplicated eh_region info of the copy will later be appended
79    to the info for the caller; the eh_region info in copied throwing
80    statements and RESX statements is adjusted accordingly.
81 
82    Cloning: (only in C++) We have one body for a con/de/structor, and
83    multiple function decls, each with a unique parameter list.
84    Duplicate the body, using the given splay tree; some parameters
85    will become constants (like 0 or 1).
86 
87    Versioning: a function body is duplicated and the result is a new
88    function rather than being copied into blocks of an existing function
89    as with inlining.  Some parameters will become constants.
90 
91    Parallelization: a region of a function is duplicated resulting in
92    a new function.  Variables may be replaced with complex expressions
93    to enable shared variable semantics.
94 
95    All of these will simultaneously look up any callgraph edges.  If
96    we're going to inline the duplicated function body, and the given
97    function has some cloned callgraph nodes (one for each place this
98    function will be inlined), those callgraph edges will be duplicated.
99    If we're cloning the body, those callgraph edges will be
100    updated to point into the new body.  (Note that the original
101    callgraph node and edge list will not be altered.)
102 
103    See the CALL_EXPR handling case in copy_tree_body_r ().  */
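
/* As a rough illustration of the inlining transformation described above
   (the function and variable names below are made up), inlining

     int inc (int x) { return x + 1; }

   at a call site "r = inc (a);" duplicates the body, remaps the PARM_DECL
   X to a local VAR_DECL initialized from A, and turns the RETURN_EXPR into
   an assignment to the returned-value variable:

     x.1 = a;
     retval.0 = x.1 + 1;
     r = retval.0;

   Control then simply falls through to the code following the original
   call.  */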
104 
105 /* To Do:
106 
107    o In order to make inlining-on-trees work, we pessimized
108      function-local static constants.  In particular, they are now
109      always output, even when not addressed.  Fix this by treating
110      function-local static constants just like global static
111      constants; the back-end already knows not to output them if they
112      are not needed.
113 
114    o Provide heuristics to clamp inlining of recursive template
115      calls?  */
116 
117 
118 /* Weights that estimate_num_insns uses to estimate the size of the
119    produced code.  */
120 
121 eni_weights eni_size_weights;
122 
123 /* Weights that estimate_num_insns uses to estimate the time necessary
124    to execute the produced code.  */
125 
126 eni_weights eni_time_weights;
127 
128 /* Prototypes.  */
129 
130 static tree declare_return_variable (copy_body_data *, tree, tree,
131 				     basic_block);
132 static void remap_block (tree *, copy_body_data *);
133 static void copy_bind_expr (tree *, int *, copy_body_data *);
134 static void declare_inline_vars (tree, tree);
135 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
136 static void prepend_lexical_block (tree current_block, tree new_block);
137 static tree copy_result_decl_to_var (tree, copy_body_data *);
138 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
139 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
140 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
141 
142 /* Insert a tree->tree mapping for ID.  Although the name suggests
143    that the trees should be variables, it is used for more than that.  */
144 
145 void
146 insert_decl_map (copy_body_data *id, tree key, tree value)
147 {
148   id->decl_map->put (key, value);
149 
150   /* Always insert an identity map as well.  If we see this same new
151      node again, we won't want to duplicate it a second time.  */
152   if (key != value)
153     id->decl_map->put (value, value);
154 }
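
/* For example, after insert_decl_map (id, old_parm, new_var) both
   id->decl_map->get (old_parm) and id->decl_map->get (new_var) yield
   NEW_VAR, so encountering the new node again maps it to itself rather
   than cloning it a second time.  */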
155 
156 /* If nonzero, we're remapping the contents of inlined debug
157    statements.  If negative, an error has occurred, such as a
158    reference to a variable that isn't available in the inlined
159    context.  */
160 static int processing_debug_stmt = 0;
161 
162 /* Construct new SSA name for old NAME. ID is the inline context.  */
163 
164 static tree
165 remap_ssa_name (tree name, copy_body_data *id)
166 {
167   tree new_tree, var;
168   tree *n;
169 
170   gcc_assert (TREE_CODE (name) == SSA_NAME);
171 
172   n = id->decl_map->get (name);
173   if (n)
174     {
175       /* When we perform edge redirection as part of CFG copy, IPA-SRA can
176 	 remove an unused LHS from a call statement.  Such an LHS can however
177 	 still appear in debug statements, but its value is lost in this
178 	 function and we do not want to map it.  */
179       if (id->killed_new_ssa_names
180 	  && id->killed_new_ssa_names->contains (*n))
181 	{
182 	  gcc_assert (processing_debug_stmt);
183 	  processing_debug_stmt = -1;
184 	  return name;
185 	}
186 
187       return unshare_expr (*n);
188     }
189 
190   if (processing_debug_stmt)
191     {
192       if (SSA_NAME_IS_DEFAULT_DEF (name)
193 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
194 	  && id->entry_bb == NULL
195 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
196 	{
197 	  gimple *def_temp;
198 	  gimple_stmt_iterator gsi;
199 	  tree val = SSA_NAME_VAR (name);
200 
201 	  n = id->decl_map->get (val);
202 	  if (n != NULL)
203 	    val = *n;
204 	  if (TREE_CODE (val) != PARM_DECL
205 	      && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
206 	    {
207 	      processing_debug_stmt = -1;
208 	      return name;
209 	    }
210 	  n = id->decl_map->get (val);
211 	  if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
212 	    return *n;
213 	  tree vexpr = build_debug_expr_decl (TREE_TYPE (name));
214 	  /* FIXME: Is setting the mode really necessary? */
215 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
216 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
217 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
218 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
219 	  insert_decl_map (id, val, vexpr);
220 	  return vexpr;
221 	}
222 
223       processing_debug_stmt = -1;
224       return name;
225     }
226 
227   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
228   var = SSA_NAME_VAR (name);
229   if (!var
230       || (!SSA_NAME_IS_DEFAULT_DEF (name)
231 	  && VAR_P (var)
232 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
233 	  && DECL_ARTIFICIAL (var)
234 	  && DECL_IGNORED_P (var)
235 	  && !DECL_NAME (var)))
236     {
237       struct ptr_info_def *pi;
238       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
239       if (!var && SSA_NAME_IDENTIFIER (name))
240 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
241       insert_decl_map (id, name, new_tree);
242       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
243 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
244       /* At least IPA points-to info can be directly transferred.  */
245       if (id->src_cfun->gimple_df
246 	  && id->src_cfun->gimple_df->ipa_pta
247 	  && POINTER_TYPE_P (TREE_TYPE (name))
248 	  && (pi = SSA_NAME_PTR_INFO (name))
249 	  && !pi->pt.anything)
250 	{
251 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
252 	  new_pi->pt = pi->pt;
253 	}
254       /* So can range-info.  */
255       if (!POINTER_TYPE_P (TREE_TYPE (name))
256 	  && SSA_NAME_RANGE_INFO (name))
257 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
258 				       SSA_NAME_RANGE_INFO (name));
259       return new_tree;
260     }
261 
262   /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
263      that in copy_bb.  */
264   new_tree = remap_decl (var, id);
265 
266   /* We might've substituted a constant or another SSA_NAME for
267      the variable.
268 
269      Replace the SSA name representing RESULT_DECL by the variable during
270      inlining:  this saves us from the need to introduce a PHI node in the
271      case the return value is only partly initialized.  */
272   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
273       && (!SSA_NAME_VAR (name)
274 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
275 	  || !id->transform_return_to_modify))
276     {
277       struct ptr_info_def *pi;
278       new_tree = make_ssa_name (new_tree);
279       insert_decl_map (id, name, new_tree);
280       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
281 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
282       /* At least IPA points-to info can be directly transferred.  */
283       if (id->src_cfun->gimple_df
284 	  && id->src_cfun->gimple_df->ipa_pta
285 	  && POINTER_TYPE_P (TREE_TYPE (name))
286 	  && (pi = SSA_NAME_PTR_INFO (name))
287 	  && !pi->pt.anything)
288 	{
289 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
290 	  new_pi->pt = pi->pt;
291 	}
292       /* So can range-info.  */
293       if (!POINTER_TYPE_P (TREE_TYPE (name))
294 	  && SSA_NAME_RANGE_INFO (name))
295 	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
296 				       SSA_NAME_RANGE_INFO (name));
297       if (SSA_NAME_IS_DEFAULT_DEF (name))
298 	{
299 	  /* By inlining a function having an uninitialized variable, we might
300 	     extend its lifetime (the variable might get reused).  This causes
301 	     an ICE in the case we end up extending the lifetime of an SSA name
302 	     across an abnormal edge, but it also increases register pressure.
303 
304 	     We simply initialize all uninitialized vars by 0 except for the
305 	     case we are inlining to the very first BB.  We can avoid this
306 	     for all BBs that are not inside strongly connected regions of
307 	     the CFG, but this is expensive to test.  */
308 	  if (id->entry_bb
309 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
310 	      && (!SSA_NAME_VAR (name)
311 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
312 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
313 					     0)->dest
314 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
315 	    {
316 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
317 	      gimple *init_stmt;
318 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
319 
320 	      init_stmt = gimple_build_assign (new_tree, zero);
321 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
322 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
323 	    }
324 	  else
325 	    {
326 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
327 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
328 	    }
329 	}
330     }
331   else
332     insert_decl_map (id, name, new_tree);
333   return new_tree;
334 }
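
/* Note that in the common case the result is a fresh SSA name for the
   remapped underlying variable; points-to information (for pointers) and
   range information (for other types) are copied from the source name
   when available, so the copy does not start out with weaker information
   than the original.  */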
335 
336 /* Remap DECL during the copying of the BLOCK tree for the function.  */
337 
338 tree
339 remap_decl (tree decl, copy_body_data *id)
340 {
341   tree *n;
342 
343   /* We only remap local variables in the current function.  */
344 
345   /* See if we have remapped this declaration.  */
346 
347   n = id->decl_map->get (decl);
348 
349   if (!n && processing_debug_stmt)
350     {
351       processing_debug_stmt = -1;
352       return decl;
353     }
354 
355   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
356      necessary DECLs have already been remapped and we do not want to duplicate
357      a decl coming from outside of the sequence we are copying.  */
358   if (!n
359       && id->prevent_decl_creation_for_types
360       && id->remapping_type_depth > 0
361       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
362     return decl;
363 
364   /* If we didn't already have an equivalent for this declaration, create one
365      now.  */
366   if (!n)
367     {
368       /* Make a copy of the variable or label.  */
369       tree t = id->copy_decl (decl, id);
370 
371       /* Remember it, so that if we encounter this local entity again
372 	 we can reuse this copy.  Do this early because remap_type may
373 	 need this decl for TYPE_STUB_DECL.  */
374       insert_decl_map (id, decl, t);
375 
376       if (!DECL_P (t))
377 	return t;
378 
379       /* Remap types, if necessary.  */
380       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
381       if (TREE_CODE (t) == TYPE_DECL)
382 	{
383 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
384 
385 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
386 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
387 	     is not set on the TYPE_DECL, for example in LTO mode.  */
388 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
389 	    {
390 	      tree x = build_variant_type_copy (TREE_TYPE (t));
391 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
392 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
393 	      DECL_ORIGINAL_TYPE (t) = x;
394 	    }
395 	}
396 
397       /* Remap sizes as necessary.  */
398       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
399       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
400 
401       /* If fields, do likewise for offset and qualifier.  */
402       if (TREE_CODE (t) == FIELD_DECL)
403 	{
404 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
405 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
406 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
407 	}
408 
409       return t;
410     }
411 
412   if (id->do_not_unshare)
413     return *n;
414   else
415     return unshare_expr (*n);
416 }
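
/* A typical use is remapping a local variable while copying a function
   body: the first call for a given DECL invokes id->copy_decl to build
   the replacement and records it in the map, and every later call for
   the same DECL simply returns (an unshared copy of) that recorded
   replacement.  */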
417 
418 static tree
419 remap_type_1 (tree type, copy_body_data *id)
420 {
421   tree new_tree, t;
422 
423   /* We do need a copy.  Build and register it now.  If this is a pointer or
424      reference type, remap the designated type and make a new pointer or
425      reference type.  */
426   if (TREE_CODE (type) == POINTER_TYPE)
427     {
428       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
429 					 TYPE_MODE (type),
430 					 TYPE_REF_CAN_ALIAS_ALL (type));
431       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
432 	new_tree = build_type_attribute_qual_variant (new_tree,
433 						      TYPE_ATTRIBUTES (type),
434 						      TYPE_QUALS (type));
435       insert_decl_map (id, type, new_tree);
436       return new_tree;
437     }
438   else if (TREE_CODE (type) == REFERENCE_TYPE)
439     {
440       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
441 					    TYPE_MODE (type),
442 					    TYPE_REF_CAN_ALIAS_ALL (type));
443       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
444 	new_tree = build_type_attribute_qual_variant (new_tree,
445 						      TYPE_ATTRIBUTES (type),
446 						      TYPE_QUALS (type));
447       insert_decl_map (id, type, new_tree);
448       return new_tree;
449     }
450   else
451     new_tree = copy_node (type);
452 
453   insert_decl_map (id, type, new_tree);
454 
455   /* This is a new type, not a copy of an old type.  Need to reassociate
456      variants.  We can handle everything except the main variant lazily.  */
457   t = TYPE_MAIN_VARIANT (type);
458   if (type != t)
459     {
460       t = remap_type (t, id);
461       TYPE_MAIN_VARIANT (new_tree) = t;
462       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
463       TYPE_NEXT_VARIANT (t) = new_tree;
464     }
465   else
466     {
467       TYPE_MAIN_VARIANT (new_tree) = new_tree;
468       TYPE_NEXT_VARIANT (new_tree) = NULL;
469     }
470 
471   if (TYPE_STUB_DECL (type))
472     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
473 
474   /* Lazily create pointer and reference types.  */
475   TYPE_POINTER_TO (new_tree) = NULL;
476   TYPE_REFERENCE_TO (new_tree) = NULL;
477 
478   /* Copy all types that may contain references to local variables; be sure to
479      preserve sharing between the type and its main variant when possible.  */
480   switch (TREE_CODE (new_tree))
481     {
482     case INTEGER_TYPE:
483     case REAL_TYPE:
484     case FIXED_POINT_TYPE:
485     case ENUMERAL_TYPE:
486     case BOOLEAN_TYPE:
487       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
488 	{
489 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
490 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
491 
492 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
493 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
494 	}
495       else
496 	{
497 	  t = TYPE_MIN_VALUE (new_tree);
498 	  if (t && TREE_CODE (t) != INTEGER_CST)
499 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
500 
501 	  t = TYPE_MAX_VALUE (new_tree);
502 	  if (t && TREE_CODE (t) != INTEGER_CST)
503 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
504 	}
505       return new_tree;
506 
507     case FUNCTION_TYPE:
508       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
509 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
510 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
511       else
512         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
513       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
514 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
515 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
516       else
517         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
518       return new_tree;
519 
520     case ARRAY_TYPE:
521       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
522 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
523 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
524       else
525 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
526 
527       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
528 	{
529 	  gcc_checking_assert (TYPE_DOMAIN (type)
530 			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
531 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
532 	}
533       else
534         {
535 	  TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
536 	  /* For array bounds where we have decided not to copy over the bounds
537 	     variable which isn't used in the OpenMP/OpenACC region, change them
538 	     to an uninitialized VAR_DECL temporary.  */
539 	  if (id->adjust_array_error_bounds
540 	      && TYPE_DOMAIN (new_tree)
541 	      && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
542 	      && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
543 	    {
544 	      tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
545 	      DECL_ATTRIBUTES (v)
546 		= tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
547 			     DECL_ATTRIBUTES (v));
548 	      TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
549 	    }
550         }
551       break;
552 
553     case RECORD_TYPE:
554     case UNION_TYPE:
555     case QUAL_UNION_TYPE:
556       if (TYPE_MAIN_VARIANT (type) != type
557 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
558 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
559       else
560 	{
561 	  tree f, nf = NULL;
562 
563 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
564 	    {
565 	      t = remap_decl (f, id);
566 	      DECL_CONTEXT (t) = new_tree;
567 	      DECL_CHAIN (t) = nf;
568 	      nf = t;
569 	    }
570 	  TYPE_FIELDS (new_tree) = nreverse (nf);
571 	}
572       break;
573 
574     case OFFSET_TYPE:
575     default:
576       /* Shouldn't have been thought variable sized.  */
577       gcc_unreachable ();
578     }
579 
580   /* All variants of the type share the same size, so use the already remapped data.  */
581   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
582     {
583       tree s = TYPE_SIZE (type);
584       tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
585       tree su = TYPE_SIZE_UNIT (type);
586       tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
587       gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
588 			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
589 			   || s == mvs);
590       gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
591 			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
592 			   || su == mvsu);
593       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
594       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
595     }
596   else
597     {
598       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
599       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
600     }
601 
602   return new_tree;
603 }
604 
605 /* Helper function for remap_type_2, called through walk_tree.  */
606 
607 static tree
608 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
609 {
610   copy_body_data *id = (copy_body_data *) data;
611 
612   if (TYPE_P (*tp))
613     *walk_subtrees = 0;
614 
615   else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
616     return *tp;
617 
618   return NULL_TREE;
619 }
620 
621 /* Return true if TYPE needs to be remapped because remap_decl on any
622    needed embedded decl returns something other than that decl.  */
623 
624 static bool
625 remap_type_2 (tree type, copy_body_data *id)
626 {
627   tree t;
628 
629 #define RETURN_TRUE_IF_VAR(T) \
630   do								\
631     {								\
632       tree _t = (T);						\
633       if (_t)							\
634 	{							\
635 	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
636 	    return true;					\
637 	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
638 	      && walk_tree (&_t, remap_type_3, id, NULL))	\
639 	    return true;					\
640 	}							\
641     }								\
642   while (0)
643 
644   switch (TREE_CODE (type))
645     {
646     case POINTER_TYPE:
647     case REFERENCE_TYPE:
648     case FUNCTION_TYPE:
649     case METHOD_TYPE:
650       return remap_type_2 (TREE_TYPE (type), id);
651 
652     case INTEGER_TYPE:
653     case REAL_TYPE:
654     case FIXED_POINT_TYPE:
655     case ENUMERAL_TYPE:
656     case BOOLEAN_TYPE:
657       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
658       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
659       return false;
660 
661     case ARRAY_TYPE:
662       if (remap_type_2 (TREE_TYPE (type), id)
663 	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
664 	return true;
665       break;
666 
667     case RECORD_TYPE:
668     case UNION_TYPE:
669     case QUAL_UNION_TYPE:
670       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
671 	if (TREE_CODE (t) == FIELD_DECL)
672 	  {
673 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
674 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
675 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
676 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
677 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
678 	  }
679       break;
680 
681     default:
682       return false;
683     }
684 
685   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
686   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
687   return false;
688 #undef RETURN_TRUE_IF_VAR
689 }
690 
691 tree
692 remap_type (tree type, copy_body_data *id)
693 {
694   tree *node;
695   tree tmp;
696 
697   if (type == NULL)
698     return type;
699 
700   /* See if we have remapped this type.  */
701   node = id->decl_map->get (type);
702   if (node)
703     return *node;
704 
705   /* The type only needs remapping if it's variably modified.  */
706   if (! variably_modified_type_p (type, id->src_fn)
707       /* Don't remap if copy_decl method doesn't always return a new
708 	 decl and for all embedded decls returns the passed in decl.  */
709       || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
710     {
711       insert_decl_map (id, type, type);
712       return type;
713     }
714 
715   id->remapping_type_depth++;
716   tmp = remap_type_1 (type, id);
717   id->remapping_type_depth--;
718 
719   return tmp;
720 }
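
/* Only variably modified types ever get rebuilt here.  For example, a
   VLA type whose array bound refers to a local variable of the source
   function must be copied so that the bound refers to the remapped
   variable instead, whereas an ordinary type like "int[10]" is entered
   into the map as an identity and returned unchanged.  */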
721 
722 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
723 
724 static bool
725 can_be_nonlocal (tree decl, copy_body_data *id)
726 {
727   /* We cannot duplicate function decls.  */
728   if (TREE_CODE (decl) == FUNCTION_DECL)
729     return true;
730 
731   /* Local static vars must be non-local or we get multiple declaration
732      problems.  */
733   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
734     return true;
735 
736   return false;
737 }
738 
739 static tree
740 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
741 	     copy_body_data *id)
742 {
743   tree old_var;
744   tree new_decls = NULL_TREE;
745 
746   /* Remap its variables.  */
747   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
748     {
749       tree new_var;
750 
751       if (can_be_nonlocal (old_var, id))
752 	{
753 	  /* We need to add this variable to the local decls as otherwise
754 	     nothing else will do so.  */
755 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
756 	    add_local_decl (cfun, old_var);
757 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
758 	      && !DECL_IGNORED_P (old_var)
759 	      && nonlocalized_list)
760 	    vec_safe_push (*nonlocalized_list, old_var);
761 	  continue;
762 	}
763 
764       /* Remap the variable.  */
765       new_var = remap_decl (old_var, id);
766 
767       /* If we didn't remap this variable, we can't mess with its
768 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
769 	 already declared somewhere else, so don't declare it here.  */
770 
771       if (new_var == id->retvar)
772 	;
773       else if (!new_var)
774         {
775 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
776 	      && !DECL_IGNORED_P (old_var)
777 	      && nonlocalized_list)
778 	    vec_safe_push (*nonlocalized_list, old_var);
779 	}
780       else
781 	{
782 	  gcc_assert (DECL_P (new_var));
783 	  DECL_CHAIN (new_var) = new_decls;
784 	  new_decls = new_var;
785 
786 	  /* Also copy value-expressions.  */
787 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
788 	    {
789 	      tree tem = DECL_VALUE_EXPR (new_var);
790 	      bool old_regimplify = id->regimplify;
791 	      id->remapping_type_depth++;
792 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
793 	      id->remapping_type_depth--;
794 	      id->regimplify = old_regimplify;
795 	      SET_DECL_VALUE_EXPR (new_var, tem);
796 	    }
797 	}
798     }
799 
800   return nreverse (new_decls);
801 }
802 
803 /* Copy the BLOCK to contain remapped versions of the variables
804    therein.  And hook the new block into the block-tree.  */
805 
806 static void
807 remap_block (tree *block, copy_body_data *id)
808 {
809   tree old_block;
810   tree new_block;
811 
812   /* Make the new block.  */
813   old_block = *block;
814   new_block = make_node (BLOCK);
815   TREE_USED (new_block) = TREE_USED (old_block);
816   BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
817   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
818   BLOCK_NONLOCALIZED_VARS (new_block)
819     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
820   *block = new_block;
821 
822   /* Remap its variables.  */
823   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
824   					&BLOCK_NONLOCALIZED_VARS (new_block),
825 					id);
826 
827   /* Remember the remapped block.  */
828   insert_decl_map (id, old_block, new_block);
829 }
830 
831 /* Copy the whole block tree and root it in id->block.  */
832 
833 static tree
834 remap_blocks (tree block, copy_body_data *id)
835 {
836   tree t;
837   tree new_tree = block;
838 
839   if (!block)
840     return NULL;
841 
842   remap_block (&new_tree, id);
843   gcc_assert (new_tree != block);
844   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
845     prepend_lexical_block (new_tree, remap_blocks (t, id));
846   /* Blocks are in arbitrary order, but make things slightly prettier and do
847      not swap order when producing a copy.  */
848   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
849   return new_tree;
850 }
851 
852 /* Remap the block tree rooted at BLOCK to nothing.  */
853 
854 static void
855 remap_blocks_to_null (tree block, copy_body_data *id)
856 {
857   tree t;
858   insert_decl_map (id, block, NULL_TREE);
859   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
860     remap_blocks_to_null (t, id);
861 }
862 
863 /* Remap the location info pointed to by LOCUS.  */
864 
865 static location_t
866 remap_location (location_t locus, copy_body_data *id)
867 {
868   if (LOCATION_BLOCK (locus))
869     {
870       tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
871       gcc_assert (n);
872       if (*n)
873 	return set_block (locus, *n);
874     }
875 
876   locus = LOCATION_LOCUS (locus);
877 
878   if (locus != UNKNOWN_LOCATION && id->block)
879     return set_block (locus, id->block);
880 
881   return locus;
882 }
883 
884 static void
885 copy_statement_list (tree *tp)
886 {
887   tree_stmt_iterator oi, ni;
888   tree new_tree;
889 
890   new_tree = alloc_stmt_list ();
891   ni = tsi_start (new_tree);
892   oi = tsi_start (*tp);
893   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
894   *tp = new_tree;
895 
896   for (; !tsi_end_p (oi); tsi_next (&oi))
897     {
898       tree stmt = tsi_stmt (oi);
899       if (TREE_CODE (stmt) == STATEMENT_LIST)
900 	/* This copy is not redundant; tsi_link_after will smash this
901 	   STATEMENT_LIST into the end of the one we're building, and we
902 	   don't want to do that with the original.  */
903 	copy_statement_list (&stmt);
904       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
905     }
906 }
907 
908 static void
909 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
910 {
911   tree block = BIND_EXPR_BLOCK (*tp);
912   /* Copy (and replace) the statement.  */
913   copy_tree_r (tp, walk_subtrees, NULL);
914   if (block)
915     {
916       remap_block (&block, id);
917       BIND_EXPR_BLOCK (*tp) = block;
918     }
919 
920   if (BIND_EXPR_VARS (*tp))
921     /* This will remap a lot of the same decls again, but this should be
922        harmless.  */
923     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
924 }
925 
926 
927 /* Create a new gimple_seq by remapping all the statements in BODY
928    using the inlining information in ID.  */
929 
930 static gimple_seq
931 remap_gimple_seq (gimple_seq body, copy_body_data *id)
932 {
933   gimple_stmt_iterator si;
934   gimple_seq new_body = NULL;
935 
936   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
937     {
938       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
939       gimple_seq_add_seq (&new_body, new_stmts);
940     }
941 
942   return new_body;
943 }
944 
945 
946 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
947    block using the mapping information in ID.  */
948 
949 static gimple *
950 copy_gimple_bind (gbind *stmt, copy_body_data *id)
951 {
952   gimple *new_bind;
953   tree new_block, new_vars;
954   gimple_seq body, new_body;
955 
956   /* Copy the statement.  Note that we purposely don't use copy_stmt
957      here because we need to remap statements as we copy.  */
958   body = gimple_bind_body (stmt);
959   new_body = remap_gimple_seq (body, id);
960 
961   new_block = gimple_bind_block (stmt);
962   if (new_block)
963     remap_block (&new_block, id);
964 
965   /* This will remap a lot of the same decls again, but this should be
966      harmless.  */
967   new_vars = gimple_bind_vars (stmt);
968   if (new_vars)
969     new_vars = remap_decls (new_vars, NULL, id);
970 
971   new_bind = gimple_build_bind (new_vars, new_body, new_block);
972 
973   return new_bind;
974 }
975 
976 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
977 
978 static bool
979 is_parm (tree decl)
980 {
981   if (TREE_CODE (decl) == SSA_NAME)
982     {
983       decl = SSA_NAME_VAR (decl);
984       if (!decl)
985 	return false;
986     }
987 
988   return (TREE_CODE (decl) == PARM_DECL);
989 }
990 
991 /* Remap the dependence CLIQUE from the source to the destination function
992    as specified in ID.  */
993 
994 static unsigned short
995 remap_dependence_clique (copy_body_data *id, unsigned short clique)
996 {
997   if (clique == 0 || processing_debug_stmt)
998     return 0;
999   if (!id->dependence_map)
1000     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1001   bool existed;
1002   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1003   if (!existed)
1004     {
1005       /* Clique 1 is reserved for local ones set by PTA.  */
1006       if (cfun->last_clique == 0)
1007 	cfun->last_clique = 1;
1008       newc = ++cfun->last_clique;
1009     }
1010   return newc;
1011 }
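
/* For example, if the source function used dependence cliques 2 and 3,
   the first reference copied from each clique allocates a fresh clique
   number in the destination function (clique 1 stays reserved for local
   cliques assigned by PTA), and later references to the same source
   clique reuse that new number via the dependence map.  */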
1012 
1013 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
1014    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
1015    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1016    recursing into the child nodes of *TP.  */
1017 
1018 static tree
1019 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1020 {
1021   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1022   copy_body_data *id = (copy_body_data *) wi_p->info;
1023   tree fn = id->src_fn;
1024 
1025   /* For recursive invocations this is no longer the LHS itself.  */
1026   bool is_lhs = wi_p->is_lhs;
1027   wi_p->is_lhs = false;
1028 
1029   if (TREE_CODE (*tp) == SSA_NAME)
1030     {
1031       *tp = remap_ssa_name (*tp, id);
1032       *walk_subtrees = 0;
1033       if (is_lhs)
1034 	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1035       return NULL;
1036     }
1037   else if (auto_var_in_fn_p (*tp, fn))
1038     {
1039       /* Local variables and labels need to be replaced by equivalent
1040 	 variables.  We don't want to copy static variables; there's
1041 	 only one of those, no matter how many times we inline the
1042 	 containing function.  Similarly for globals from an outer
1043 	 function.  */
1044       tree new_decl;
1045 
1046       /* Remap the declaration.  */
1047       new_decl = remap_decl (*tp, id);
1048       gcc_assert (new_decl);
1049       /* Replace this variable with the copy.  */
1050       STRIP_TYPE_NOPS (new_decl);
1051       /* ???  The C++ frontend uses void * pointer zero to initialize
1052          any other type.  This confuses the middle-end type verification.
1053 	 As cloned bodies do not go through gimplification again the fixup
1054 	 there doesn't trigger.  */
1055       if (TREE_CODE (new_decl) == INTEGER_CST
1056 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1057 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1058       *tp = new_decl;
1059       *walk_subtrees = 0;
1060     }
1061   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1062     gcc_unreachable ();
1063   else if (TREE_CODE (*tp) == SAVE_EXPR)
1064     gcc_unreachable ();
1065   else if (TREE_CODE (*tp) == LABEL_DECL
1066 	   && (!DECL_CONTEXT (*tp)
1067 	       || decl_function_context (*tp) == id->src_fn))
1068     /* These may need to be remapped for EH handling.  */
1069     *tp = remap_decl (*tp, id);
1070   else if (TREE_CODE (*tp) == FIELD_DECL)
1071     {
1072       /* If the enclosing record type is variably_modified_type_p, the field
1073 	 has already been remapped.  Otherwise, it need not be.  */
1074       tree *n = id->decl_map->get (*tp);
1075       if (n)
1076 	*tp = *n;
1077       *walk_subtrees = 0;
1078     }
1079   else if (TYPE_P (*tp))
1080     /* Types may need remapping as well.  */
1081     *tp = remap_type (*tp, id);
1082   else if (CONSTANT_CLASS_P (*tp))
1083     {
1084       /* If this is a constant, we have to copy the node iff the type
1085 	 will be remapped.  copy_tree_r will not copy a constant.  */
1086       tree new_type = remap_type (TREE_TYPE (*tp), id);
1087 
1088       if (new_type == TREE_TYPE (*tp))
1089 	*walk_subtrees = 0;
1090 
1091       else if (TREE_CODE (*tp) == INTEGER_CST)
1092 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1093       else
1094 	{
1095 	  *tp = copy_node (*tp);
1096 	  TREE_TYPE (*tp) = new_type;
1097 	}
1098     }
1099   else
1100     {
1101       /* Otherwise, just copy the node.  Note that copy_tree_r already
1102 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
1103 
1104       if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1105 	{
1106 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1107 	     that can happen when a pointer argument is an ADDR_EXPR.
1108 	     Recurse here manually to allow that.  */
1109 	  tree ptr = TREE_OPERAND (*tp, 0);
1110 	  tree type = remap_type (TREE_TYPE (*tp), id);
1111 	  tree old = *tp;
1112 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1113 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1114 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1115 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1116 	  copy_warning (*tp, old);
1117 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1118 	    {
1119 	      MR_DEPENDENCE_CLIQUE (*tp)
1120 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1121 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1122 	    }
1123 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1124 	     remapped a parameter as the property might be valid only
1125 	     for the parameter itself.  */
1126 	  if (TREE_THIS_NOTRAP (old)
1127 	      && (!is_parm (TREE_OPERAND (old, 0))
1128 		  || (!id->transform_parameter && is_parm (ptr))))
1129 	    TREE_THIS_NOTRAP (*tp) = 1;
1130 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1131 	  *walk_subtrees = 0;
1132 	  return NULL;
1133 	}
1134 
1135       /* Here is the "usual case".  Copy this tree node, and then
1136 	 tweak some special cases.  */
1137       copy_tree_r (tp, walk_subtrees, NULL);
1138 
1139       if (TREE_CODE (*tp) != OMP_CLAUSE)
1140 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1141 
1142       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1143 	{
1144 	  /* The copied TARGET_EXPR has never been expanded, even if the
1145 	     original node was expanded already.  */
1146 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1147 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1148 	}
1149       else if (TREE_CODE (*tp) == ADDR_EXPR)
1150 	{
1151 	  /* Variable substitution need not be simple.  In particular,
1152 	     the MEM_REF substitution above.  Make sure that
1153 	     TREE_CONSTANT and friends are up-to-date.  */
1154 	  int invariant = is_gimple_min_invariant (*tp);
1155 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1156 	  recompute_tree_invariant_for_addr_expr (*tp);
1157 
1158 	  /* If this used to be invariant, but is not any longer,
1159 	     then regimplification is probably needed.  */
1160 	  if (invariant && !is_gimple_min_invariant (*tp))
1161 	    id->regimplify = true;
1162 
1163 	  *walk_subtrees = 0;
1164 	}
1165     }
1166 
1167   /* Update the TREE_BLOCK for the cloned expr.  */
1168   if (EXPR_P (*tp))
1169     {
1170       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1171       tree old_block = TREE_BLOCK (*tp);
1172       if (old_block)
1173 	{
1174 	  tree *n;
1175 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1176 	  if (n)
1177 	    new_block = *n;
1178 	}
1179       TREE_SET_BLOCK (*tp, new_block);
1180     }
1181 
1182   /* Keep iterating.  */
1183   return NULL_TREE;
1184 }
1185 
1186 
1187 /* Called from copy_body_id via walk_tree.  DATA is really a
1188    `copy_body_data *'.  */
1189 
1190 tree
1191 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1192 {
1193   copy_body_data *id = (copy_body_data *) data;
1194   tree fn = id->src_fn;
1195   tree new_block;
1196 
1197   /* Begin by recognizing trees that we'll completely rewrite for the
1198      inlining context.  Our output for these trees is completely
1199      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1200      into an edge).  Further down, we'll handle trees that get
1201      duplicated and/or tweaked.  */
1202 
1203   /* When requested, RETURN_EXPRs should be transformed to just the
1204      contained MODIFY_EXPR.  The branch semantics of the return will
1205      be handled elsewhere by manipulating the CFG rather than a statement.  */
1206   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1207     {
1208       tree assignment = TREE_OPERAND (*tp, 0);
1209 
1210       /* If we're returning something, just turn that into an
1211 	 assignment to the equivalent of the original RESULT_DECL.
1212 	 If the "assignment" is just the result decl, the result
1213 	 decl has already been set (e.g. a recent "foo (&result_decl,
1214 	 ...)"); just toss the entire RETURN_EXPR.  */
1215       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1216 	{
1217 	  /* Replace the RETURN_EXPR with (a copy of) the
1218 	     MODIFY_EXPR hanging underneath.  */
1219 	  *tp = copy_node (assignment);
1220 	}
1221       else /* Else the RETURN_EXPR returns no value.  */
1222 	{
1223 	  *tp = NULL;
1224 	  return (tree) (void *)1;
1225 	}
1226     }
1227   else if (TREE_CODE (*tp) == SSA_NAME)
1228     {
1229       *tp = remap_ssa_name (*tp, id);
1230       *walk_subtrees = 0;
1231       return NULL;
1232     }
1233 
1234   /* Local variables and labels need to be replaced by equivalent
1235      variables.  We don't want to copy static variables; there's only
1236      one of those, no matter how many times we inline the containing
1237      function.  Similarly for globals from an outer function.  */
1238   else if (auto_var_in_fn_p (*tp, fn))
1239     {
1240       tree new_decl;
1241 
1242       /* Remap the declaration.  */
1243       new_decl = remap_decl (*tp, id);
1244       gcc_assert (new_decl);
1245       /* Replace this variable with the copy.  */
1246       STRIP_TYPE_NOPS (new_decl);
1247       *tp = new_decl;
1248       *walk_subtrees = 0;
1249     }
1250   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1251     copy_statement_list (tp);
1252   else if (TREE_CODE (*tp) == SAVE_EXPR
1253 	   || TREE_CODE (*tp) == TARGET_EXPR)
1254     remap_save_expr (tp, id->decl_map, walk_subtrees);
1255   else if (TREE_CODE (*tp) == LABEL_DECL
1256 	   && (! DECL_CONTEXT (*tp)
1257 	       || decl_function_context (*tp) == id->src_fn))
1258     /* These may need to be remapped for EH handling.  */
1259     *tp = remap_decl (*tp, id);
1260   else if (TREE_CODE (*tp) == BIND_EXPR)
1261     copy_bind_expr (tp, walk_subtrees, id);
1262   /* Types may need remapping as well.  */
1263   else if (TYPE_P (*tp))
1264     *tp = remap_type (*tp, id);
1265 
1266   /* If this is a constant, we have to copy the node iff the type will be
1267      remapped.  copy_tree_r will not copy a constant.  */
1268   else if (CONSTANT_CLASS_P (*tp))
1269     {
1270       tree new_type = remap_type (TREE_TYPE (*tp), id);
1271 
1272       if (new_type == TREE_TYPE (*tp))
1273 	*walk_subtrees = 0;
1274 
1275       else if (TREE_CODE (*tp) == INTEGER_CST)
1276 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1277       else
1278 	{
1279 	  *tp = copy_node (*tp);
1280 	  TREE_TYPE (*tp) = new_type;
1281 	}
1282     }
1283 
1284   /* Otherwise, just copy the node.  Note that copy_tree_r already
1285      knows not to copy VAR_DECLs, etc., so this is safe.  */
1286   else
1287     {
1288       /* Here we handle trees that are not completely rewritten.
1289 	 First we detect some inlining-induced bogosities for
1290 	 discarding.  */
1291       if (TREE_CODE (*tp) == MODIFY_EXPR
1292 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1293 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1294 	{
1295 	  /* Some assignments VAR = VAR; don't generate any rtl code
1296 	     and thus don't count as variable modification.  Avoid
1297 	     keeping bogosities like 0 = 0.  */
1298 	  tree decl = TREE_OPERAND (*tp, 0), value;
1299 	  tree *n;
1300 
1301 	  n = id->decl_map->get (decl);
1302 	  if (n)
1303 	    {
1304 	      value = *n;
1305 	      STRIP_TYPE_NOPS (value);
1306 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1307 		{
1308 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1309 		  return copy_tree_body_r (tp, walk_subtrees, data);
1310 		}
1311 	    }
1312 	}
1313       else if (TREE_CODE (*tp) == INDIRECT_REF)
1314 	{
1315 	  /* Get rid of *& from inline substitutions that can happen when a
1316 	     pointer argument is an ADDR_EXPR.  */
1317 	  tree decl = TREE_OPERAND (*tp, 0);
1318 	  tree *n = id->decl_map->get (decl);
1319 	  if (n)
1320 	    {
1321 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1322 	         it manually here as we'll eventually get ADDR_EXPRs
1323 		 which lie about their types pointed to.  In this case
1324 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1325 		 but we absolutely rely on that.  As fold_indirect_ref
1326 	         does other useful transformations, try that first, though.  */
1327 	      tree type = TREE_TYPE (*tp);
1328 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1329 	      tree old = *tp;
1330 	      *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1331 	      if (! *tp)
1332 	        {
1333 		  type = remap_type (type, id);
1334 		  if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1335 		    {
1336 		      *tp
1337 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1338 		      /* ???  We should either assert here or build
1339 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1340 			 incompatible types to our IL.  */
1341 		      if (! *tp)
1342 			*tp = TREE_OPERAND (ptr, 0);
1343 		    }
1344 	          else
1345 		    {
1346 	              *tp = build1 (INDIRECT_REF, type, ptr);
1347 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1348 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1349 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1350 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1351 			 have remapped a parameter as the property might be
1352 			 valid only for the parameter itself.  */
1353 		      if (TREE_THIS_NOTRAP (old)
1354 			  && (!is_parm (TREE_OPERAND (old, 0))
1355 			      || (!id->transform_parameter && is_parm (ptr))))
1356 		        TREE_THIS_NOTRAP (*tp) = 1;
1357 		    }
1358 		}
1359 	      *walk_subtrees = 0;
1360 	      return NULL;
1361 	    }
1362 	}
1363       else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1364 	{
1365 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1366 	     that can happen when a pointer argument is an ADDR_EXPR.
1367 	     Recurse here manually to allow that.  */
1368 	  tree ptr = TREE_OPERAND (*tp, 0);
1369 	  tree type = remap_type (TREE_TYPE (*tp), id);
1370 	  tree old = *tp;
1371 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1372 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1373 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1374 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1375 	  copy_warning (*tp, old);
1376 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1377 	    {
1378 	      MR_DEPENDENCE_CLIQUE (*tp)
1379 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1380 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1381 	    }
1382 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1383 	     remapped a parameter as the property might be valid only
1384 	     for the parameter itself.  */
1385 	  if (TREE_THIS_NOTRAP (old)
1386 	      && (!is_parm (TREE_OPERAND (old, 0))
1387 		  || (!id->transform_parameter && is_parm (ptr))))
1388 	    TREE_THIS_NOTRAP (*tp) = 1;
1389 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1390 	  *walk_subtrees = 0;
1391 	  return NULL;
1392 	}
1393 
1394       /* Here is the "usual case".  Copy this tree node, and then
1395 	 tweak some special cases.  */
1396       copy_tree_r (tp, walk_subtrees, NULL);
1397 
1398       /* If EXPR has a block defined, map it to the newly constructed block.
1399          When inlining we want EXPRs without a block to appear in the block
1400 	 of the function call if we are not remapping a type.  */
1401       if (EXPR_P (*tp))
1402 	{
1403 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1404 	  if (TREE_BLOCK (*tp))
1405 	    {
1406 	      tree *n;
1407 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1408 	      if (n)
1409 		new_block = *n;
1410 	    }
1411 	  TREE_SET_BLOCK (*tp, new_block);
1412 	}
1413 
1414       if (TREE_CODE (*tp) != OMP_CLAUSE)
1415 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1416 
1417       /* The copied TARGET_EXPR has never been expanded, even if the
1418 	 original node was expanded already.  */
1419       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1420 	{
1421 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1422 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1423 	}
1424 
1425       /* Variable substitution need not be simple.  In particular, the
1426 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1427 	 and friends are up-to-date.  */
1428       else if (TREE_CODE (*tp) == ADDR_EXPR)
1429 	{
1430 	  int invariant = is_gimple_min_invariant (*tp);
1431 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1432 
1433 	  /* Handle the case where we substituted an INDIRECT_REF
1434 	     into the operand of the ADDR_EXPR.  */
1435 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1436 	      && !id->do_not_fold)
1437 	    {
1438 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1439 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1440 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1441 	      *tp = t;
1442 	    }
1443 	  else
1444 	    recompute_tree_invariant_for_addr_expr (*tp);
1445 
1446 	  /* If this used to be invariant, but is not any longer,
1447 	     then regimplification is probably needed.  */
1448 	  if (invariant && !is_gimple_min_invariant (*tp))
1449 	    id->regimplify = true;
1450 
1451 	  *walk_subtrees = 0;
1452 	}
1453       else if (TREE_CODE (*tp) == OMP_CLAUSE
1454 	       && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
1455 		   || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
1456 	{
1457 	  tree t = OMP_CLAUSE_DECL (*tp);
1458 	  if (t
1459 	      && TREE_CODE (t) == TREE_LIST
1460 	      && TREE_PURPOSE (t)
1461 	      && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
1462 	    {
1463 	      *walk_subtrees = 0;
1464 	      OMP_CLAUSE_DECL (*tp) = copy_node (t);
1465 	      t = OMP_CLAUSE_DECL (*tp);
1466 	      TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
1467 	      for (int i = 0; i <= 4; i++)
1468 		walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
1469 			   copy_tree_body_r, id, NULL);
1470 	      if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
1471 		remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
1472 	      walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
1473 	    }
1474 	}
1475     }
1476 
1477   /* Keep iterating.  */
1478   return NULL_TREE;
1479 }
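
/* As an example of the INDIRECT_REF/MEM_REF handling above: when a call
   "foo (&a)" is inlined and the body of FOO dereferences its pointer
   parameter, the copied body initially contains "*&a"; the code above
   folds that back to plain "a" (or builds an explicit INDIRECT_REF when
   the types do not match) rather than leaving the redundant indirection
   in the IL.  */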
1480 
1481 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1482    source function, map that to the duplicate EH region number in
1483    the destination function.  */
1484 
1485 static int
1486 remap_eh_region_nr (int old_nr, copy_body_data *id)
1487 {
1488   eh_region old_r, new_r;
1489 
1490   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1491   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1492 
1493   return new_r->index;
1494 }
1495 
1496 /* Similar, but operate on INTEGER_CSTs.  */
1497 
1498 static tree
1499 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1500 {
1501   int old_nr, new_nr;
1502 
1503   old_nr = tree_to_shwi (old_t_nr);
1504   new_nr = remap_eh_region_nr (old_nr, id);
1505 
1506   return build_int_cst (integer_type_node, new_nr);
1507 }
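
/* These two helpers are used when copying statements that refer to EH
   regions by number, for instance the integer region arguments of the
   internal EH builtins: the old number is looked up in ID->EH_MAP and
   replaced by the index of the duplicated region in the destination
   function.  */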
1508 
1509 /* Helper for copy_bb.  Remap statement STMT using the inlining
1510    information in ID.  Return the new statement copy.  */
1511 
1512 static gimple_seq
1513 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1514 {
1515   gimple *copy = NULL;
1516   struct walk_stmt_info wi;
1517   bool skip_first = false;
1518   gimple_seq stmts = NULL;
1519 
1520   if (is_gimple_debug (stmt)
1521       && (gimple_debug_nonbind_marker_p (stmt)
1522 	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1523 	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1524     return NULL;
1525 
1526   if (!is_gimple_debug (stmt)
1527       && id->param_body_adjs
1528       && id->param_body_adjs->m_dead_stmts.contains (stmt))
1529     {
1530       tree *dval = id->param_body_adjs->m_dead_stmt_debug_equiv.get (stmt);
1531       if (!dval)
1532 	return NULL;
1533 
1534       gcc_assert (is_gimple_assign (stmt));
1535       tree lhs = gimple_assign_lhs (stmt);
1536       tree *dvar = id->param_body_adjs->m_dead_ssa_debug_equiv.get (lhs);
1537       gdebug *bind = gimple_build_debug_bind (*dvar, *dval, stmt);
1538       if (id->reset_location)
1539 	gimple_set_location (bind, input_location);
1540       id->debug_stmts.safe_push (bind);
1541       gimple_seq_add_stmt (&stmts, bind);
1542       return stmts;
1543     }
1544 
1545   /* Begin by recognizing trees that we'll completely rewrite for the
1546      inlining context.  Our output for these trees is completely
1547      different from our input (e.g. RETURN_EXPR is deleted and morphs
1548      into an edge).  Further down, we'll handle trees that get
1549      duplicated and/or tweaked.  */
1550 
1551   /* When requested, GIMPLE_RETURN should be transformed to just the
1552      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1553      be handled elsewhere by manipulating the CFG rather than the
1554      statement.  */
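  /* For example (illustrative GIMPLE): a callee "return a_1;" becomes
     "<retvar> = a_1;" here, while the transfer of control back into the
     caller is represented by CFG edges created elsewhere.  */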
1555   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1556     {
1557       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1558 
1559       /* If we're returning something, just turn that into an
1560 	 assignment to the equivalent of the original RESULT_DECL.
1561 	 If RETVAL is just the result decl, the result decl has
1562 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1563 	 just toss the entire GIMPLE_RETURN.  Likewise for when the
1564 	 call doesn't want the return value.  */
1565       if (retval
1566 	  && (TREE_CODE (retval) != RESULT_DECL
1567 	      && (!id->call_stmt
1568 		  || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1569 	      && (TREE_CODE (retval) != SSA_NAME
1570 		  || ! SSA_NAME_VAR (retval)
1571 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1572         {
1573 	  copy = gimple_build_assign (id->do_not_unshare
1574 				      ? id->retvar : unshare_expr (id->retvar),
1575 				      retval);
1576 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1577 	  skip_first = true;
1578 	}
1579       else
1580 	return NULL;
1581     }
1582   else if (gimple_has_substatements (stmt))
1583     {
1584       gimple_seq s1, s2;
1585 
1586       /* When cloning bodies from the C++ front end, we will be handed bodies
1587 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1588 	 have embedded statements.  */
1589       switch (gimple_code (stmt))
1590 	{
1591 	case GIMPLE_BIND:
1592 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1593 	  break;
1594 
1595 	case GIMPLE_CATCH:
1596 	  {
1597 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1598 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1599 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1600 	  }
1601 	  break;
1602 
1603 	case GIMPLE_EH_FILTER:
1604 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1605 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1606 	  break;
1607 
1608 	case GIMPLE_TRY:
1609 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1610 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1611 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1612 	  break;
1613 
1614 	case GIMPLE_WITH_CLEANUP_EXPR:
1615 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1616 	  copy = gimple_build_wce (s1);
1617 	  break;
1618 
1619 	case GIMPLE_OMP_PARALLEL:
1620 	  {
1621 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1622 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1623 	    copy = gimple_build_omp_parallel
1624 	             (s1,
1625 		      gimple_omp_parallel_clauses (omp_par_stmt),
1626 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1627 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1628 	  }
1629 	  break;
1630 
1631 	case GIMPLE_OMP_TASK:
1632 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1633 	  copy = gimple_build_omp_task
1634 	           (s1,
1635 		    gimple_omp_task_clauses (stmt),
1636 		    gimple_omp_task_child_fn (stmt),
1637 		    gimple_omp_task_data_arg (stmt),
1638 		    gimple_omp_task_copy_fn (stmt),
1639 		    gimple_omp_task_arg_size (stmt),
1640 		    gimple_omp_task_arg_align (stmt));
1641 	  break;
1642 
1643 	case GIMPLE_OMP_FOR:
1644 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1645 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1646 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1647 				       gimple_omp_for_clauses (stmt),
1648 				       gimple_omp_for_collapse (stmt), s2);
1649 	  {
1650 	    size_t i;
1651 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1652 	      {
1653 		gimple_omp_for_set_index (copy, i,
1654 					  gimple_omp_for_index (stmt, i));
1655 		gimple_omp_for_set_initial (copy, i,
1656 					    gimple_omp_for_initial (stmt, i));
1657 		gimple_omp_for_set_final (copy, i,
1658 					  gimple_omp_for_final (stmt, i));
1659 		gimple_omp_for_set_incr (copy, i,
1660 					 gimple_omp_for_incr (stmt, i));
1661 		gimple_omp_for_set_cond (copy, i,
1662 					 gimple_omp_for_cond (stmt, i));
1663 	      }
1664 	  }
1665 	  break;
1666 
1667 	case GIMPLE_OMP_MASTER:
1668 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1669 	  copy = gimple_build_omp_master (s1);
1670 	  break;
1671 
1672 	case GIMPLE_OMP_MASKED:
1673 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1674 	  copy = gimple_build_omp_masked
1675 		   (s1, gimple_omp_masked_clauses (stmt));
1676 	  break;
1677 
1678 	case GIMPLE_OMP_SCOPE:
1679 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1680 	  copy = gimple_build_omp_scope
1681 		   (s1, gimple_omp_scope_clauses (stmt));
1682 	  break;
1683 
1684 	case GIMPLE_OMP_TASKGROUP:
1685 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1686 	  copy = gimple_build_omp_taskgroup
1687 		   (s1, gimple_omp_taskgroup_clauses (stmt));
1688 	  break;
1689 
1690 	case GIMPLE_OMP_ORDERED:
1691 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1692 	  copy = gimple_build_omp_ordered
1693 		   (s1,
1694 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1695 	  break;
1696 
1697 	case GIMPLE_OMP_SCAN:
1698 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1699 	  copy = gimple_build_omp_scan
1700 		   (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1701 	  break;
1702 
1703 	case GIMPLE_OMP_SECTION:
1704 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1705 	  copy = gimple_build_omp_section (s1);
1706 	  break;
1707 
1708 	case GIMPLE_OMP_SECTIONS:
1709 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1710 	  copy = gimple_build_omp_sections
1711 	           (s1, gimple_omp_sections_clauses (stmt));
1712 	  break;
1713 
1714 	case GIMPLE_OMP_SINGLE:
1715 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1716 	  copy = gimple_build_omp_single
1717 	           (s1, gimple_omp_single_clauses (stmt));
1718 	  break;
1719 
1720 	case GIMPLE_OMP_TARGET:
1721 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1722 	  copy = gimple_build_omp_target
1723 		   (s1, gimple_omp_target_kind (stmt),
1724 		    gimple_omp_target_clauses (stmt));
1725 	  break;
1726 
1727 	case GIMPLE_OMP_TEAMS:
1728 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1729 	  copy = gimple_build_omp_teams
1730 		   (s1, gimple_omp_teams_clauses (stmt));
1731 	  break;
1732 
1733 	case GIMPLE_OMP_CRITICAL:
1734 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1735 	  copy = gimple_build_omp_critical (s1,
1736 					    gimple_omp_critical_name
1737 					      (as_a <gomp_critical *> (stmt)),
1738 					    gimple_omp_critical_clauses
1739 					      (as_a <gomp_critical *> (stmt)));
1740 	  break;
1741 
1742 	case GIMPLE_TRANSACTION:
1743 	  {
1744 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1745 	    gtransaction *new_trans_stmt;
1746 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1747 				   id);
1748 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1749 	    gimple_transaction_set_subcode (new_trans_stmt,
1750 	      gimple_transaction_subcode (old_trans_stmt));
1751 	    gimple_transaction_set_label_norm (new_trans_stmt,
1752 	      gimple_transaction_label_norm (old_trans_stmt));
1753 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1754 	      gimple_transaction_label_uninst (old_trans_stmt));
1755 	    gimple_transaction_set_label_over (new_trans_stmt,
1756 	      gimple_transaction_label_over (old_trans_stmt));
1757 	  }
1758 	  break;
1759 
1760 	default:
1761 	  gcc_unreachable ();
1762 	}
1763     }
1764   else
1765     {
1766       if (gimple_assign_copy_p (stmt)
1767 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1768 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1769 	{
1770 	  /* Here we handle statements that are not completely rewritten.
1771 	     First we detect some inlining-induced bogosities for
1772 	     discarding.  */
1773 
1774 	  /* Some assignments VAR = VAR; don't generate any rtl code
1775 	     and thus don't count as variable modification.  Avoid
1776 	     keeping bogosities like 0 = 0.  */
1777 	  tree decl = gimple_assign_lhs (stmt), value;
1778 	  tree *n;
1779 
1780 	  n = id->decl_map->get (decl);
1781 	  if (n)
1782 	    {
1783 	      value = *n;
1784 	      STRIP_TYPE_NOPS (value);
1785 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1786 		return NULL;
1787 	    }
1788 	}
1789 
1790       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1791 	 in a block that we aren't copying during tree_function_versioning,
1792 	 just drop the clobber stmt.  */
1793       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1794 	{
1795 	  tree lhs = gimple_assign_lhs (stmt);
1796 	  if (TREE_CODE (lhs) == MEM_REF
1797 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1798 	    {
1799 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1800 	      if (gimple_bb (def_stmt)
1801 		  && !bitmap_bit_p (id->blocks_to_copy,
1802 				    gimple_bb (def_stmt)->index))
1803 		return NULL;
1804 	    }
1805 	}
1806 
1807       /* We do not allow CLOBBERs of handled components.  In case
1808 	 the returned value is stored via such a handled component, remove
1809 	 the clobber so the stmt verifier is happy.  */
1810       if (gimple_clobber_p (stmt)
1811 	  && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1812 	{
1813 	  tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1814 	  if (!DECL_P (remapped)
1815 	      && TREE_CODE (remapped) != MEM_REF)
1816 	    return NULL;
1817 	}
1818 
1819       if (gimple_debug_bind_p (stmt))
1820 	{
1821 	  tree var = gimple_debug_bind_get_var (stmt);
1822 	  tree value = gimple_debug_bind_get_value (stmt);
1823 	  if (id->param_body_adjs
1824 	      && id->param_body_adjs->m_dead_stmts.contains (stmt))
1825 	    {
1826 	      value = unshare_expr_without_location (value);
1827 	      id->param_body_adjs->remap_with_debug_expressions (&value);
1828 	    }
1829 
1830 	  gdebug *copy = gimple_build_debug_bind (var, value, stmt);
1831 	  if (id->reset_location)
1832 	    gimple_set_location (copy, input_location);
1833 	  id->debug_stmts.safe_push (copy);
1834 	  gimple_seq_add_stmt (&stmts, copy);
1835 	  return stmts;
1836 	}
1837       if (gimple_debug_source_bind_p (stmt))
1838 	{
1839 	  gdebug *copy = gimple_build_debug_source_bind
1840 	                   (gimple_debug_source_bind_get_var (stmt),
1841 			    gimple_debug_source_bind_get_value (stmt),
1842 			    stmt);
1843 	  if (id->reset_location)
1844 	    gimple_set_location (copy, input_location);
1845 	  id->debug_stmts.safe_push (copy);
1846 	  gimple_seq_add_stmt (&stmts, copy);
1847 	  return stmts;
1848 	}
1849       if (gimple_debug_nonbind_marker_p (stmt))
1850 	{
1851 	  /* If the inlined function has too many debug markers,
1852 	     don't copy them.  */
1853 	  if (id->src_cfun->debug_marker_count
1854 	      > param_max_debug_marker_count
1855 	      || id->reset_location)
1856 	    return stmts;
1857 
1858 	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1859 	  id->debug_stmts.safe_push (copy);
1860 	  gimple_seq_add_stmt (&stmts, copy);
1861 	  return stmts;
1862 	}
1863 
1864       /* Create a new deep copy of the statement.  */
1865       copy = gimple_copy (stmt);
1866 
1867       /* Clear flags that need revisiting.  */
1868       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1869         {
1870 	  if (gimple_call_tail_p (call_stmt))
1871 	    gimple_call_set_tail (call_stmt, false);
1872 	  if (gimple_call_from_thunk_p (call_stmt))
1873 	    gimple_call_set_from_thunk (call_stmt, false);
1874 	  if (gimple_call_internal_p (call_stmt))
1875 	    switch (gimple_call_internal_fn (call_stmt))
1876 	      {
1877 	      case IFN_GOMP_SIMD_LANE:
1878 	      case IFN_GOMP_SIMD_VF:
1879 	      case IFN_GOMP_SIMD_LAST_LANE:
1880 	      case IFN_GOMP_SIMD_ORDERED_START:
1881 	      case IFN_GOMP_SIMD_ORDERED_END:
1882 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1883 	        break;
1884 	      default:
1885 		break;
1886 	      }
1887 	}
1888 
1889       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1890 	 RESX and EH_DISPATCH.  */
1891       if (id->eh_map)
1892 	switch (gimple_code (copy))
1893 	  {
1894 	  case GIMPLE_CALL:
1895 	    {
1896 	      tree r, fndecl = gimple_call_fndecl (copy);
1897 	      if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1898 		switch (DECL_FUNCTION_CODE (fndecl))
1899 		  {
1900 		  case BUILT_IN_EH_COPY_VALUES:
1901 		    r = gimple_call_arg (copy, 1);
1902 		    r = remap_eh_region_tree_nr (r, id);
1903 		    gimple_call_set_arg (copy, 1, r);
1904 		    /* FALLTHRU */
1905 
1906 		  case BUILT_IN_EH_POINTER:
1907 		  case BUILT_IN_EH_FILTER:
1908 		    r = gimple_call_arg (copy, 0);
1909 		    r = remap_eh_region_tree_nr (r, id);
1910 		    gimple_call_set_arg (copy, 0, r);
1911 		    break;
1912 
1913 		  default:
1914 		    break;
1915 		  }
1916 
1917 	      /* Reset alias info if we didn't apply measures to
1918 		 keep it valid over inlining by setting DECL_PT_UID.  */
1919 	      if (!id->src_cfun->gimple_df
1920 		  || !id->src_cfun->gimple_df->ipa_pta)
1921 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1922 	    }
1923 	    break;
1924 
1925 	  case GIMPLE_RESX:
1926 	    {
1927 	      gresx *resx_stmt = as_a <gresx *> (copy);
1928 	      int r = gimple_resx_region (resx_stmt);
1929 	      r = remap_eh_region_nr (r, id);
1930 	      gimple_resx_set_region (resx_stmt, r);
1931 	    }
1932 	    break;
1933 
1934 	  case GIMPLE_EH_DISPATCH:
1935 	    {
1936 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1937 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1938 	      r = remap_eh_region_nr (r, id);
1939 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1940 	    }
1941 	    break;
1942 
1943 	  default:
1944 	    break;
1945 	  }
1946     }
1947 
1948   /* If STMT has a block defined, map it to the newly constructed block.  */
1949   if (tree block = gimple_block (copy))
1950     {
1951       tree *n;
1952       n = id->decl_map->get (block);
1953       gcc_assert (n);
1954       gimple_set_block (copy, *n);
1955     }
1956   if (id->param_body_adjs)
1957     {
1958       gimple_seq extra_stmts = NULL;
1959       id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts, stmt);
1960       if (!gimple_seq_empty_p (extra_stmts))
1961 	{
1962 	  memset (&wi, 0, sizeof (wi));
1963 	  wi.info = id;
1964 	  for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1965 	       !gsi_end_p (egsi);
1966 	       gsi_next (&egsi))
1967 	    walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1968 	  gimple_seq_add_seq (&stmts, extra_stmts);
1969 	}
1970     }
1971 
1972   if (id->reset_location)
1973     gimple_set_location (copy, input_location);
1974 
1975   /* Debug statements ought to be rebuilt and not copied.  */
1976   gcc_checking_assert (!is_gimple_debug (copy));
1977 
1978   /* Remap all the operands in COPY.  */
1979   memset (&wi, 0, sizeof (wi));
1980   wi.info = id;
1981   if (skip_first)
1982     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1983   else
1984     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1985 
1986   /* Clear the copied virtual operands.  We are not remapping them here
1987      but are going to recreate them from scratch.  */
1988   if (gimple_has_mem_ops (copy))
1989     {
1990       gimple_set_vdef (copy, NULL_TREE);
1991       gimple_set_vuse (copy, NULL_TREE);
1992     }
1993 
1994   if (cfun->can_throw_non_call_exceptions)
1995     {
1996       /* When inlining a function which does not have non-call exceptions
1997 	 enabled into a function that has (which only happens with
1998 	 always-inline) we have to fixup stmts that cannot throw.  */
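      /* A minimal sketch of the rewrite (illustrative SSA names): a condition
	 "if (x_1 > y_2)" whose comparison could trap is split into
	 "_3 = x_1 > y_2;  if (_3 != 0)", so the potentially throwing
	 comparison becomes a separate statement that can get its own EH edge.  */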
1999       if (gcond *cond = dyn_cast <gcond *> (copy))
2000 	if (gimple_could_trap_p (cond))
2001 	  {
2002 	    gassign *cmp
2003 	      = gimple_build_assign (make_ssa_name (boolean_type_node),
2004 				     gimple_cond_code (cond),
2005 				     gimple_cond_lhs (cond),
2006 				     gimple_cond_rhs (cond));
2007 	    gimple_seq_add_stmt (&stmts, cmp);
2008 	    gimple_cond_set_code (cond, NE_EXPR);
2009 	    gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
2010 	    gimple_cond_set_rhs (cond, boolean_false_node);
2011 	  }
2012     }
2013 
2014   gimple_seq_add_stmt (&stmts, copy);
2015   return stmts;
2016 }
2017 
2018 
2019 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
2020    later.  */
2021 
2022 static basic_block
2023 copy_bb (copy_body_data *id, basic_block bb,
2024          profile_count num, profile_count den)
2025 {
2026   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2027   basic_block copy_basic_block;
2028   tree decl;
2029   basic_block prev;
2030 
2031   profile_count::adjust_for_ipa_scaling (&num, &den);
2032 
2033   /* Search for previous copied basic block.  */
2034   prev = bb->prev_bb;
2035   while (!prev->aux)
2036     prev = prev->prev_bb;
2037 
2038   /* create_basic_block() will append every new block to
2039      basic_block_info automatically.  */
2040   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2041   copy_basic_block->count = bb->count.apply_scale (num, den);
2042 
2043   copy_gsi = gsi_start_bb (copy_basic_block);
2044 
2045   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2046     {
2047       gimple_seq stmts;
2048       gimple *stmt = gsi_stmt (gsi);
2049       gimple *orig_stmt = stmt;
2050       gimple_stmt_iterator stmts_gsi;
2051       bool stmt_added = false;
2052 
2053       id->regimplify = false;
2054       stmts = remap_gimple_stmt (stmt, id);
2055 
2056       if (gimple_seq_empty_p (stmts))
2057 	continue;
2058 
2059       seq_gsi = copy_gsi;
2060 
2061       for (stmts_gsi = gsi_start (stmts);
2062 	   !gsi_end_p (stmts_gsi); )
2063 	{
2064 	  stmt = gsi_stmt (stmts_gsi);
2065 
2066 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
2067 	  gsi_next (&stmts_gsi);
2068 
2069 	  if (gimple_nop_p (stmt))
2070 	      continue;
2071 
2072 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2073 					    orig_stmt);
2074 
2075 	  /* With return slot optimization we can end up with
2076 	     non-gimple (foo *)&this->m, fix that here.  */
2077 	  if (is_gimple_assign (stmt)
2078 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2079 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2080 	    {
2081 	      tree new_rhs;
2082 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
2083 						  gimple_assign_rhs1 (stmt),
2084 						  true, NULL, false,
2085 						  GSI_CONTINUE_LINKING);
2086 	      gimple_assign_set_rhs1 (stmt, new_rhs);
2087 	      id->regimplify = false;
2088 	    }
2089 
2090 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2091 
2092 	  if (id->regimplify)
2093 	    gimple_regimplify_operands (stmt, &seq_gsi);
2094 
2095 	  stmt_added = true;
2096 	}
2097 
2098       if (!stmt_added)
2099 	continue;
2100 
2101       /* If copy_basic_block has been empty at the start of this iteration,
2102 	 call gsi_start_bb again to get at the newly added statements.  */
2103       if (gsi_end_p (copy_gsi))
2104 	copy_gsi = gsi_start_bb (copy_basic_block);
2105       else
2106 	gsi_next (&copy_gsi);
2107 
2108       /* Process the new statement.  The call to gimple_regimplify_operands
2109 	 possibly turned the statement into multiple statements, we
2110 	 need to process all of them.  */
2111       do
2112 	{
2113 	  tree fn;
2114 	  gcall *call_stmt;
2115 
2116 	  stmt = gsi_stmt (copy_gsi);
2117 	  call_stmt = dyn_cast <gcall *> (stmt);
2118 	  if (call_stmt
2119 	      && gimple_call_va_arg_pack_p (call_stmt)
2120 	      && id->call_stmt
2121 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
2122 	    {
2123 	      /* __builtin_va_arg_pack () should be replaced by
2124 		 all arguments corresponding to ... in the caller.  */
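	      /* Rough example (hypothetical functions): if the caller invoked
		 "foo (1, 2, 3)" and foo is declared "void foo (int, ...)",
		 then a call "bar (x, __builtin_va_arg_pack ())" inside foo is
		 rebuilt as "bar (x, 2, 3)" when foo is inlined there.  */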
2125 	      tree p;
2126 	      gcall *new_call;
2127 	      vec<tree> argarray;
2128 	      size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2129 	      size_t nargs = nargs_caller;
2130 
2131 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2132 		{
2133 		  /* Avoid crashing on invalid IL that doesn't have a
2134 		     varargs function or that passes too few arguments.  */
2135 		  if (nargs == 0)
2136 		    break;
2137 		  nargs--;
2138 		}
2139 
2140 	      /* Create the new array of arguments.  */
2141 	      size_t nargs_callee = gimple_call_num_args (call_stmt);
2142 	      size_t n = nargs + nargs_callee;
2143 	      argarray.create (n);
2144 	      argarray.safe_grow_cleared (n, true);
2145 
2146 	      /* Copy all the arguments before '...'  */
2147 	      if (nargs_callee)
2148 		memcpy (argarray.address (),
2149 			gimple_call_arg_ptr (call_stmt, 0),
2150 			nargs_callee * sizeof (tree));
2151 
2152 	      /* Append the arguments passed in '...'  */
2153 	      if (nargs)
2154 		memcpy (argarray.address () + nargs_callee,
2155 			gimple_call_arg_ptr (id->call_stmt, 0)
2156 			+ (nargs_caller - nargs), nargs * sizeof (tree));
2157 
2158 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2159 						argarray);
2160 
2161 	      argarray.release ();
2162 
2163 	      /* Copy all GIMPLE_CALL flags, location and block, except
2164 		 GF_CALL_VA_ARG_PACK.  */
2165 	      gimple_call_copy_flags (new_call, call_stmt);
2166 	      gimple_call_set_va_arg_pack (new_call, false);
2167 	      gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2168 	      /* location includes block.  */
2169 	      gimple_set_location (new_call, gimple_location (stmt));
2170 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2171 
2172 	      gsi_replace (&copy_gsi, new_call, false);
2173 	      stmt = new_call;
2174 	    }
2175 	  else if (call_stmt
2176 		   && id->call_stmt
2177 		   && (decl = gimple_call_fndecl (stmt))
2178 		   && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2179 	    {
2180 	      /* __builtin_va_arg_pack_len () should be replaced by
2181 		 the number of anonymous arguments.  */
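	      /* Continuing the hypothetical example above: with the caller
		 calling "foo (1, 2, 3)" and one named parameter, the two
		 anonymous arguments make __builtin_va_arg_pack_len ()
		 evaluate to 2.  */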
2182 	      size_t nargs = gimple_call_num_args (id->call_stmt);
2183 	      tree count, p;
2184 	      gimple *new_stmt;
2185 
2186 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2187 		nargs--;
2188 
2189 	      if (!gimple_call_lhs (stmt))
2190 		{
2191 		  /* Drop unused calls.  */
2192 		  gsi_remove (&copy_gsi, false);
2193 		  continue;
2194 		}
2195 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2196 		{
2197 		  count = build_int_cst (integer_type_node, nargs);
2198 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2199 		  gsi_replace (&copy_gsi, new_stmt, false);
2200 		  stmt = new_stmt;
2201 		}
2202 	      else if (nargs != 0)
2203 		{
2204 		  tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2205 		  count = build_int_cst (integer_type_node, nargs);
2206 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2207 						  PLUS_EXPR, newlhs, count);
2208 		  gimple_call_set_lhs (stmt, newlhs);
2209 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2210 		}
2211 	    }
2212 	  else if (call_stmt
2213 		   && id->call_stmt
2214 		   && gimple_call_internal_p (stmt))
2215 	    switch (gimple_call_internal_fn (stmt))
2216 	      {
2217 	      case IFN_TSAN_FUNC_EXIT:
2218 		/* Drop .TSAN_FUNC_EXIT () internal calls during inlining.  */
2219 		gsi_remove (&copy_gsi, false);
2220 		continue;
2221 	      case IFN_ASAN_MARK:
2222 		/* Drop .ASAN_MARK internal calls during inlining into
2223 		   no_sanitize functions.  */
2224 		if (!sanitize_flags_p (SANITIZE_ADDRESS, id->dst_fn)
2225 		    && !sanitize_flags_p (SANITIZE_HWADDRESS, id->dst_fn))
2226 		  {
2227 		    gsi_remove (&copy_gsi, false);
2228 		    continue;
2229 		  }
2230 		break;
2231 	      default:
2232 		break;
2233 	      }
2234 
2235 	  /* Statements produced by inlining can be unfolded, especially
2236 	     when we constant propagated some operands.  We can't fold
2237 	     them right now for two reasons:
2238 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
2239 	     2) we can't change function calls to builtins.
2240 	     So we just mark the statement for later folding.  We mark
2241 	     all new statements, instead of just the statements that changed
2242 	     by some nontrivial substitution, so even statements made
2243 	     foldable indirectly are updated.  If this turns out to be
2244 	     expensive, copy_body can be told to watch for nontrivial
2245 	     changes.  */
2246 	  if (id->statements_to_fold)
2247 	    id->statements_to_fold->add (stmt);
2248 
2249 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2250 	     callgraph edges and update or duplicate them.  */
2251 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2252 	    {
2253 	      struct cgraph_edge *edge;
2254 
2255 	      switch (id->transform_call_graph_edges)
2256 		{
2257 		case CB_CGE_DUPLICATE:
2258 		  edge = id->src_node->get_edge (orig_stmt);
2259 		  if (edge)
2260 		    {
2261 		      struct cgraph_edge *old_edge = edge;
2262 
2263 		      /* A speculative call consists of multiple
2264 			 edges - an indirect edge and one or more direct edges.
2265 			 Duplicate the whole thing and distribute frequencies
2266 			 accordingly.  */
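		      /* Illustrative numbers: with two direct targets counted
			 60 and 30 and an indirect remainder of 10, the cloned
			 direct edges receive 60% and 30% of the new block's
			 count and the cloned indirect edge the remaining 10%.  */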
2267 		      if (edge->speculative)
2268 			{
2269 			  int n = 0;
2270 			  profile_count direct_cnt
2271 				 = profile_count::zero ();
2272 
2273 			  /* First figure out the distribution of counts
2274 			     so we can re-scale BB profile accordingly.  */
2275 			  for (cgraph_edge *e = old_edge; e;
2276 			       e = e->next_speculative_call_target ())
2277 			    direct_cnt = direct_cnt + e->count;
2278 
2279 			  cgraph_edge *indirect
2280 				 = old_edge->speculative_call_indirect_edge ();
2281 			  profile_count indir_cnt = indirect->count;
2282 
2283 			  /* Next iterate over all direct edges, clone each one
2284 			     and its corresponding reference, and update the profile.  */
2285 			  for (cgraph_edge *e = old_edge;
2286 			       e;
2287 			       e = e->next_speculative_call_target ())
2288 			    {
2289 			      profile_count cnt = e->count;
2290 
2291 			      id->dst_node->clone_reference
2292 				 (e->speculative_call_target_ref (), stmt);
2293 			      edge = e->clone (id->dst_node, call_stmt,
2294 					       gimple_uid (stmt), num, den,
2295 					       true);
2296 			      profile_probability prob
2297 				 = cnt.probability_in (direct_cnt
2298 						       + indir_cnt);
2299 			      edge->count
2300 				 = copy_basic_block->count.apply_probability
2301 					 (prob);
2302 			      n++;
2303 			    }
2304 			  gcc_checking_assert
2305 				 (indirect->num_speculative_call_targets_p ()
2306 				  == n);
2307 
2308 			  /* Duplicate the indirect edge after all direct edges
2309 			     cloned.  */
2310 			  indirect = indirect->clone (id->dst_node, call_stmt,
2311 						      gimple_uid (stmt),
2312 						      num, den,
2313 						      true);
2314 
2315 			  profile_probability prob
2316 			     = indir_cnt.probability_in (direct_cnt
2317 							 + indir_cnt);
2318 			  indirect->count
2319 			     = copy_basic_block->count.apply_probability (prob);
2320 			}
2321 		      else
2322 			{
2323 			  edge = edge->clone (id->dst_node, call_stmt,
2324 					      gimple_uid (stmt),
2325 					      num, den,
2326 					      true);
2327 			  edge->count = copy_basic_block->count;
2328 			}
2329 		    }
2330 		  break;
2331 
2332 		case CB_CGE_MOVE_CLONES:
2333 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2334 								call_stmt);
2335 		  edge = id->dst_node->get_edge (stmt);
2336 		  break;
2337 
2338 		case CB_CGE_MOVE:
2339 		  edge = id->dst_node->get_edge (orig_stmt);
2340 		  if (edge)
2341 		    edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2342 		  break;
2343 
2344 		default:
2345 		  gcc_unreachable ();
2346 		}
2347 
2348 	      /* Constant propagation on arguments done during inlining
2349 		 may create a new direct call.  Produce an edge for it.  */
2350 	      if ((!edge
2351 		   || (edge->indirect_inlining_edge
2352 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2353 		  && id->dst_node->definition
2354 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2355 		{
2356 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2357 
2358 		  /* We have a missing edge in the callgraph.  This can happen
2359 		     when previous inlining turned an indirect call into a
2360 		     direct call by constant propagating arguments or we are
2361 		     producing a dead clone (for further cloning).  In all
2362 		     other cases we hit a bug (incorrect node sharing is the
2363 		     most common reason for missing edges).  */
2364 		  gcc_assert (!dest->definition
2365 			      || dest->address_taken
2366 		  	      || !id->src_node->definition
2367 			      || !id->dst_node->definition);
2368 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2369 		    id->dst_node->create_edge_including_clones
2370 		      (dest, orig_stmt, call_stmt, bb->count,
2371 		       CIF_ORIGINALLY_INDIRECT_CALL);
2372 		  else
2373 		    id->dst_node->create_edge (dest, call_stmt,
2374 					bb->count)->inline_failed
2375 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2376 		  if (dump_file)
2377 		    {
2378 		      fprintf (dump_file, "Created new direct edge to %s\n",
2379 			       dest->dump_name ());
2380 		    }
2381 		}
2382 
2383 	      notice_special_calls (as_a <gcall *> (stmt));
2384 	    }
2385 
2386 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2387 				      id->eh_map, id->eh_lp_nr);
2388 
2389 	  gsi_next (&copy_gsi);
2390 	}
2391       while (!gsi_end_p (copy_gsi));
2392 
2393       copy_gsi = gsi_last_bb (copy_basic_block);
2394     }
2395 
2396   return copy_basic_block;
2397 }
2398 
2399 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2400    form is quite easy, since the dominator relationship for the old basic
2401    blocks does not change.
2402 
2403    There is, however, an exception where inlining might change the dominator
2404    relation across EH edges from basic blocks within the inlined function
2405    destined to landing pads in the function we inline into.
2406 
2407    The function fills in PHI_RESULTs of such PHI nodes if they refer
2408    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2409    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2410    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2411    set, and this means that there will be no overlapping live ranges
2412    for the underlying symbol.
2413 
2414    This might change in the future if we allow redirecting of EH edges;
2415    we might then want to change the way we build the CFG pre-inlining to
2416    include all the possible edges.  */
2417 static void
2418 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2419 				  bool can_throw, bool nonlocal_goto)
2420 {
2421   edge e;
2422   edge_iterator ei;
2423 
2424   FOR_EACH_EDGE (e, ei, bb->succs)
2425     if (!e->dest->aux
2426 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2427       {
2428 	gphi *phi;
2429 	gphi_iterator si;
2430 
2431 	if (!nonlocal_goto)
2432 	  gcc_assert (e->flags & EDGE_EH);
2433 
2434 	if (!can_throw)
2435 	  gcc_assert (!(e->flags & EDGE_EH));
2436 
2437 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2438 	  {
2439 	    edge re;
2440 
2441 	    phi = si.phi ();
2442 
2443 	    /* For abnormal goto/call edges the receiver can be the
2444 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2445 
2446 	    gcc_assert ((e->flags & EDGE_EH)
2447 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2448 
2449 	    re = find_edge (ret_bb, e->dest);
2450 	    gcc_checking_assert (re);
2451 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2452 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2453 
2454 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2455 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2456 	  }
2457       }
2458 }
2459 
2460 /* Insert clobbers for automatic variables of inlined ID->src_fn
2461    function at the start of basic block ID->eh_landing_pad_dest.  */
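/* Rough sketch of the effect (hypothetical source): after inlining
   "void f () { T x; ...; may_throw (); }" into a caller that catches the
   exception, an "x = {CLOBBER};" for the remapped copy of X is emitted at the
   start of the caller's landing pad, provided X is live across an EH edge
   coming from the copied blocks.  */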
2462 
2463 static void
2464 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2465 {
2466   tree var;
2467   basic_block bb = id->eh_landing_pad_dest;
2468   live_vars_map *vars = NULL;
2469   unsigned int cnt = 0;
2470   unsigned int i;
2471   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2472     if (VAR_P (var)
2473 	&& !DECL_HARD_REGISTER (var)
2474 	&& !TREE_THIS_VOLATILE (var)
2475 	&& !DECL_HAS_VALUE_EXPR_P (var)
2476 	&& !is_gimple_reg (var)
2477 	&& auto_var_in_fn_p (var, id->src_fn)
2478 	&& !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2479       {
2480 	tree *t = id->decl_map->get (var);
2481 	if (!t)
2482 	  continue;
2483 	tree new_var = *t;
2484 	if (VAR_P (new_var)
2485 	    && !DECL_HARD_REGISTER (new_var)
2486 	    && !TREE_THIS_VOLATILE (new_var)
2487 	    && !DECL_HAS_VALUE_EXPR_P (new_var)
2488 	    && !is_gimple_reg (new_var)
2489 	    && auto_var_in_fn_p (new_var, id->dst_fn))
2490 	  {
2491 	    if (vars == NULL)
2492 	      vars = new live_vars_map;
2493             vars->put (DECL_UID (var), cnt++);
2494 	  }
2495       }
2496   if (vars == NULL)
2497     return;
2498 
2499   vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2500   FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2501     if (VAR_P (var))
2502       {
2503 	edge e;
2504 	edge_iterator ei;
2505 	bool needed = false;
2506 	unsigned int *v = vars->get (DECL_UID (var));
2507 	if (v == NULL)
2508 	  continue;
2509 	FOR_EACH_EDGE (e, ei, bb->preds)
2510 	  if ((e->flags & EDGE_EH) != 0
2511 	      && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2512 	    {
2513 	      basic_block src_bb = (basic_block) e->src->aux;
2514 
2515 	      if (bitmap_bit_p (&live[src_bb->index], *v))
2516 		{
2517 		  needed = true;
2518 		  break;
2519 		}
2520 	    }
2521 	if (needed)
2522 	  {
2523 	    tree new_var = *id->decl_map->get (var);
2524 	    gimple_stmt_iterator gsi = gsi_after_labels (bb);
2525 	    tree clobber = build_clobber (TREE_TYPE (new_var));
2526 	    gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2527 	    gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2528 	  }
2529       }
2530   destroy_live_vars (live);
2531   delete vars;
2532 }
2533 
2534 /* Copy edges from BB into its copy constructed earlier, scale profile
2535    accordingly.  Edges will be taken care of later.  Assume the aux
2536    pointers point to the copies of each BB.  Return true if any
2537    debug stmts are left after a statement that must end the basic block.  */
2538 
2539 static bool
2540 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2541 		   basic_block ret_bb, basic_block abnormal_goto_dest,
2542 		   copy_body_data *id)
2543 {
2544   basic_block new_bb = (basic_block) bb->aux;
2545   edge_iterator ei;
2546   edge old_edge;
2547   gimple_stmt_iterator si;
2548   bool need_debug_cleanup = false;
2549 
2550   /* Use the indices from the original blocks to create edges for the
2551      new ones.  */
2552   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2553     if (!(old_edge->flags & EDGE_EH))
2554       {
2555 	edge new_edge;
2556 	int flags = old_edge->flags;
2557 	location_t locus = old_edge->goto_locus;
2558 
2559 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2560 	if (old_edge->dest->index == EXIT_BLOCK
2561 	    && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2562 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2563 	  flags |= EDGE_FALLTHRU;
2564 
2565 	new_edge
2566 	  = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2567 	new_edge->probability = old_edge->probability;
2568 	if (!id->reset_location)
2569 	  new_edge->goto_locus = remap_location (locus, id);
2570       }
2571 
2572   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2573     return false;
2574 
2575   /* When doing function splitting, we must decrease the count of the return
2576      block which was previously reachable by a block we did not copy.  */
2577   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2578     FOR_EACH_EDGE (old_edge, ei, bb->preds)
2579       if (old_edge->src->index != ENTRY_BLOCK
2580 	  && !old_edge->src->aux)
2581 	new_bb->count -= old_edge->count ().apply_scale (num, den);
2582 
2583   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2584     {
2585       gimple *copy_stmt;
2586       bool can_throw, nonlocal_goto;
2587 
2588       copy_stmt = gsi_stmt (si);
2589       if (!is_gimple_debug (copy_stmt))
2590 	update_stmt (copy_stmt);
2591 
2592       /* Do this before the possible split_block.  */
2593       gsi_next (&si);
2594 
2595       /* If this tree could throw an exception, there are two
2596          cases where we need to add abnormal edge(s): the
2597          tree wasn't in a region and there is a "current
2598          region" in the caller; or the original tree had
2599          EH edges.  In both cases split the block after the tree,
2600          and add abnormal edge(s) as needed; we need both
2601          those from the callee and the caller.
2602          We check whether the copy can throw, because the const
2603          propagation can change an INDIRECT_REF which throws
2604          into a COMPONENT_REF which doesn't.  If the copy
2605          can throw, the original could also throw.  */
2606       can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2607       nonlocal_goto
2608 	= (stmt_can_make_abnormal_goto (copy_stmt)
2609 	   && !computed_goto_p (copy_stmt));
2610 
2611       if (can_throw || nonlocal_goto)
2612 	{
2613 	  if (!gsi_end_p (si))
2614 	    {
2615 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2616 		gsi_next (&si);
2617 	      if (gsi_end_p (si))
2618 		need_debug_cleanup = true;
2619 	    }
2620 	  if (!gsi_end_p (si))
2621 	    /* Note that bb's predecessor edges aren't necessarily
2622 	       right at this point; split_block doesn't care.  */
2623 	    {
2624 	      edge e = split_block (new_bb, copy_stmt);
2625 
2626 	      new_bb = e->dest;
2627 	      new_bb->aux = e->src->aux;
2628 	      si = gsi_start_bb (new_bb);
2629 	    }
2630 	}
2631 
2632       bool update_probs = false;
2633 
2634       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2635 	{
2636 	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2637 	  update_probs = true;
2638 	}
2639       else if (can_throw)
2640 	{
2641 	  make_eh_edges (copy_stmt);
2642 	  update_probs = true;
2643 	}
2644 
2645       /* EH edges may not match old edges.  Copy as much as possible.  */
2646       if (update_probs)
2647 	{
2648           edge e;
2649           edge_iterator ei;
2650 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2651 
2652           FOR_EACH_EDGE (old_edge, ei, bb->succs)
2653             if ((old_edge->flags & EDGE_EH)
2654 		&& (e = find_edge (copy_stmt_bb,
2655 				   (basic_block) old_edge->dest->aux))
2656 		&& (e->flags & EDGE_EH))
2657 	      e->probability = old_edge->probability;
2658 
2659           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2660 	    if (e->flags & EDGE_EH)
2661 	      {
2662 		if (!e->probability.initialized_p ())
2663 		  e->probability = profile_probability::never ();
2664 		if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2665 		  {
2666 		    if (id->eh_landing_pad_dest == NULL)
2667 		      id->eh_landing_pad_dest = e->dest;
2668 		    else
2669 		      gcc_assert (id->eh_landing_pad_dest == e->dest);
2670 		  }
2671 	      }
2672         }
2673 
2674 
2675       /* If the call we inline cannot make an abnormal goto, do not add
2676          additional abnormal edges but only retain those already present
2677 	 in the original function body.  */
2678       if (abnormal_goto_dest == NULL)
2679 	nonlocal_goto = false;
2680       if (nonlocal_goto)
2681 	{
2682 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2683 
2684 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2685 	    nonlocal_goto = false;
2686 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2687 	     in OpenMP regions which aren't allowed to be left abnormally.
2688 	     So, no need to add abnormal edge in that case.  */
2689 	  else if (is_gimple_call (copy_stmt)
2690 		   && gimple_call_internal_p (copy_stmt)
2691 		   && (gimple_call_internal_fn (copy_stmt)
2692 		       == IFN_ABNORMAL_DISPATCHER)
2693 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2694 	    nonlocal_goto = false;
2695 	  else
2696 	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2697 				   EDGE_ABNORMAL);
2698 	}
2699 
2700       if ((can_throw || nonlocal_goto)
2701 	  && gimple_in_ssa_p (cfun))
2702 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2703 					  can_throw, nonlocal_goto);
2704     }
2705   return need_debug_cleanup;
2706 }
2707 
2708 /* Copy the PHIs.  All blocks and edges are copied, some blocks
2709    were possibly split and new outgoing EH edges inserted.
2710    BB points to the block of the original function and the AUX pointers link
2711    the original and newly copied blocks.  */
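/* Roughly: a PHI "x_3 = PHI <a_1(bb5), b_2(bb7)>" in BB (illustrative names)
   gets a corresponding PHI in BB's copy; each argument is remapped via
   copy_tree_body_r and attached to the copied incoming edge, with arguments
   that became non-gimple forced into a temporary inserted on that edge.  */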
2712 
2713 static void
2714 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2715 {
2716   basic_block const new_bb = (basic_block) bb->aux;
2717   edge_iterator ei;
2718   gphi *phi;
2719   gphi_iterator si;
2720   edge new_edge;
2721   bool inserted = false;
2722 
2723   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2724     {
2725       tree res, new_res;
2726       gphi *new_phi;
2727 
2728       phi = si.phi ();
2729       res = PHI_RESULT (phi);
2730       new_res = res;
2731       if (!virtual_operand_p (res)
2732 	  && (!id->param_body_adjs
2733 	      || !id->param_body_adjs->m_dead_stmts.contains (phi)))
2734 	{
2735 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2736 	  if (EDGE_COUNT (new_bb->preds) == 0)
2737 	    {
2738 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2739 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2740 	    }
2741 	  else
2742 	    {
2743 	      new_phi = create_phi_node (new_res, new_bb);
2744 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2745 		{
2746 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2747 					     bb);
2748 		  tree arg;
2749 		  tree new_arg;
2750 		  edge_iterator ei2;
2751 		  location_t locus;
2752 
2753 		  /* When doing partial cloning, we allow PHIs on the entry
2754 		     block as long as all the arguments are the same.
2755 		     Find any input edge to get the argument to copy.  */
2756 		  if (!old_edge)
2757 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2758 		      if (!old_edge->src->aux)
2759 			break;
2760 
2761 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2762 		  new_arg = arg;
2763 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2764 		  gcc_assert (new_arg);
2765 		  /* With return slot optimization we can end up with
2766 		     non-gimple (foo *)&this->m, fix that here.  */
2767 		  if (TREE_CODE (new_arg) != SSA_NAME
2768 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2769 		      && !is_gimple_val (new_arg))
2770 		    {
2771 		      gimple_seq stmts = NULL;
2772 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2773 						      NULL);
2774 		      gsi_insert_seq_on_edge (new_edge, stmts);
2775 		      inserted = true;
2776 		    }
2777 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2778 		  if (id->reset_location)
2779 		    locus = input_location;
2780 		  else
2781 		    locus = remap_location (locus, id);
2782 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2783 		}
2784 	    }
2785 	}
2786     }
2787 
2788   /* Commit the delayed edge insertions.  */
2789   if (inserted)
2790     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2791       gsi_commit_one_edge_insert (new_edge, NULL);
2792 }
2793 
2794 
2795 /* Wrapper for remap_decl so it can be used as a callback.  */
2796 
2797 static tree
2798 remap_decl_1 (tree decl, void *data)
2799 {
2800   return remap_decl (decl, (copy_body_data *) data);
2801 }
2802 
2803 /* Build a struct function and associated data structures for the new clone
2804    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2805    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2806 
2807 static void
2808 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2809 {
2810   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2811 
2812   /* Register specific tree functions.  */
2813   gimple_register_cfg_hooks ();
2814 
2815   /* Get clean struct function.  */
2816   push_struct_function (new_fndecl, true);
2817   targetm.target_option.relayout_function (new_fndecl);
2818 
2819   /* We will rebuild these, so just sanity check that they are empty.  */
2820   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2821   gcc_assert (cfun->local_decls == NULL);
2822   gcc_assert (cfun->cfg == NULL);
2823   gcc_assert (cfun->decl == new_fndecl);
2824 
2825   /* Copy items we preserve during cloning.  */
2826   cfun->static_chain_decl = src_cfun->static_chain_decl;
2827   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2828   cfun->function_end_locus = src_cfun->function_end_locus;
2829   cfun->curr_properties = src_cfun->curr_properties;
2830   cfun->last_verified = src_cfun->last_verified;
2831   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2832   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2833   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2834   cfun->calls_eh_return = src_cfun->calls_eh_return;
2835   cfun->stdarg = src_cfun->stdarg;
2836   cfun->after_inlining = src_cfun->after_inlining;
2837   cfun->can_throw_non_call_exceptions
2838     = src_cfun->can_throw_non_call_exceptions;
2839   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2840   cfun->returns_struct = src_cfun->returns_struct;
2841   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2842 
2843   init_empty_tree_cfg ();
2844 
2845   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2846 
2847   profile_count num = count;
2848   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2849   profile_count::adjust_for_ipa_scaling (&num, &den);
2850 
2851   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2852     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2853 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2854   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2855     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2856 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2857   if (src_cfun->eh)
2858     init_eh_for_function ();
2859 
2860   if (src_cfun->gimple_df)
2861     {
2862       init_tree_ssa (cfun);
2863       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2864       if (cfun->gimple_df->in_ssa_p)
2865 	init_ssa_operands (cfun);
2866     }
2867 }
2868 
2869 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2870    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2871    successor has multiple predecessors, reset the values of the moved debug
2872    binds; otherwise keep them.  */
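/* E.g. (illustrative): if NEW_BB now ends with a throwing call followed by a
   "# DEBUG x => x_5" bind, the bind is moved (for the last successor edge) or
   copied (for the others) to the start of each successor block; when that
   successor has several predecessors the bind's value is dropped, since it
   need not hold on the other incoming paths.  */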
2873 
2874 static void
2875 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2876 {
2877   edge e;
2878   edge_iterator ei;
2879   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2880 
2881   if (gsi_end_p (si)
2882       || gsi_one_before_end_p (si)
2883       || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2884 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2885     return;
2886 
2887   FOR_EACH_EDGE (e, ei, new_bb->succs)
2888     {
2889       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2890       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2891       while (is_gimple_debug (gsi_stmt (ssi)))
2892 	{
2893 	  gimple *stmt = gsi_stmt (ssi);
2894 	  gdebug *new_stmt;
2895 	  tree var;
2896 	  tree value;
2897 
2898 	  /* For the last edge move the debug stmts instead of copying
2899 	     them.  */
2900 	  if (ei_one_before_end_p (ei))
2901 	    {
2902 	      si = ssi;
2903 	      gsi_prev (&ssi);
2904 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2905 		{
2906 		  gimple_debug_bind_reset_value (stmt);
2907 		  gimple_set_location (stmt, UNKNOWN_LOCATION);
2908 		}
2909 	      gsi_remove (&si, false);
2910 	      gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
2911 	      continue;
2912 	    }
2913 
2914 	  if (gimple_debug_bind_p (stmt))
2915 	    {
2916 	      var = gimple_debug_bind_get_var (stmt);
2917 	      if (single_pred_p (e->dest))
2918 		{
2919 		  value = gimple_debug_bind_get_value (stmt);
2920 		  value = unshare_expr (value);
2921 		  new_stmt = gimple_build_debug_bind (var, value, stmt);
2922 		}
2923 	      else
2924 		new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2925 	    }
2926 	  else if (gimple_debug_source_bind_p (stmt))
2927 	    {
2928 	      var = gimple_debug_source_bind_get_var (stmt);
2929 	      value = gimple_debug_source_bind_get_value (stmt);
2930 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2931 	    }
2932 	  else if (gimple_debug_nonbind_marker_p (stmt))
2933 	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2934 	  else
2935 	    gcc_unreachable ();
2936 	  gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
2937 	  id->debug_stmts.safe_push (new_stmt);
2938 	  gsi_prev (&ssi);
2939 	}
2940     }
2941 }
2942 
2943 /* Make a copy of the sub-loops of SRC_PARENT and place them
2944    as children of DEST_PARENT.  */
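/* The copy is driven by the bb->aux mapping set up by copy_bb: each copied
   loop's header and latch are redirected through that mapping, loop metadata
   (unrolling, vectorization hints, simduid, dependence cliques) is copied,
   and the function recurses over the sub-loop tree.  */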
2945 
2946 static void
2947 copy_loops (copy_body_data *id,
2948 	    class loop *dest_parent, class loop *src_parent)
2949 {
2950   class loop *src_loop = src_parent->inner;
2951   while (src_loop)
2952     {
2953       if (!id->blocks_to_copy
2954 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2955 	{
2956 	  class loop *dest_loop = alloc_loop ();
2957 
2958 	  /* Assign the new loop its header and latch and associate
2959 	     those with the new loop.  */
2960 	  dest_loop->header = (basic_block)src_loop->header->aux;
2961 	  dest_loop->header->loop_father = dest_loop;
2962 	  if (src_loop->latch != NULL)
2963 	    {
2964 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2965 	      dest_loop->latch->loop_father = dest_loop;
2966 	    }
2967 
2968 	  /* Copy loop meta-data.  */
2969 	  copy_loop_info (src_loop, dest_loop);
2970 	  if (dest_loop->unroll)
2971 	    cfun->has_unroll = true;
2972 	  if (dest_loop->force_vectorize)
2973 	    cfun->has_force_vectorize_loops = true;
2974 	  if (id->src_cfun->last_clique != 0)
2975 	    dest_loop->owned_clique
2976 	      = remap_dependence_clique (id,
2977 					 src_loop->owned_clique
2978 					 ? src_loop->owned_clique : 1);
2979 
2980 	  /* Finally place it into the loop array and the loop tree.  */
2981 	  place_new_loop (cfun, dest_loop);
2982 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2983 
2984 	  if (src_loop->simduid)
2985 	    {
2986 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2987 	      cfun->has_simduid_loops = true;
2988 	    }
2989 
2990 	  /* Recurse.  */
2991 	  copy_loops (id, dest_loop, src_loop);
2992 	}
2993       src_loop = src_loop->next;
2994     }
2995 }
2996 
2997 /* Call redirect_call_stmt_to_callee on all calls in BB.  */
2998 
2999 void
3000 redirect_all_calls (copy_body_data * id, basic_block bb)
3001 {
3002   gimple_stmt_iterator si;
3003   gimple *last = last_stmt (bb);
3004   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
3005     {
3006       gimple *stmt = gsi_stmt (si);
3007       if (is_gimple_call (stmt))
3008 	{
3009 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
3010 	  if (edge)
3011 	    {
3012 	      if (!id->killed_new_ssa_names)
3013 		id->killed_new_ssa_names = new hash_set<tree> (16);
3014 	      cgraph_edge::redirect_call_stmt_to_callee (edge,
3015 		id->killed_new_ssa_names);
3016 
3017 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
3018 		gimple_purge_dead_eh_edges (bb);
3019 	    }
3020 	}
3021     }
3022 }
3023 
3024 /* Make a copy of the body of FN so that it can be inserted inline in
3025    another function.  Walks FN via CFG, returns new fndecl.  */
3026 
3027 static tree
3028 copy_cfg_body (copy_body_data * id,
3029 	       basic_block entry_block_map, basic_block exit_block_map,
3030 	       basic_block new_entry)
3031 {
3032   tree callee_fndecl = id->src_fn;
3033   /* Original cfun for the callee, doesn't change.  */
3034   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3035   struct function *cfun_to_copy;
3036   basic_block bb;
3037   tree new_fndecl = NULL;
3038   bool need_debug_cleanup = false;
3039   int last;
3040   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3041   profile_count num = entry_block_map->count;
3042 
3043   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3044 
3045   /* Register specific tree functions.  */
3046   gimple_register_cfg_hooks ();
3047 
3048   /* If we are inlining just a region of the function, make sure to connect
3049      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
3050      be part of a loop, we must compute the frequency and probability of
3051      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3052      probabilities of edges incoming from the nonduplicated region.  */
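  /* Illustrative scaling: if the copied region's entry is reached with count
     400 in the source function (den) while the destination entry count is 100
     (num), copy_bb below multiplies each copied block's count by 100/400.  */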
3053   if (new_entry)
3054     {
3055       edge e;
3056       edge_iterator ei;
3057       den = profile_count::zero ();
3058 
3059       FOR_EACH_EDGE (e, ei, new_entry->preds)
3060 	if (!e->src->aux)
3061 	  den += e->count ();
3062       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3063     }
3064 
3065   profile_count::adjust_for_ipa_scaling (&num, &den);
3066 
3067   /* Must have a CFG here at this point.  */
3068   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3069 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
3070 
3071 
3072   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3073   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3074   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3075   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3076 
3077   /* Duplicate any exception-handling regions.  */
3078   if (cfun->eh)
3079     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3080 				       remap_decl_1, id);
3081 
3082   /* Use aux pointers to map the original blocks to copy.  */
3083   FOR_EACH_BB_FN (bb, cfun_to_copy)
3084     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3085       {
3086 	basic_block new_bb = copy_bb (id, bb, num, den);
3087 	bb->aux = new_bb;
3088 	new_bb->aux = bb;
3089 	new_bb->loop_father = entry_block_map->loop_father;
3090       }
3091 
3092   last = last_basic_block_for_fn (cfun);
3093 
3094   /* Now that we've duplicated the blocks, duplicate their edges.  */
3095   basic_block abnormal_goto_dest = NULL;
3096   if (id->call_stmt
3097       && stmt_can_make_abnormal_goto (id->call_stmt))
3098     {
3099       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3100 
3101       bb = gimple_bb (id->call_stmt);
3102       gsi_next (&gsi);
3103       if (gsi_end_p (gsi))
3104 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3105     }
3106   FOR_ALL_BB_FN (bb, cfun_to_copy)
3107     if (!id->blocks_to_copy
3108 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3109       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3110 					       abnormal_goto_dest, id);
3111 
3112   if (id->eh_landing_pad_dest)
3113     {
3114       add_clobbers_to_eh_landing_pad (id);
3115       id->eh_landing_pad_dest = NULL;
3116     }
3117 
3118   if (new_entry)
3119     {
3120       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3121 			  EDGE_FALLTHRU);
3122       e->probability = profile_probability::always ();
3123     }
3124 
3125   /* Duplicate the loop tree, if available and wanted.  */
3126   if (loops_for_fn (src_cfun) != NULL
3127       && current_loops != NULL)
3128     {
3129       copy_loops (id, entry_block_map->loop_father,
3130 		  get_loop (src_cfun, 0));
3131       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
3132       loops_state_set (LOOPS_NEED_FIXUP);
3133     }
3134 
3135   /* If the loop tree in the source function needed fixup, mark the
3136      destination loop tree for fixup, too.  */
3137   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3138     loops_state_set (LOOPS_NEED_FIXUP);
3139 
3140   if (gimple_in_ssa_p (cfun))
3141     FOR_ALL_BB_FN (bb, cfun_to_copy)
3142       if (!id->blocks_to_copy
3143 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3144 	copy_phis_for_bb (bb, id);
3145 
3146   FOR_ALL_BB_FN (bb, cfun_to_copy)
3147     if (bb->aux)
3148       {
3149 	if (need_debug_cleanup
3150 	    && bb->index != ENTRY_BLOCK
3151 	    && bb->index != EXIT_BLOCK)
3152 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3153 	/* Update call edge destinations.  This cannot be done before loop
3154 	   info is updated, because we may split basic blocks.  */
3155 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3156 	    && bb->index != ENTRY_BLOCK
3157 	    && bb->index != EXIT_BLOCK)
3158 	  redirect_all_calls (id, (basic_block)bb->aux);
3159 	((basic_block)bb->aux)->aux = NULL;
3160 	bb->aux = NULL;
3161       }
3162 
3163   /* Zero out AUX fields of blocks newly created during EH edge
3164      insertion.  */
3165   for (; last < last_basic_block_for_fn (cfun); last++)
3166     {
3167       if (need_debug_cleanup)
3168 	maybe_move_debug_stmts_to_successors (id,
3169 					      BASIC_BLOCK_FOR_FN (cfun, last));
3170       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3171       /* Update call edge destinations.  This cannot be done before loop
3172 	 info is updated, because we may split basic blocks.  */
3173       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3174 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3175     }
3176   entry_block_map->aux = NULL;
3177   exit_block_map->aux = NULL;
3178 
3179   if (id->eh_map)
3180     {
3181       delete id->eh_map;
3182       id->eh_map = NULL;
3183     }
3184   if (id->dependence_map)
3185     {
3186       delete id->dependence_map;
3187       id->dependence_map = NULL;
3188     }
3189 
3190   return new_fndecl;
3191 }
3192 
3193 /* Copy the debug STMT using ID.  We deal with these statements in a
3194    special way: if any variable in their VALUE expression wasn't
3195    remapped yet, we won't remap it, because that would get decl uids
3196    out of sync, causing codegen differences between -g and -g0.  If
3197    this arises, we drop the VALUE expression altogether.  */
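/* For instance (schematic): if the copied body contains
     # DEBUG y => a + 1
   and "a" has not been remapped in ID->decl_map, the copy is reset to
     # DEBUG y => NULL
   rather than remapping "a" (and thus allocating decl uids) just for
   debug info.  */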
3198 
3199 static void
3200 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3201 {
3202   tree t, *n;
3203   struct walk_stmt_info wi;
3204 
3205   if (tree block = gimple_block (stmt))
3206     {
3207       n = id->decl_map->get (block);
3208       gimple_set_block (stmt, n ? *n : id->block);
3209     }
3210 
3211   if (gimple_debug_nonbind_marker_p (stmt))
3212     {
3213       if (id->call_stmt && !gimple_block (stmt))
3214 	{
3215 	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3216 	  gsi_remove (&gsi, true);
3217 	}
3218       return;
3219     }
3220 
3221   /* Remap all the operands in COPY.  */
3222   memset (&wi, 0, sizeof (wi));
3223   wi.info = id;
3224 
3225   processing_debug_stmt = 1;
3226 
3227   if (gimple_debug_source_bind_p (stmt))
3228     t = gimple_debug_source_bind_get_var (stmt);
3229   else if (gimple_debug_bind_p (stmt))
3230     t = gimple_debug_bind_get_var (stmt);
3231   else
3232     gcc_unreachable ();
3233 
3234   if (TREE_CODE (t) == PARM_DECL
3235       && id->debug_map
3236       && (n = id->debug_map->get (t)))
3237     {
3238       gcc_assert (VAR_P (*n));
3239       t = *n;
3240     }
3241   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3242     /* T is a non-localized variable.  */;
3243   else
3244     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3245 
3246   if (gimple_debug_bind_p (stmt))
3247     {
3248       gimple_debug_bind_set_var (stmt, t);
3249 
3250       if (gimple_debug_bind_has_value_p (stmt))
3251 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3252 		   remap_gimple_op_r, &wi, NULL);
3253 
3254       /* Punt if any decl couldn't be remapped.  */
3255       if (processing_debug_stmt < 0)
3256 	gimple_debug_bind_reset_value (stmt);
3257     }
3258   else if (gimple_debug_source_bind_p (stmt))
3259     {
3260       gimple_debug_source_bind_set_var (stmt, t);
3261       /* When inlining and source bind refers to one of the optimized
3262 	 away parameters, change the source bind into a normal debug bind
3263 	 referring to the corresponding DEBUG_EXPR_DECL that should have
3264 	 been bound before the call stmt.  */
3265       t = gimple_debug_source_bind_get_value (stmt);
3266       if (t != NULL_TREE
3267 	  && TREE_CODE (t) == PARM_DECL
3268 	  && id->call_stmt)
3269 	{
3270 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3271 	  unsigned int i;
3272 	  if (debug_args != NULL)
3273 	    {
3274 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3275 		if ((**debug_args)[i] == DECL_ORIGIN (t)
3276 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3277 		  {
3278 		    t = (**debug_args)[i + 1];
3279 		    stmt->subcode = GIMPLE_DEBUG_BIND;
3280 		    gimple_debug_bind_set_value (stmt, t);
3281 		    break;
3282 		  }
3283 	    }
3284 	}
3285       if (gimple_debug_source_bind_p (stmt))
3286 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3287 		   remap_gimple_op_r, &wi, NULL);
3288     }
3289 
3290   processing_debug_stmt = 0;
3291 
3292   update_stmt (stmt);
3293 }
3294 
3295 /* Process deferred debug stmts.  In order to give values better odds
3296    of being successfully remapped, we delay the processing of debug
3297    stmts until all other stmts that might require remapping are
3298    processed.  */
3299 
3300 static void
3301 copy_debug_stmts (copy_body_data *id)
3302 {
3303   if (!id->debug_stmts.exists ())
3304     return;
3305 
3306   for (gdebug *stmt : id->debug_stmts)
3307     copy_debug_stmt (stmt, id);
3308 
3309   id->debug_stmts.release ();
3310 }
3311 
3312 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3313    another function.  */
3314 
3315 static tree
3316 copy_tree_body (copy_body_data *id)
3317 {
3318   tree fndecl = id->src_fn;
3319   tree body = DECL_SAVED_TREE (fndecl);
3320 
3321   walk_tree (&body, copy_tree_body_r, id, NULL);
3322 
3323   return body;
3324 }
3325 
3326 /* Make a copy of the body of FN so that it can be inserted inline in
3327    another function.  */
3328 
3329 static tree
3330 copy_body (copy_body_data *id,
3331 	   basic_block entry_block_map, basic_block exit_block_map,
3332 	   basic_block new_entry)
3333 {
3334   tree fndecl = id->src_fn;
3335   tree body;
3336 
3337   /* If this body has a CFG, walk CFG and copy.  */
3338   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3339   body = copy_cfg_body (id, entry_block_map, exit_block_map,
3340 			new_entry);
3341   copy_debug_stmts (id);
3342   if (id->killed_new_ssa_names)
3343     {
3344       ipa_release_ssas_in_hash (id->killed_new_ssa_names);
3345       delete id->killed_new_ssa_names;
3346       id->killed_new_ssa_names = NULL;
3347     }
3348 
3349   return body;
3350 }
3351 
3352 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3353    defined in function FN, or of a data member thereof.  */
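/* E.g. (schematic): when FN is inlined into itself, directly or through
   mutual recursion, an actual argument "&local", where "local" is one of
   FN's own automatic variables, must not be substituted for the parameter,
   because the inlined copy gets its own distinct instance of "local".  */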
3354 
3355 static bool
3356 self_inlining_addr_expr (tree value, tree fn)
3357 {
3358   tree var;
3359 
3360   if (TREE_CODE (value) != ADDR_EXPR)
3361     return false;
3362 
3363   var = get_base_address (TREE_OPERAND (value, 0));
3364 
3365   return var && auto_var_in_fn_p (var, fn);
3366 }
3367 
3368 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3369    lexical block and line number information from BASE_STMT, if given,
3370    or from the last stmt of the block otherwise.  */
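/* E.g. (schematic): for a parameter replacement VAR initialized from
   argument A, this appends something like
     # DEBUG VAR => A
   so that var-tracking can still describe the parameter even if the real
   initialization is later optimized away.  */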
3371 
3372 static gimple *
3373 insert_init_debug_bind (copy_body_data *id,
3374 			basic_block bb, tree var, tree value,
3375 			gimple *base_stmt)
3376 {
3377   gimple *note;
3378   gimple_stmt_iterator gsi;
3379   tree tracked_var;
3380 
3381   if (!gimple_in_ssa_p (id->src_cfun))
3382     return NULL;
3383 
3384   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3385     return NULL;
3386 
3387   tracked_var = target_for_debug_bind (var);
3388   if (!tracked_var)
3389     return NULL;
3390 
3391   if (bb)
3392     {
3393       gsi = gsi_last_bb (bb);
3394       if (!base_stmt && !gsi_end_p (gsi))
3395 	base_stmt = gsi_stmt (gsi);
3396     }
3397 
3398   note = gimple_build_debug_bind (tracked_var,
3399 				  value == error_mark_node
3400 				  ? NULL_TREE : unshare_expr (value),
3401 				  base_stmt);
3402 
3403   if (bb)
3404     {
3405       if (!gsi_end_p (gsi))
3406 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3407       else
3408 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3409     }
3410 
3411   return note;
3412 }
3413 
3414 static void
3415 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3416 {
3417   /* If VAR represents a zero-sized variable, it's possible that the
3418      assignment statement may result in no gimple statements.  */
3419   if (init_stmt)
3420     {
3421       gimple_stmt_iterator si = gsi_last_bb (bb);
3422 
3423       /* We can end up with init statements that store to a non-register
3424          from a rhs with a conversion.  Handle that here by forcing the
3425 	 rhs into a temporary.  gimple_regimplify_operands is not
3426 	 prepared to do this for us.  */
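      /* E.g. (schematic): an init statement "var = (float) x_1", where
	 "var" is not a gimple register, is rewritten as
	   tmp_2 = (float) x_1;
	   var = tmp_2;
	 so the conversion happens in a register before the store.  */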
3427       if (!is_gimple_debug (init_stmt)
3428 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3429 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3430 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3431 	{
3432 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3433 			     TREE_TYPE (gimple_assign_lhs (init_stmt)),
3434 			     gimple_assign_rhs1 (init_stmt));
3435 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3436 					  GSI_NEW_STMT);
3437 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3438 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3439 	}
3440       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3441       if (!is_gimple_debug (init_stmt))
3442 	{
3443 	  gimple_regimplify_operands (init_stmt, &si);
3444 
3445 	  tree def = gimple_assign_lhs (init_stmt);
3446 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3447 	}
3448     }
3449 }
3450 
3451 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3452    if need be (which should only be necessary for invalid programs).  Attempt
3453    to convert VAL to TYPE and return the result if that is possible; just
3454    return a zero constant of the given type if it fails.  */
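/* E.g. (schematic): a "long" actual for an "int" formal can simply be
   fold_convert'ed; a "float" actual for a same-sized "int" formal is
   wrapped in a VIEW_CONVERT_EXPR; anything else degrades to "(TYPE) 0".  */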
3455 
3456 tree
3457 force_value_to_type (tree type, tree value)
3458 {
3459   /* If we can match up types by promotion/demotion do so.  */
3460   if (fold_convertible_p (type, value))
3461     return fold_convert (type, value);
3462 
3463   /* ???  For valid programs we should not end up here.
3464      Still if we end up with truly mismatched types here, fall back
3465      to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3466      GIMPLE to the following passes.  */
3467   if (TREE_CODE (value) == WITH_SIZE_EXPR)
3468     return error_mark_node;
3469   else if (!is_gimple_reg_type (TREE_TYPE (value))
3470 	   || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3471     return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3472   else
3473     return build_zero_cst (type);
3474 }
3475 
3476 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3477    at the end of BB.  When BB is NULL, we return the init statement to be
3478    output later.  */
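/* For instance (schematic): inlining "int f (int p) { return p + 1; }"
   at a call "f (x_5)" can, when optimizing, simply map the default
   definition of P to x_5 and emit no statement at all; otherwise an
   explicit initializer such as "p = x_5" is generated in BB.  */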
3479 static gimple *
3480 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3481 		     basic_block bb, tree *vars)
3482 {
3483   gimple *init_stmt = NULL;
3484   tree var;
3485   tree def = (gimple_in_ssa_p (cfun)
3486 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3487 
3488   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3489      here since the type of this decl must be visible to the calling
3490      function.  */
3491   var = copy_decl_to_var (p, id);
3492 
3493   /* Declare this new variable.  */
3494   DECL_CHAIN (var) = *vars;
3495   *vars = var;
3496 
3497   /* Make gimplifier happy about this variable.  */
3498   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3499 
3500   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3501      we would not need to create a new variable here at all, if it
3502      weren't for debug info.  Still, we can just use the argument
3503      value.  */
3504   if (TREE_READONLY (p)
3505       && !TREE_ADDRESSABLE (p)
3506       && value
3507       && !TREE_SIDE_EFFECTS (value)
3508       && !def)
3509     {
3510       /* We may produce non-gimple trees by adding NOPs or introduce invalid
3511 	 sharing when the value is not constant or DECL.  And we need to make
3512 	 sure that it cannot be modified from another path in the callee.  */
3513       if (((is_gimple_min_invariant (value)
3514 	    /* When the parameter is used in a context that forces it to
3515 	       not be a GIMPLE register avoid substituting something that
3516 	       is not a decl there.  */
3517 	    && ! DECL_NOT_GIMPLE_REG_P (p))
3518 	   || (DECL_P (value) && TREE_READONLY (value))
3519 	   || (auto_var_in_fn_p (value, id->dst_fn)
3520 	       && !TREE_ADDRESSABLE (value)))
3521 	  && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
3522 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3523 	     the base variable isn't a local variable of the inlined
3524 	     function, e.g., when doing recursive inlining, direct or
3525 	     mutually-recursive or whatever, which is why we don't
3526 	     just test whether fn == current_function_decl.  */
3527 	  && ! self_inlining_addr_expr (value, fn))
3528 	{
3529 	  insert_decl_map (id, p, value);
3530 	  if (!id->debug_map)
3531 	    id->debug_map = new hash_map<tree, tree>;
3532 	  id->debug_map->put (p, var);
3533 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3534 	}
3535     }
3536 
3537   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3538      that way, when the PARM_DECL is encountered, it will be
3539      automatically replaced by the VAR_DECL.  */
3540   insert_decl_map (id, p, var);
3541 
3542   /* Even if P was TREE_READONLY, the new VAR should not be.  In the original
3543      code, we would have constructed a temporary, and then the function body
3544      would have never changed the value of P.  However, now, we will be
3545      constructing VAR directly.  Therefore, it must not be TREE_READONLY.  */
3546   TREE_READONLY (var) = 0;
3547 
3548   tree rhs = value;
3549   if (value
3550       && value != error_mark_node
3551       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3552     rhs = force_value_to_type (TREE_TYPE (p), value);
3553 
3554   /* If there is no setup required and we are in SSA, take the easy route
3555      replacing all SSA names representing the function parameter by the
3556      SSA name passed to function.
3557 
3558      We need to construct a map for the variable anyway as it might be
3559      used in different SSA names when the parameter is set in the function.
3560 
3561      Do the replacement at -O0 for const arguments replaced by a constant.
3562      This is important for builtin_constant_p and other constructs requiring
3563      a constant argument to be visible in the inlined function body.  */
3564   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3565       && (optimize
3566           || (TREE_READONLY (p)
3567 	      && is_gimple_min_invariant (rhs)))
3568       && (TREE_CODE (rhs) == SSA_NAME
3569 	  || is_gimple_min_invariant (rhs))
3570       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3571     {
3572       insert_decl_map (id, def, rhs);
3573       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3574     }
3575 
3576   /* If the value of the argument is never used, don't bother initializing
3577      it.  */
3578   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3579     {
3580       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3581       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3582     }
3583 
3584   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3585      the argument to the proper type in case it was promoted.  */
3586   if (value)
3587     {
3588       if (rhs == error_mark_node)
3589 	{
3590 	  insert_decl_map (id, p, var);
3591 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3592 	}
3593 
3594       STRIP_USELESS_TYPE_CONVERSION (rhs);
3595 
3596       /* If we are in SSA form properly remap the default definition
3597          or assign to a dummy SSA name if the parameter is unused and
3598 	 we are not optimizing.  */
3599       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3600 	{
3601 	  if (def)
3602 	    {
3603 	      def = remap_ssa_name (def, id);
3604 	      init_stmt = gimple_build_assign (def, rhs);
3605 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3606 	      set_ssa_default_def (cfun, var, NULL);
3607 	    }
3608 	  else if (!optimize)
3609 	    {
3610 	      def = make_ssa_name (var);
3611 	      init_stmt = gimple_build_assign (def, rhs);
3612 	    }
3613 	}
3614       else if (!is_empty_type (TREE_TYPE (var)))
3615         init_stmt = gimple_build_assign (var, rhs);
3616 
3617       if (bb && init_stmt)
3618         insert_init_stmt (id, bb, init_stmt);
3619     }
3620   return init_stmt;
3621 }
3622 
3623 /* Generate code to initialize the parameters of the function at the
3624    top of the stack in ID from the GIMPLE_CALL STMT.  */
3625 
3626 static void
3627 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3628 			       tree fn, basic_block bb)
3629 {
3630   tree parms;
3631   size_t i;
3632   tree p;
3633   tree vars = NULL_TREE;
3634   tree static_chain = gimple_call_chain (stmt);
3635 
3636   /* Figure out what the parameters are.  */
3637   parms = DECL_ARGUMENTS (fn);
3638 
3639   /* Loop through the parameter declarations, replacing each with an
3640      equivalent VAR_DECL, appropriately initialized.  */
3641   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3642     {
3643       tree val;
3644       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3645       setup_one_parameter (id, p, val, fn, bb, &vars);
3646     }
3647   /* After remapping parameters remap their types.  This has to be done
3648      in a second loop over all parameters to appropriately remap
3649      variable sized arrays when the size is specified in a
3650      parameter following the array.  */
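  /* E.g. (schematic): if the type of parameter A is a variable-sized array
     whose length is given by a later parameter N, the type of the VAR_DECL
     replacing A can only be remapped once N itself has been remapped, which
     is why this is done in a second pass.  */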
3651   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3652     {
3653       tree *varp = id->decl_map->get (p);
3654       if (varp && VAR_P (*varp))
3655 	{
3656 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3657 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3658 	  tree var = *varp;
3659 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3660 	  /* Also remap the default definition if it was remapped
3661 	     to the default definition of the parameter replacement
3662 	     by the parameter setup.  */
3663 	  if (def)
3664 	    {
3665 	      tree *defp = id->decl_map->get (def);
3666 	      if (defp
3667 		  && TREE_CODE (*defp) == SSA_NAME
3668 		  && SSA_NAME_VAR (*defp) == var)
3669 		TREE_TYPE (*defp) = TREE_TYPE (var);
3670 	    }
3671 	}
3672     }
3673 
3674   /* Initialize the static chain.  */
3675   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3676   gcc_assert (fn != current_function_decl);
3677   if (p)
3678     {
3679       /* No static chain?  Seems like a bug in tree-nested.cc.  */
3680       gcc_assert (static_chain);
3681 
3682       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3683     }
3684 
3685   declare_inline_vars (id->block, vars);
3686 }
3687 
3688 
3689 /* Declare a return variable to replace the RESULT_DECL for the
3690    function we are calling.  An appropriate DECL_STMT is returned.
3691    The USE_STMT is filled to contain a use of the declaration to
3692    indicate the return value of the function.
3693 
3694    RETURN_SLOT, if non-null, is the place where to store the result.  It
3695    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3696    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3697 
3698    The return value is a (possibly null) value that holds the result
3699    as seen by the caller.  */
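/* E.g. (schematic): for "y = f ();" inlined with MODIFY_DEST "y", the
   callee's RESULT_DECL can often be mapped directly onto "y"; when reuse
   is unsafe (a needed type conversion, an addressable result, a global or
   address-taken destination), a fresh "retval" variable is created and
   USE describes how the caller reads the result from it.  */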
3700 
3701 static tree
3702 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3703 			 basic_block entry_bb)
3704 {
3705   tree callee = id->src_fn;
3706   tree result = DECL_RESULT (callee);
3707   tree callee_type = TREE_TYPE (result);
3708   tree caller_type;
3709   tree var, use;
3710 
3711   /* Handle type-mismatches in the function declaration return type
3712      vs. the call expression.  */
3713   if (modify_dest)
3714     caller_type = TREE_TYPE (modify_dest);
3715   else if (return_slot)
3716     caller_type = TREE_TYPE (return_slot);
3717   else /* No LHS on the call.  */
3718     caller_type = TREE_TYPE (TREE_TYPE (callee));
3719 
3720   /* We don't need to do anything for functions that don't return anything.  */
3721   if (VOID_TYPE_P (callee_type))
3722     return NULL_TREE;
3723 
3724   /* If there was a return slot, then the return value is the
3725      dereferenced address of that object.  */
3726   if (return_slot)
3727     {
3728       /* The front end shouldn't have used both return_slot and
3729 	 a modify expression.  */
3730       gcc_assert (!modify_dest);
3731       if (DECL_BY_REFERENCE (result))
3732 	{
3733 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3734 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3735 
3736 	  /* We are going to construct *&return_slot and we can't do that
3737 	     for variables believed to be not addressable.
3738 
3739 	     FIXME: This check can possibly match, because values returned
3740 	     via the return slot optimization are not believed to have their
3741 	     address taken by alias analysis.  */
3742 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3743 	  var = return_slot_addr;
3744 	  mark_addressable (return_slot);
3745 	}
3746       else
3747 	{
3748 	  var = return_slot;
3749 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3750 	  if (TREE_ADDRESSABLE (result))
3751 	    mark_addressable (var);
3752 	}
3753       if (DECL_NOT_GIMPLE_REG_P (result)
3754 	  && DECL_P (var))
3755 	DECL_NOT_GIMPLE_REG_P (var) = 1;
3756 
3757       if (!useless_type_conversion_p (callee_type, caller_type))
3758 	var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3759 
3760       use = NULL;
3761       goto done;
3762     }
3763 
3764   /* All types requiring non-trivial constructors should have been handled.  */
3765   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3766 
3767   /* Attempt to avoid creating a new temporary variable.  */
3768   if (modify_dest
3769       && TREE_CODE (modify_dest) != SSA_NAME)
3770     {
3771       bool use_it = false;
3772 
3773       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3774       if (!useless_type_conversion_p (callee_type, caller_type))
3775 	use_it = false;
3776 
3777       /* ??? If we're assigning to a variable sized type, then we must
3778 	 reuse the destination variable, because we've no good way to
3779 	 create variable sized temporaries at this point.  */
3780       else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3781 	use_it = true;
3782 
3783       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3784 	 reuse it as the result of the call directly.  Don't do this if
3785 	 it would promote MODIFY_DEST to addressable.  */
3786       else if (TREE_ADDRESSABLE (result))
3787 	use_it = false;
3788       else
3789 	{
3790 	  tree base_m = get_base_address (modify_dest);
3791 
3792 	  /* If the base isn't a decl, then it's a pointer, and we don't
3793 	     know where that's going to go.  */
3794 	  if (!DECL_P (base_m))
3795 	    use_it = false;
3796 	  else if (is_global_var (base_m))
3797 	    use_it = false;
3798 	  else if (DECL_NOT_GIMPLE_REG_P (result)
3799 		   && !DECL_NOT_GIMPLE_REG_P (base_m))
3800 	    use_it = false;
3801 	  else if (!TREE_ADDRESSABLE (base_m))
3802 	    use_it = true;
3803 	}
3804 
3805       if (use_it)
3806 	{
3807 	  var = modify_dest;
3808 	  use = NULL;
3809 	  goto done;
3810 	}
3811     }
3812 
3813   gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3814 
3815   var = copy_result_decl_to_var (result, id);
3816   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3817 
3818   /* Do not have the rest of GCC warn about this variable as it should
3819      not be visible to the user.  */
3820   suppress_warning (var /* OPT_Wuninitialized? */);
3821 
3822   declare_inline_vars (id->block, var);
3823 
3824   /* Build the use expr.  If the return type of the function was
3825      promoted, convert it back to the expected type.  */
3826   use = var;
3827   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3828     {
3829       /* If we can match up types by promotion/demotion do so.  */
3830       if (fold_convertible_p (caller_type, var))
3831 	use = fold_convert (caller_type, var);
3832       else
3833 	{
3834 	  /* ???  For valid programs we should not end up here.
3835 	     Still if we end up with truly mismatched types here, fall back
3836 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3837 	     passes.  */
3838 	  /* Prevent var from being written into SSA form.  */
3839 	  if (is_gimple_reg_type (TREE_TYPE (var)))
3840 	    DECL_NOT_GIMPLE_REG_P (var) = true;
3841 	  use = fold_build2 (MEM_REF, caller_type,
3842 			     build_fold_addr_expr (var),
3843 			     build_int_cst (ptr_type_node, 0));
3844 	}
3845     }
3846 
3847   STRIP_USELESS_TYPE_CONVERSION (use);
3848 
3849   if (DECL_BY_REFERENCE (result))
3850     {
3851       TREE_ADDRESSABLE (var) = 1;
3852       var = build_fold_addr_expr (var);
3853     }
3854 
3855  done:
3856   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3857      way, when the RESULT_DECL is encountered, it will be
3858      automatically replaced by the VAR_DECL.
3859 
3860      When returning by reference, ensure that RESULT_DECL remaps to
3861      gimple_val.  */
3862   if (DECL_BY_REFERENCE (result)
3863       && !is_gimple_val (var))
3864     {
3865       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3866       insert_decl_map (id, result, temp);
3867       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3868 	 its default_def SSA_NAME.  */
3869       if (gimple_in_ssa_p (id->src_cfun)
3870 	  && is_gimple_reg (result))
3871 	{
3872 	  temp = make_ssa_name (temp);
3873 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3874 	}
3875       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3876     }
3877   else
3878     insert_decl_map (id, result, var);
3879 
3880   /* Remember this so we can ignore it in remap_decls.  */
3881   id->retvar = var;
3882   return use;
3883 }
3884 
3885 /* Determine if the function can be copied.  If so return NULL.  If
3886    not return a string describing the reason for failure.  */
3887 
3888 const char *
3889 copy_forbidden (struct function *fun)
3890 {
3891   const char *reason = fun->cannot_be_copied_reason;
3892 
3893   /* Only examine the function once.  */
3894   if (fun->cannot_be_copied_set)
3895     return reason;
3896 
3897   /* We cannot copy a function that receives a non-local goto
3898      because we cannot remap the destination label used in the
3899      function that is performing the non-local goto.  */
3900   /* ??? Actually, this should be possible, if we work at it.
3901      No doubt there's just a handful of places that simply
3902      assume it doesn't happen and don't substitute properly.  */
3903   if (fun->has_nonlocal_label)
3904     {
3905       reason = G_("function %q+F can never be copied "
3906 		  "because it receives a non-local goto");
3907       goto fail;
3908     }
3909 
3910   if (fun->has_forced_label_in_static)
3911     {
3912       reason = G_("function %q+F can never be copied because it saves "
3913 		  "address of local label in a static variable");
3914       goto fail;
3915     }
3916 
3917  fail:
3918   fun->cannot_be_copied_reason = reason;
3919   fun->cannot_be_copied_set = true;
3920   return reason;
3921 }
3922 
3923 
3924 static const char *inline_forbidden_reason;
3925 
3926 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3927    iff a function cannot be inlined.  Also sets the reason why. */
3928 
3929 static tree
3930 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3931 			 struct walk_stmt_info *wip)
3932 {
3933   tree fn = (tree) wip->info;
3934   tree t;
3935   gimple *stmt = gsi_stmt (*gsi);
3936 
3937   switch (gimple_code (stmt))
3938     {
3939     case GIMPLE_CALL:
3940       /* Refuse to inline alloca call unless user explicitly forced so as
3941 	 this may change the program's memory overhead drastically when the
3942 	 function using alloca is called in a loop.  In the GCC present in
3943 	 SPEC2000, inlining into schedule_block caused it to require 2GB of
3944 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3945 	 VLA objects as those can't cause unbounded growth (they're always
3946 	 wrapped inside stack_save/stack_restore regions).  */
3947       if (gimple_maybe_alloca_call_p (stmt)
3948 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3949 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3950 	{
3951 	  inline_forbidden_reason
3952 	    = G_("function %q+F can never be inlined because it uses "
3953 		 "alloca (override using the always_inline attribute)");
3954 	  *handled_ops_p = true;
3955 	  return fn;
3956 	}
3957 
3958       t = gimple_call_fndecl (stmt);
3959       if (t == NULL_TREE)
3960 	break;
3961 
3962       /* We cannot inline functions that call setjmp.  */
3963       if (setjmp_call_p (t))
3964 	{
3965 	  inline_forbidden_reason
3966 	    = G_("function %q+F can never be inlined because it uses setjmp");
3967 	  *handled_ops_p = true;
3968 	  return t;
3969 	}
3970 
3971       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3972 	switch (DECL_FUNCTION_CODE (t))
3973 	  {
3974 	    /* We cannot inline functions that take a variable number of
3975 	       arguments.  */
3976 	  case BUILT_IN_VA_START:
3977 	  case BUILT_IN_NEXT_ARG:
3978 	  case BUILT_IN_VA_END:
3979 	    inline_forbidden_reason
3980 	      = G_("function %q+F can never be inlined because it "
3981 		   "uses variable argument lists");
3982 	    *handled_ops_p = true;
3983 	    return t;
3984 
3985 	  case BUILT_IN_LONGJMP:
3986 	    /* We can't inline functions that call __builtin_longjmp at
3987 	       all.  The non-local goto machinery really requires the
3988 	       destination be in a different function.  If we allow the
3989 	       function calling __builtin_longjmp to be inlined into the
3990 	       function calling __builtin_setjmp, Things will Go Awry.  */
3991 	    inline_forbidden_reason
3992 	      = G_("function %q+F can never be inlined because "
3993 		   "it uses setjmp-longjmp exception handling");
3994 	    *handled_ops_p = true;
3995 	    return t;
3996 
3997 	  case BUILT_IN_NONLOCAL_GOTO:
3998 	    /* Similarly.  */
3999 	    inline_forbidden_reason
4000 	      = G_("function %q+F can never be inlined because "
4001 		   "it uses non-local goto");
4002 	    *handled_ops_p = true;
4003 	    return t;
4004 
4005 	  case BUILT_IN_RETURN:
4006 	  case BUILT_IN_APPLY_ARGS:
4007 	    /* If a __builtin_apply_args caller would be inlined,
4008 	       it would be saving arguments of the function it has
4009 	       been inlined into.  Similarly __builtin_return would
4010 	       return from the function the inline has been inlined into.  */
4011 	    inline_forbidden_reason
4012 	      = G_("function %q+F can never be inlined because "
4013 		   "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
4014 	    *handled_ops_p = true;
4015 	    return t;
4016 
4017 	  default:
4018 	    break;
4019 	  }
4020       break;
4021 
4022     case GIMPLE_GOTO:
4023       t = gimple_goto_dest (stmt);
4024 
4025       /* We will not inline a function which uses computed goto.  The
4026 	 addresses of its local labels, which may be tucked into
4027 	 global storage, are of course not constant across
4028 	 instantiations, which causes unexpected behavior.  */
4029       if (TREE_CODE (t) != LABEL_DECL)
4030 	{
4031 	  inline_forbidden_reason
4032 	    = G_("function %q+F can never be inlined "
4033 		 "because it contains a computed goto");
4034 	  *handled_ops_p = true;
4035 	  return t;
4036 	}
4037       break;
4038 
4039     default:
4040       break;
4041     }
4042 
4043   *handled_ops_p = false;
4044   return NULL_TREE;
4045 }
4046 
4047 /* Return true if FNDECL is a function that cannot be inlined into
4048    another one.  */
4049 
4050 static bool
4051 inline_forbidden_p (tree fndecl)
4052 {
4053   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4054   struct walk_stmt_info wi;
4055   basic_block bb;
4056   bool forbidden_p = false;
4057 
4058   /* First check for shared reasons not to copy the code.  */
4059   inline_forbidden_reason = copy_forbidden (fun);
4060   if (inline_forbidden_reason != NULL)
4061     return true;
4062 
4063   /* Next, walk the statements of the function looking for
4064      constructs we can't handle, or that are non-optimal for inlining.  */
4065   hash_set<tree> visited_nodes;
4066   memset (&wi, 0, sizeof (wi));
4067   wi.info = (void *) fndecl;
4068   wi.pset = &visited_nodes;
4069 
4070   /* We cannot inline a function with a variable-sized parameter because we
4071      cannot materialize a temporary of such a type in the caller if need be.
4072      Note that the return case is not symmetrical because we can guarantee
4073      that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT.  */
4074   for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4075     if (!poly_int_tree_p (DECL_SIZE (parm)))
4076       {
4077 	inline_forbidden_reason
4078 	  = G_("function %q+F can never be inlined because "
4079 	       "it has a VLA argument");
4080 	return true;
4081       }
4082 
4083   FOR_EACH_BB_FN (bb, fun)
4084     {
4085       gimple *ret;
4086       gimple_seq seq = bb_seq (bb);
4087       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4088       forbidden_p = (ret != NULL);
4089       if (forbidden_p)
4090 	break;
4091     }
4092 
4093   return forbidden_p;
4094 }
4095 
4096 /* Return false if the function FNDECL cannot be inlined on account of its
4097    attributes, true otherwise.  */
4098 static bool
4099 function_attribute_inlinable_p (const_tree fndecl)
4100 {
4101   if (targetm.attribute_table)
4102     {
4103       const_tree a;
4104 
4105       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4106 	{
4107 	  const_tree name = get_attribute_name (a);
4108 	  int i;
4109 
4110 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4111 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
4112 	      return targetm.function_attribute_inlinable_p (fndecl);
4113 	}
4114     }
4115 
4116   return true;
4117 }
4118 
4119 /* Returns nonzero if FN is a function that does not have any
4120    fundamental inline blocking properties.  */
4121 
4122 bool
4123 tree_inlinable_function_p (tree fn)
4124 {
4125   bool inlinable = true;
4126   bool do_warning;
4127   tree always_inline;
4128 
4129   /* If we've already decided this function shouldn't be inlined,
4130      there's no need to check again.  */
4131   if (DECL_UNINLINABLE (fn))
4132     return false;
4133 
4134   /* We only warn for functions declared `inline' by the user.  */
4135   do_warning = (opt_for_fn (fn, warn_inline)
4136 		&& DECL_DECLARED_INLINE_P (fn)
4137 		&& !DECL_NO_INLINE_WARNING_P (fn)
4138 		&& !DECL_IN_SYSTEM_HEADER (fn));
4139 
4140   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4141 
4142   if (flag_no_inline
4143       && always_inline == NULL)
4144     {
4145       if (do_warning)
4146 	warning (OPT_Winline, "function %q+F can never be inlined because it "
4147 		 "is suppressed using %<-fno-inline%>", fn);
4148       inlinable = false;
4149     }
4150 
4151   else if (!function_attribute_inlinable_p (fn))
4152     {
4153       if (do_warning)
4154         warning (OPT_Winline, "function %q+F can never be inlined because it "
4155                  "uses attributes conflicting with inlining", fn);
4156       inlinable = false;
4157     }
4158 
4159   else if (inline_forbidden_p (fn))
4160     {
4161       /* See if we should warn about uninlinable functions.  Previously,
4162 	 some of these warnings would be issued while trying to expand
4163 	 the function inline, but that would cause multiple warnings
4164 	 about functions that would for example call alloca.  But since
4165 	 this a property of the function, just one warning is enough.
4166 	 As a bonus we can now give more details about the reason why a
4167 	 function is not inlinable.  */
4168       if (always_inline)
4169 	error (inline_forbidden_reason, fn);
4170       else if (do_warning)
4171 	warning (OPT_Winline, inline_forbidden_reason, fn);
4172 
4173       inlinable = false;
4174     }
4175 
4176   /* Squirrel away the result so that we don't have to check again.  */
4177   DECL_UNINLINABLE (fn) = !inlinable;
4178 
4179   return inlinable;
4180 }
4181 
4182 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
4183    word size, take a possible memcpy call into account, and return the cost
4184    based on whether we are optimizing for size or speed according to SPEED_P.  */
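/* For instance (schematic, assuming MOVE_MAX_PIECES is 8 and the move
   ratio is large enough): a 20-byte struct is costed at
   (20 + 8 - 1) / 8 = 3, while an aggregate too large to be moved by
   pieces is accounted as a memcpy call with cost 4.  */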
4185 
4186 int
4187 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4188 {
4189   HOST_WIDE_INT size;
4190 
4191   gcc_assert (!VOID_TYPE_P (type));
4192 
4193   if (TREE_CODE (type) == VECTOR_TYPE)
4194     {
4195       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4196       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4197       int orig_mode_size
4198 	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4199       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4200       return ((orig_mode_size + simd_mode_size - 1)
4201 	      / simd_mode_size);
4202     }
4203 
4204   size = int_size_in_bytes (type);
4205 
4206   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4207     /* Cost of a memcpy call, 3 arguments and the call.  */
4208     return 4;
4209   else
4210     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4211 }
4212 
4213 /* Returns the cost of operation CODE, according to WEIGHTS.  */
4214 
4215 static int
4216 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4217 			tree op1 ATTRIBUTE_UNUSED, tree op2)
4218 {
4219   switch (code)
4220     {
4221     /* These are "free" conversions, or their presumed cost
4222        is folded into other operations.  */
4223     case RANGE_EXPR:
4224     CASE_CONVERT:
4225     case COMPLEX_EXPR:
4226     case PAREN_EXPR:
4227     case VIEW_CONVERT_EXPR:
4228       return 0;
4229 
4230     /* Assign cost of 1 to usual operations.
4231        ??? We may consider mapping RTL costs to this.  */
4232     case COND_EXPR:
4233     case VEC_COND_EXPR:
4234     case VEC_PERM_EXPR:
4235 
4236     case PLUS_EXPR:
4237     case POINTER_PLUS_EXPR:
4238     case POINTER_DIFF_EXPR:
4239     case MINUS_EXPR:
4240     case MULT_EXPR:
4241     case MULT_HIGHPART_EXPR:
4242 
4243     case ADDR_SPACE_CONVERT_EXPR:
4244     case FIXED_CONVERT_EXPR:
4245     case FIX_TRUNC_EXPR:
4246 
4247     case NEGATE_EXPR:
4248     case FLOAT_EXPR:
4249     case MIN_EXPR:
4250     case MAX_EXPR:
4251     case ABS_EXPR:
4252     case ABSU_EXPR:
4253 
4254     case LSHIFT_EXPR:
4255     case RSHIFT_EXPR:
4256     case LROTATE_EXPR:
4257     case RROTATE_EXPR:
4258 
4259     case BIT_IOR_EXPR:
4260     case BIT_XOR_EXPR:
4261     case BIT_AND_EXPR:
4262     case BIT_NOT_EXPR:
4263 
4264     case TRUTH_ANDIF_EXPR:
4265     case TRUTH_ORIF_EXPR:
4266     case TRUTH_AND_EXPR:
4267     case TRUTH_OR_EXPR:
4268     case TRUTH_XOR_EXPR:
4269     case TRUTH_NOT_EXPR:
4270 
4271     case LT_EXPR:
4272     case LE_EXPR:
4273     case GT_EXPR:
4274     case GE_EXPR:
4275     case EQ_EXPR:
4276     case NE_EXPR:
4277     case ORDERED_EXPR:
4278     case UNORDERED_EXPR:
4279 
4280     case UNLT_EXPR:
4281     case UNLE_EXPR:
4282     case UNGT_EXPR:
4283     case UNGE_EXPR:
4284     case UNEQ_EXPR:
4285     case LTGT_EXPR:
4286 
4287     case CONJ_EXPR:
4288 
4289     case PREDECREMENT_EXPR:
4290     case PREINCREMENT_EXPR:
4291     case POSTDECREMENT_EXPR:
4292     case POSTINCREMENT_EXPR:
4293 
4294     case REALIGN_LOAD_EXPR:
4295 
4296     case WIDEN_PLUS_EXPR:
4297     case WIDEN_MINUS_EXPR:
4298     case WIDEN_SUM_EXPR:
4299     case WIDEN_MULT_EXPR:
4300     case DOT_PROD_EXPR:
4301     case SAD_EXPR:
4302     case WIDEN_MULT_PLUS_EXPR:
4303     case WIDEN_MULT_MINUS_EXPR:
4304     case WIDEN_LSHIFT_EXPR:
4305 
4306     case VEC_WIDEN_PLUS_HI_EXPR:
4307     case VEC_WIDEN_PLUS_LO_EXPR:
4308     case VEC_WIDEN_MINUS_HI_EXPR:
4309     case VEC_WIDEN_MINUS_LO_EXPR:
4310     case VEC_WIDEN_MULT_HI_EXPR:
4311     case VEC_WIDEN_MULT_LO_EXPR:
4312     case VEC_WIDEN_MULT_EVEN_EXPR:
4313     case VEC_WIDEN_MULT_ODD_EXPR:
4314     case VEC_UNPACK_HI_EXPR:
4315     case VEC_UNPACK_LO_EXPR:
4316     case VEC_UNPACK_FLOAT_HI_EXPR:
4317     case VEC_UNPACK_FLOAT_LO_EXPR:
4318     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4319     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4320     case VEC_PACK_TRUNC_EXPR:
4321     case VEC_PACK_SAT_EXPR:
4322     case VEC_PACK_FIX_TRUNC_EXPR:
4323     case VEC_PACK_FLOAT_EXPR:
4324     case VEC_WIDEN_LSHIFT_HI_EXPR:
4325     case VEC_WIDEN_LSHIFT_LO_EXPR:
4326     case VEC_DUPLICATE_EXPR:
4327     case VEC_SERIES_EXPR:
4328 
4329       return 1;
4330 
4331     /* A few special cases of expensive operations.  This is useful
4332        to avoid inlining functions having too many of these.  */
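    /* E.g. (schematic): "a / b" with a non-constant divisor is charged
       WEIGHTS->div_mod_cost, whereas "a / 16" is charged 1 like the
       simple operations above.  */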
4333     case TRUNC_DIV_EXPR:
4334     case CEIL_DIV_EXPR:
4335     case FLOOR_DIV_EXPR:
4336     case ROUND_DIV_EXPR:
4337     case EXACT_DIV_EXPR:
4338     case TRUNC_MOD_EXPR:
4339     case CEIL_MOD_EXPR:
4340     case FLOOR_MOD_EXPR:
4341     case ROUND_MOD_EXPR:
4342     case RDIV_EXPR:
4343       if (TREE_CODE (op2) != INTEGER_CST)
4344         return weights->div_mod_cost;
4345       return 1;
4346 
4347     /* Bit-field insertion needs several shift and mask operations.  */
4348     case BIT_INSERT_EXPR:
4349       return 3;
4350 
4351     default:
4352       /* We expect a copy assignment with no operator.  */
4353       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4354       return 0;
4355     }
4356 }
4357 
4358 
4359 /* Estimate number of instructions that will be created by expanding
4360    the statements in the statement sequence STMTS.
4361    WEIGHTS contains weights attributed to various constructs.  */
4362 
4363 int
4364 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4365 {
4366   int cost;
4367   gimple_stmt_iterator gsi;
4368 
4369   cost = 0;
4370   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4371     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4372 
4373   return cost;
4374 }
4375 
4376 
4377 /* Estimate number of instructions that will be created by expanding STMT.
4378    WEIGHTS contains weights attributed to various constructs.  */
4379 
4380 int
4381 estimate_num_insns (gimple *stmt, eni_weights *weights)
4382 {
4383   unsigned cost, i;
4384   enum gimple_code code = gimple_code (stmt);
4385   tree lhs;
4386   tree rhs;
4387 
4388   switch (code)
4389     {
4390     case GIMPLE_ASSIGN:
4391       /* Try to estimate the cost of assignments.  We have two cases to
4392 	 deal with:
4393 	 1) Simple assignments to registers;
4394 	 2) Stores to things that must live in memory.  This includes
4395 	    "normal" stores to scalars, but also assignments of large
4396 	    structures, or constructors of big arrays;
4397 
4398 	 Let us look at these two cases, assuming we have "a = b + C":
4399 	 <GIMPLE_ASSIGN <var_decl "a">
4400 	        <plus_expr <var_decl "b"> <constant C>>
4401 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4402 	 any target, because "a" usually ends up in a real register.  Hence
4403 	 the only cost of this expression comes from the PLUS_EXPR, and we
4404 	 can ignore the GIMPLE_ASSIGN.
4405 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4406 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4407 	 of moving something into "a", which we compute using the function
4408 	 estimate_move_cost.  */
4409       if (gimple_clobber_p (stmt))
4410 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4411 
4412       lhs = gimple_assign_lhs (stmt);
4413       rhs = gimple_assign_rhs1 (stmt);
4414 
4415       cost = 0;
4416 
4417       /* Account for the cost of moving to / from memory.  */
4418       if (gimple_store_p (stmt))
4419 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4420       if (gimple_assign_load_p (stmt))
4421 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4422 
4423       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4424       				      gimple_assign_rhs1 (stmt),
4425 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4426 				      == GIMPLE_BINARY_RHS
4427 				      ? gimple_assign_rhs2 (stmt) : NULL);
4428       break;
4429 
4430     case GIMPLE_COND:
4431       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4432       				         gimple_op (stmt, 0),
4433 				         gimple_op (stmt, 1));
4434       break;
4435 
4436     case GIMPLE_SWITCH:
4437       {
4438 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4439 	/* Take into account cost of the switch + guess 2 conditional jumps for
4440 	   each case label.
4441 
4442 	   TODO: once the switch expansion logic is sufficiently separated, we can
4443 	   do a better job of estimating the cost of the switch.  */
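	/* E.g. (schematic): a switch with 16 case labels is accounted as
	   16 * 2 = 32 when estimating size, but only floor_log2 (16) * 2 = 8
	   when estimating time, modelling a balanced decision tree.  */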
4444 	if (weights->time_based)
4445 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4446 	else
4447 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4448       }
4449       break;
4450 
4451     case GIMPLE_CALL:
4452       {
4453 	tree decl;
4454 
4455 	if (gimple_call_internal_p (stmt))
4456 	  return 0;
4457 	else if ((decl = gimple_call_fndecl (stmt))
4458 		 && fndecl_built_in_p (decl))
4459 	  {
4460 	    /* Do not special case builtins where we see the body.
4461 	       This just confuses the inliner.  */
4462 	    struct cgraph_node *node;
4463 	    if ((node = cgraph_node::get (decl))
4464 		&& node->definition)
4465 	      ;
4466 	    /* For builtins that are likely expanded to nothing or
4467 	       inlined, do not account operand costs.  */
4468 	    else if (is_simple_builtin (decl))
4469 	      return 0;
4470 	    else if (is_inexpensive_builtin (decl))
4471 	      return weights->target_builtin_call_cost;
4472 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4473 	      {
4474 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4475 		   specialize the cheap expansion we do here.
4476 		   ???  This asks for a more general solution.  */
4477 		switch (DECL_FUNCTION_CODE (decl))
4478 		  {
4479 		    case BUILT_IN_POW:
4480 		    case BUILT_IN_POWF:
4481 		    case BUILT_IN_POWL:
4482 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4483 			  && (real_equal
4484 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4485 			       &dconst2)))
4486 			return estimate_operator_cost
4487 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4488 			     gimple_call_arg (stmt, 0));
4489 		      break;
4490 
4491 		    default:
4492 		      break;
4493 		  }
4494 	      }
4495 	  }
4496 
4497 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4498 	if (gimple_call_lhs (stmt))
4499 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4500 				      weights->time_based);
4501 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4502 	  {
4503 	    tree arg = gimple_call_arg (stmt, i);
4504 	    cost += estimate_move_cost (TREE_TYPE (arg),
4505 					weights->time_based);
4506 	  }
4507 	break;
4508       }
4509 
4510     case GIMPLE_RETURN:
4511       return weights->return_cost;
4512 
4513     case GIMPLE_GOTO:
4514     case GIMPLE_LABEL:
4515     case GIMPLE_NOP:
4516     case GIMPLE_PHI:
4517     case GIMPLE_PREDICT:
4518     case GIMPLE_DEBUG:
4519       return 0;
4520 
4521     case GIMPLE_ASM:
4522       {
4523 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4524 	/* 1000 means infinity. This avoids overflows later
4525 	   with very long asm statements.  */
4526 	if (count > 1000)
4527 	  count = 1000;
4528 	/* If this asm is asm inline, count anything as minimum size.  */
4529 	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4530 	  count = MIN (1, count);
4531 	return MAX (1, count);
4532       }
4533 
4534     case GIMPLE_RESX:
4535       /* This is either going to be an external function call with one
4536 	 argument, or two register copy statements plus a goto.  */
4537       return 2;
4538 
4539     case GIMPLE_EH_DISPATCH:
4540       /* ??? This is going to turn into a switch statement.  Ideally
4541 	 we'd have a look at the eh region and estimate the number of
4542 	 edges involved.  */
4543       return 10;
4544 
4545     case GIMPLE_BIND:
4546       return estimate_num_insns_seq (
4547 	       gimple_bind_body (as_a <gbind *> (stmt)),
4548 	       weights);
4549 
4550     case GIMPLE_EH_FILTER:
4551       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4552 
4553     case GIMPLE_CATCH:
4554       return estimate_num_insns_seq (gimple_catch_handler (
4555 				       as_a <gcatch *> (stmt)),
4556 				     weights);
4557 
4558     case GIMPLE_TRY:
4559       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4560               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4561 
4562     /* OMP directives are generally very expensive.  */
4563 
4564     case GIMPLE_OMP_RETURN:
4565     case GIMPLE_OMP_SECTIONS_SWITCH:
4566     case GIMPLE_OMP_ATOMIC_STORE:
4567     case GIMPLE_OMP_CONTINUE:
4568       /* ...except these, which are cheap.  */
4569       return 0;
4570 
4571     case GIMPLE_OMP_ATOMIC_LOAD:
4572       return weights->omp_cost;
4573 
4574     case GIMPLE_OMP_FOR:
4575       return (weights->omp_cost
4576               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4577               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4578 
4579     case GIMPLE_OMP_PARALLEL:
4580     case GIMPLE_OMP_TASK:
4581     case GIMPLE_OMP_CRITICAL:
4582     case GIMPLE_OMP_MASTER:
4583     case GIMPLE_OMP_MASKED:
4584     case GIMPLE_OMP_SCOPE:
4585     case GIMPLE_OMP_TASKGROUP:
4586     case GIMPLE_OMP_ORDERED:
4587     case GIMPLE_OMP_SCAN:
4588     case GIMPLE_OMP_SECTION:
4589     case GIMPLE_OMP_SECTIONS:
4590     case GIMPLE_OMP_SINGLE:
4591     case GIMPLE_OMP_TARGET:
4592     case GIMPLE_OMP_TEAMS:
4593       return (weights->omp_cost
4594               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4595 
4596     case GIMPLE_TRANSACTION:
4597       return (weights->tm_cost
4598 	      + estimate_num_insns_seq (gimple_transaction_body (
4599 					  as_a <gtransaction *> (stmt)),
4600 					weights));
4601 
4602     default:
4603       gcc_unreachable ();
4604     }
4605 
4606   return cost;
4607 }
4608 
4609 /* Estimate number of instructions that will be created by expanding
4610    function FNDECL.  WEIGHTS contains weights attributed to various
4611    constructs.  */
4612 
4613 int
4614 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4615 {
4616   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4617   gimple_stmt_iterator bsi;
4618   basic_block bb;
4619   int n = 0;
4620 
4621   gcc_assert (my_function && my_function->cfg);
4622   FOR_EACH_BB_FN (bb, my_function)
4623     {
4624       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4625 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4626     }
4627 
4628   return n;
4629 }
4630 
4631 
4632 /* Initializes weights used by estimate_num_insns.  */
4633 
4634 void
4635 init_inline_once (void)
4636 {
4637   eni_size_weights.call_cost = 1;
4638   eni_size_weights.indirect_call_cost = 3;
4639   eni_size_weights.target_builtin_call_cost = 1;
4640   eni_size_weights.div_mod_cost = 1;
4641   eni_size_weights.omp_cost = 40;
4642   eni_size_weights.tm_cost = 10;
4643   eni_size_weights.time_based = false;
4644   eni_size_weights.return_cost = 1;
4645 
4646   /* Estimating the time for a call is difficult, since we have no idea what the
4647      called function does.  In the current uses of eni_time_weights,
4648      underestimating the cost does less harm than overestimating it, so
4649      we choose a rather small value here.  */
4650   eni_time_weights.call_cost = 10;
4651   eni_time_weights.indirect_call_cost = 15;
4652   eni_time_weights.target_builtin_call_cost = 1;
4653   eni_time_weights.div_mod_cost = 10;
4654   eni_time_weights.omp_cost = 40;
4655   eni_time_weights.tm_cost = 40;
4656   eni_time_weights.time_based = true;
4657   eni_time_weights.return_cost = 2;
4658 }
4659 
4660 
4661 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4662 
4663 static void
4664 prepend_lexical_block (tree current_block, tree new_block)
4665 {
4666   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4667   BLOCK_SUBBLOCKS (current_block) = new_block;
4668   BLOCK_SUPERCONTEXT (new_block) = current_block;
4669 }
4670 
4671 /* Add local variables from CALLEE to CALLER.  */
4672 
4673 static inline void
4674 add_local_variables (struct function *callee, struct function *caller,
4675 		     copy_body_data *id)
4676 {
4677   tree var;
4678   unsigned ix;
4679 
4680   FOR_EACH_LOCAL_DECL (callee, ix, var)
4681     if (!can_be_nonlocal (var, id))
4682       {
4683         tree new_var = remap_decl (var, id);
4684 
4685         /* Remap debug-expressions.  */
4686 	if (VAR_P (new_var)
4687 	    && DECL_HAS_DEBUG_EXPR_P (var)
4688 	    && new_var != var)
4689 	  {
4690 	    tree tem = DECL_DEBUG_EXPR (var);
4691 	    bool old_regimplify = id->regimplify;
4692 	    id->remapping_type_depth++;
4693 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4694 	    id->remapping_type_depth--;
4695 	    id->regimplify = old_regimplify;
4696 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4697 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4698 	  }
4699 	add_local_decl (caller, new_var);
4700       }
4701 }
4702 
4703 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4704    have brought in or introduced any debug stmts for SRCVAR.  */
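/* For instance (informal sketch): if inlining introduced debug binds for the
   remapped copy of SRCVAR, the stmt built below is roughly

       # DEBUG remapped_srcvar => NULL

   in GIMPLE dump form, telling var-tracking that no further locations need
   to be computed for it.  */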
4705 
4706 static inline void
4707 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4708 {
4709   tree *remappedvarp = id->decl_map->get (srcvar);
4710 
4711   if (!remappedvarp)
4712     return;
4713 
4714   if (!VAR_P (*remappedvarp))
4715     return;
4716 
4717   if (*remappedvarp == id->retvar)
4718     return;
4719 
4720   tree tvar = target_for_debug_bind (*remappedvarp);
4721   if (!tvar)
4722     return;
4723 
4724   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4725 					  id->call_stmt);
4726   gimple_seq_add_stmt (bindings, stmt);
4727 }
4728 
4729 /* For each inlined variable for which we may have debug bind stmts,
4730    add before GSI a final debug stmt resetting it, marking the end of
4731    its life, so that var-tracking knows it doesn't have to compute
4732    further locations for it.  */
4733 
4734 static inline void
4735 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4736 {
4737   tree var;
4738   unsigned ix;
4739   gimple_seq bindings = NULL;
4740 
4741   if (!gimple_in_ssa_p (id->src_cfun))
4742     return;
4743 
4744   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4745     return;
4746 
4747   for (var = DECL_ARGUMENTS (id->src_fn);
4748        var; var = DECL_CHAIN (var))
4749     reset_debug_binding (id, var, &bindings);
4750 
4751   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4752     reset_debug_binding (id, var, &bindings);
4753 
4754   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4755 }
4756 
4757 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
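/* In outline (a sketch, not literal pass output): a call

       a_1 = foo (x_2);

   is replaced by statements that initialize copies of foo's PARM_DECLs from
   the call arguments, a copy of foo's body whose returns jump to the block
   following the call, and finally an assignment of the declared return
   variable to a_1 in place of the original GIMPLE_CALL.  */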
4758 
4759 static bool
4760 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4761 		    bitmap to_purge)
4762 {
4763   tree use_retvar;
4764   tree fn;
4765   hash_map<tree, tree> *dst;
4766   hash_map<tree, tree> *st = NULL;
4767   tree return_slot;
4768   tree modify_dest;
4769   struct cgraph_edge *cg_edge;
4770   cgraph_inline_failed_t reason;
4771   basic_block return_block;
4772   edge e;
4773   gimple_stmt_iterator gsi, stmt_gsi;
4774   bool successfully_inlined = false;
4775   bool purge_dead_abnormal_edges;
4776   gcall *call_stmt;
4777   unsigned int prop_mask, src_properties;
4778   struct function *dst_cfun;
4779   tree simduid;
4780   use_operand_p use;
4781   gimple *simtenter_stmt = NULL;
4782   vec<tree> *simtvars_save;
4783 
4784   /* The gimplifier uses input_location in too many places, such as
4785      internal_get_tmp_var ().  */
4786   location_t saved_location = input_location;
4787   input_location = gimple_location (stmt);
4788 
4789   /* From here on, we're only interested in CALL_EXPRs.  */
4790   call_stmt = dyn_cast <gcall *> (stmt);
4791   if (!call_stmt)
4792     goto egress;
4793 
4794   cg_edge = id->dst_node->get_edge (stmt);
4795   gcc_checking_assert (cg_edge);
4796   /* First, see if we can figure out what function is being called.
4797      If we cannot, then there is no hope of inlining the function.  */
4798   if (cg_edge->indirect_unknown_callee)
4799     goto egress;
4800   fn = cg_edge->callee->decl;
4801   gcc_checking_assert (fn);
4802 
4803   /* If FN is a declaration of a function in a nested scope that was
4804      globally declared inline, we don't set its DECL_INITIAL.
4805      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4806      C++ front-end uses it for cdtors to refer to their internal
4807      declarations, which are not real functions.  Fortunately those
4808      don't have trees to be saved, so we can tell by checking their
4809      gimple_body.  */
4810   if (!DECL_INITIAL (fn)
4811       && DECL_ABSTRACT_ORIGIN (fn)
4812       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4813     fn = DECL_ABSTRACT_ORIGIN (fn);
4814 
4815   /* Don't try to inline functions that are not well-suited to inlining.  */
4816   if (cg_edge->inline_failed)
4817     {
4818       reason = cg_edge->inline_failed;
4819       /* If this call was originally indirect, we do not want to emit any
4820 	 inlining related warnings or sorry messages because there are no
4821 	 guarantees regarding those.  */
4822       if (cg_edge->indirect_inlining_edge)
4823 	goto egress;
4824 
4825       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4826           /* For extern inline functions that get redefined we have always
4827	     silently ignored the always_inline flag.  Better behavior would
4828	     be to keep both bodies and use the extern inline body
4829	     for inlining, but we can't do that because frontends overwrite
4830	     the body.  */
4831 	  && !cg_edge->callee->redefined_extern_inline
4832 	  /* During early inline pass, report only when optimization is
4833 	     not turned on.  */
4834 	  && (symtab->global_info_ready
4835 	      || !optimize
4836 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4837 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4838 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4839 	{
4840 	  error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4841 		 cgraph_inline_failed_string (reason));
4842 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4843 	    inform (gimple_location (stmt), "called from here");
4844 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4845 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4846                    "called from this function");
4847 	}
4848       else if (opt_for_fn (fn, warn_inline)
4849 	       && DECL_DECLARED_INLINE_P (fn)
4850 	       && !DECL_NO_INLINE_WARNING_P (fn)
4851 	       && !DECL_IN_SYSTEM_HEADER (fn)
4852 	       && reason != CIF_UNSPECIFIED
4853 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4854 	       /* Do not warn about not inlined recursive calls.  */
4855 	       && !cg_edge->recursive_p ()
4856 	       /* Avoid warnings during early inline pass. */
4857 	       && symtab->global_info_ready)
4858 	{
4859 	  auto_diagnostic_group d;
4860 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4861 		       fn, _(cgraph_inline_failed_string (reason))))
4862 	    {
4863 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4864 		inform (gimple_location (stmt), "called from here");
4865 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4866 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4867                        "called from this function");
4868 	    }
4869 	}
4870       goto egress;
4871     }
4872   id->src_node = cg_edge->callee;
4873 
4874   /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4875      and redirect to the function being thunked.  */
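  /* Concretely (a sketch): for a this-adjusting thunk, argument 0 is bumped
     by the thunk's fixed offset and, for virtual thunks, by an additional
     offset loaded via the vtable (see thunk_adjust below); the call is then
     retargeted at the thunked-to function and inlining is retried on the
     adjusted call.  */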
4876   if (id->src_node->thunk)
4877     {
4878       cgraph_edge *edge;
4879       tree virtual_offset = NULL;
4880       profile_count count = cg_edge->count;
4881       tree op;
4882       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4883       thunk_info *info = thunk_info::get (id->src_node);
4884 
4885       cgraph_edge::remove (cg_edge);
4886       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4887 		   		           gimple_uid (stmt),
4888 				   	   profile_count::one (),
4889 					   profile_count::one (),
4890 				           true);
4891       edge->count = count;
4892       if (info->virtual_offset_p)
4893 	virtual_offset = size_int (info->virtual_value);
4894       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4895 			      NULL);
4896       gsi_insert_before (&iter, gimple_build_assign (op,
4897 						    gimple_call_arg (stmt, 0)),
4898 			 GSI_NEW_STMT);
4899       gcc_assert (info->this_adjusting);
4900       op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4901 			 virtual_offset, info->indirect_offset);
4902 
4903       gimple_call_set_arg (stmt, 0, op);
4904       gimple_call_set_fndecl (stmt, edge->callee->decl);
4905       update_stmt (stmt);
4906       id->src_node->remove ();
4907       successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
4908       maybe_remove_unused_call_args (cfun, stmt);
4909       /* This used to return true even though we do fail to inline in
4910 	 some cases.  See PR98525.  */
4911       goto egress;
4912     }
4913   fn = cg_edge->callee->decl;
4914   cg_edge->callee->get_untransformed_body ();
4915 
4916   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4917     cg_edge->callee->verify ();
4918 
4919   /* We will be inlining this callee.  */
4920   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4921 
4922   /* Update the caller's EH personality.  */
4923   if (DECL_FUNCTION_PERSONALITY (fn))
4924     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4925       = DECL_FUNCTION_PERSONALITY (fn);
4926 
4927   /* Split the block before the GIMPLE_CALL.  */
4928   stmt_gsi = gsi_for_stmt (stmt);
4929   gsi_prev (&stmt_gsi);
4930   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4931   bb = e->src;
4932   return_block = e->dest;
4933   remove_edge (e);
4934 
4935   /* If the GIMPLE_CALL was in the last statement of BB, it may have
4936      been the source of abnormal edges.  In this case, schedule
4937      the removal of dead abnormal edges.  */
4938   gsi = gsi_start_bb (return_block);
4939   gsi_next (&gsi);
4940   purge_dead_abnormal_edges = gsi_end_p (gsi);
4941 
4942   stmt_gsi = gsi_start_bb (return_block);
4943 
4944   /* Build a block containing code to initialize the arguments, the
4945      actual inline expansion of the body, and a label for the return
4946      statements within the function to jump to.  The type of the
4947      statement expression is the return type of the function call.
4948      ???  If the call does not have an associated block then we will
4949      remap all callee blocks to NULL, effectively dropping most of
4950      its debug information.  This should only happen for calls to
4951      artificial decls inserted by the compiler itself.  We need to
4952      either link the inlined blocks into the caller block tree or
4953      not refer to them in any way to not break GC for locations.  */
4954   if (tree block = gimple_block (stmt))
4955     {
4956       /* We do want to assign a BLOCK_SOURCE_LOCATION other than UNKNOWN_LOCATION
4957          so that inlined_function_outer_scope_p returns true on this BLOCK.  */
4958       location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4959       if (loc == UNKNOWN_LOCATION)
4960 	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4961       if (loc == UNKNOWN_LOCATION)
4962 	loc = BUILTINS_LOCATION;
4963       id->block = make_node (BLOCK);
4964       BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4965       BLOCK_SOURCE_LOCATION (id->block) = loc;
4966       prepend_lexical_block (block, id->block);
4967     }
4968 
4969   /* Local declarations will be replaced by their equivalents in this map.  */
4970   st = id->decl_map;
4971   id->decl_map = new hash_map<tree, tree>;
4972   dst = id->debug_map;
4973   id->debug_map = NULL;
4974   if (flag_stack_reuse != SR_NONE)
4975     id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4976 
4977   /* Record the function we are about to inline.  */
4978   id->src_fn = fn;
4979   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4980   id->reset_location = DECL_IGNORED_P (fn);
4981   id->call_stmt = call_stmt;
4982 
4983   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4984      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4985   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4986   simtvars_save = id->dst_simt_vars;
4987   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4988       && (simduid = bb->loop_father->simduid) != NULL_TREE
4989       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4990       && single_imm_use (simduid, &use, &simtenter_stmt)
4991       && is_gimple_call (simtenter_stmt)
4992       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4993     vec_alloc (id->dst_simt_vars, 0);
4994   else
4995     id->dst_simt_vars = NULL;
4996 
4997   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4998     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4999 
5000   /* If the src function contains an IFN_VA_ARG, then so will the dst
5001      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
5002   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
5003   src_properties = id->src_cfun->curr_properties & prop_mask;
5004   if (src_properties != prop_mask)
5005     dst_cfun->curr_properties &= src_properties | ~prop_mask;
5006   dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
5007   id->dst_node->calls_declare_variant_alt
5008     |= id->src_node->calls_declare_variant_alt;
5009 
5010   gcc_assert (!id->src_cfun->after_inlining);
5011 
5012   id->entry_bb = bb;
5013   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
5014     {
5015       gimple_stmt_iterator si = gsi_last_bb (bb);
5016       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
5017       						   NOT_TAKEN),
5018 			GSI_NEW_STMT);
5019     }
5020   initialize_inlined_parameters (id, stmt, fn, bb);
5021   if (debug_nonbind_markers_p && debug_inline_points && id->block
5022       && inlined_function_outer_scope_p (id->block))
5023     {
5024       gimple_stmt_iterator si = gsi_last_bb (bb);
5025       gsi_insert_after (&si, gimple_build_debug_inline_entry
5026 			(id->block, DECL_SOURCE_LOCATION (id->src_fn)),
5027 			GSI_NEW_STMT);
5028     }
5029 
5030   if (DECL_INITIAL (fn))
5031     {
5032       if (gimple_block (stmt))
5033 	{
5034 	  tree *var;
5035 
5036 	  prepend_lexical_block (id->block,
5037 				 remap_blocks (DECL_INITIAL (fn), id));
5038 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
5039 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
5040 				   == NULL_TREE));
5041 	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
5042 	     otherwise for DWARF the DW_TAG_formal_parameter DIEs will not be
5043 	     children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
5044 	     under it.  The parameters can then be evaluated in the debugger,
5045 	     but don't show up in backtraces.  */
5046 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
5047 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
5048 	      {
5049 		tree v = *var;
5050 		*var = TREE_CHAIN (v);
5051 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
5052 		BLOCK_VARS (id->block) = v;
5053 	      }
5054 	    else
5055 	      var = &TREE_CHAIN (*var);
5056 	}
5057       else
5058 	remap_blocks_to_null (DECL_INITIAL (fn), id);
5059     }
5060 
5061   /* Return statements in the function body will be replaced by jumps
5062      to the RET_LABEL.  */
5063   gcc_assert (DECL_INITIAL (fn));
5064   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5065 
5066   /* Find the LHS to which the result of this call is assigned.  */
5067   return_slot = NULL;
5068   if (gimple_call_lhs (stmt))
5069     {
5070       modify_dest = gimple_call_lhs (stmt);
5071 
5072       /* The function which we are inlining might not return a value,
5073 	 in which case we should issue a warning that the function
5074 	 does not return a value.  In that case the optimizers will
5075 	 see that the variable to which the value is assigned was not
5076 	 initialized.  We do not want to issue a warning about that
5077 	 uninitialized variable.  */
5078       if (DECL_P (modify_dest))
5079 	suppress_warning (modify_dest, OPT_Wuninitialized);
5080 
5081       if (gimple_call_return_slot_opt_p (call_stmt))
5082 	{
5083 	  return_slot = modify_dest;
5084 	  modify_dest = NULL;
5085 	}
5086     }
5087   else
5088     modify_dest = NULL;
5089 
5090   /* If we are inlining a call to the C++ operator new, we don't want
5091      to use type based alias analysis on the return value.  Otherwise
5092      we may get confused if the compiler sees that the inlined new
5093      function returns a pointer which was just deleted.  See bug
5094      33407.  */
5095   if (DECL_IS_OPERATOR_NEW_P (fn))
5096     {
5097       return_slot = NULL;
5098       modify_dest = NULL;
5099     }
5100 
5101   /* Declare the return variable for the function.  */
5102   use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5103 
5104   /* Add local vars in this inlined callee to caller.  */
5105   add_local_variables (id->src_cfun, cfun, id);
5106 
5107   if (dump_enabled_p ())
5108     {
5109       char buf[128];
5110       snprintf (buf, sizeof(buf), "%4.2f",
5111 		cg_edge->sreal_frequency ().to_double ());
5112       dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5113 		       call_stmt,
5114 		       "Inlining %C to %C with frequency %s\n",
5115 		       id->src_node, id->dst_node, buf);
5116       if (dump_file && (dump_flags & TDF_DETAILS))
5117 	{
5118 	  id->src_node->dump (dump_file);
5119 	  id->dst_node->dump (dump_file);
5120 	}
5121     }
5122 
5123   /* This is it.  Duplicate the callee body.  Assume callee is
5124      pre-gimplified.  Note that we must not alter the caller
5125      function in any way before this point, as this CALL_EXPR may be
5126      a self-referential call; if we're calling ourselves, we need to
5127      duplicate our body before altering anything.  */
5128   copy_body (id, bb, return_block, NULL);
5129 
5130   reset_debug_bindings (id, stmt_gsi);
5131 
5132   if (flag_stack_reuse != SR_NONE)
5133     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5134       if (!TREE_THIS_VOLATILE (p))
5135 	{
5136 	  /* The value associated with P is a local temporary only if
5137 	     there is no value associated with P in the debug map.  */
5138 	  tree *varp = id->decl_map->get (p);
5139 	  if (varp
5140 	      && VAR_P (*varp)
5141 	      && !is_gimple_reg (*varp)
5142 	      && !(id->debug_map && id->debug_map->get (p)))
5143 	    {
5144 	      tree clobber = build_clobber (TREE_TYPE (*varp), CLOBBER_EOL);
5145 	      gimple *clobber_stmt;
5146 	      clobber_stmt = gimple_build_assign (*varp, clobber);
5147 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
5148 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5149 	    }
5150 	}
5151 
5152   /* Reset the escaped solution.  */
5153   if (cfun->gimple_df)
5154     pt_solution_reset (&cfun->gimple_df->escaped);
5155 
5156   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
5157   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5158     {
5159       size_t nargs = gimple_call_num_args (simtenter_stmt);
5160       vec<tree> *vars = id->dst_simt_vars;
5161       auto_vec<tree> newargs (nargs + vars->length ());
5162       for (size_t i = 0; i < nargs; i++)
5163 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5164       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5165 	{
5166 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5167 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5168 	}
5169       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5170       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5171       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5172       gsi_replace (&gsi, g, false);
5173     }
5174   vec_free (id->dst_simt_vars);
5175   id->dst_simt_vars = simtvars_save;
5176 
5177   /* Clean up.  */
5178   if (id->debug_map)
5179     {
5180       delete id->debug_map;
5181       id->debug_map = dst;
5182     }
5183   delete id->decl_map;
5184   id->decl_map = st;
5185 
5186   /* Unlink the call's virtual operands before replacing it.  */
5187   unlink_stmt_vdef (stmt);
5188   if (gimple_vdef (stmt)
5189       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5190     release_ssa_name (gimple_vdef (stmt));
5191 
5192   /* If the inlined function returns a result that we care about,
5193      substitute the GIMPLE_CALL with an assignment of the return
5194      variable to the LHS of the call.  That is, if STMT was
5195      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
5196   if (use_retvar && gimple_call_lhs (stmt))
5197     {
5198       gimple *old_stmt = stmt;
5199       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5200       gimple_set_location (stmt, gimple_location (old_stmt));
5201       gsi_replace (&stmt_gsi, stmt, false);
5202       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5203       /* Append a clobber for id->retvar if easily possible.  */
5204       if (flag_stack_reuse != SR_NONE
5205 	  && id->retvar
5206 	  && VAR_P (id->retvar)
5207 	  && id->retvar != return_slot
5208 	  && id->retvar != modify_dest
5209 	  && !TREE_THIS_VOLATILE (id->retvar)
5210 	  && !is_gimple_reg (id->retvar)
5211 	  && !stmt_ends_bb_p (stmt))
5212 	{
5213 	  tree clobber = build_clobber (TREE_TYPE (id->retvar), CLOBBER_EOL);
5214 	  gimple *clobber_stmt;
5215 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
5216 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5217 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5218 	}
5219     }
5220   else
5221     {
5222       /* Handle the case of inlining a function with no return
5223 	 statement, which causes the return value to become undefined.  */
5224       if (gimple_call_lhs (stmt)
5225 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5226 	{
5227 	  tree name = gimple_call_lhs (stmt);
5228 	  tree var = SSA_NAME_VAR (name);
5229 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
5230 
5231 	  if (def)
5232 	    {
5233 	      /* If the variable is used undefined, make this name
5234 		 undefined via a move.  */
5235 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5236 	      gsi_replace (&stmt_gsi, stmt, true);
5237 	    }
5238 	  else
5239 	    {
5240 	      if (!var)
5241 		{
5242 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5243 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5244 		}
5245 	      /* Otherwise make this variable undefined.  */
5246 	      gsi_remove (&stmt_gsi, true);
5247 	      set_ssa_default_def (cfun, var, name);
5248 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5249 	    }
5250 	}
5251       /* Replace with a clobber for id->retvar.  */
5252       else if (flag_stack_reuse != SR_NONE
5253 	       && id->retvar
5254 	       && VAR_P (id->retvar)
5255 	       && id->retvar != return_slot
5256 	       && id->retvar != modify_dest
5257 	       && !TREE_THIS_VOLATILE (id->retvar)
5258 	       && !is_gimple_reg (id->retvar))
5259 	{
5260 	  tree clobber = build_clobber (TREE_TYPE (id->retvar));
5261 	  gimple *clobber_stmt;
5262 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
5263 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
5264 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
5265 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5266 	}
5267       else
5268 	gsi_remove (&stmt_gsi, true);
5269     }
5270 
5271   if (purge_dead_abnormal_edges)
5272     bitmap_set_bit (to_purge, return_block->index);
5273 
5274   /* If the value of the new expression is ignored, that's OK.  We
5275      don't warn about this for CALL_EXPRs, so we shouldn't warn about
5276      the equivalent inlined version either.  */
5277   if (is_gimple_assign (stmt))
5278     {
5279       gcc_assert (gimple_assign_single_p (stmt)
5280 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5281       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5282     }
5283 
5284   id->add_clobbers_to_eh_landing_pads = 0;
5285 
5286   /* Output the inlining info for this abstract function, since it has been
5287      inlined.  If we don't do this now, we can lose the information about the
5288      variables in the function when the blocks get blown away as soon as we
5289      remove the cgraph node.  */
5290   if (gimple_block (stmt))
5291     (*debug_hooks->outlining_inline_function) (fn);
5292 
5293   /* Update callgraph if needed.  */
5294   cg_edge->callee->remove ();
5295 
5296   id->block = NULL_TREE;
5297   id->retvar = NULL_TREE;
5298   successfully_inlined = true;
5299 
5300  egress:
5301   input_location = saved_location;
5302   return successfully_inlined;
5303 }
5304 
5305 /* Expand call statements reachable from STMT_P.
5306    We can only have CALL_EXPRs as the "toplevel" tree code or nested
5307    in a MODIFY_EXPR.  */
5308 
5309 static bool
5310 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5311 			    bitmap to_purge)
5312 {
5313   gimple_stmt_iterator gsi;
5314   bool inlined = false;
5315 
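  /* Walk the statements backwards; the iterator is advanced before a call is
     expanded, so it remains valid even when expand_call_inline splits BB at
     the call.  */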
5316   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5317     {
5318       gimple *stmt = gsi_stmt (gsi);
5319       gsi_prev (&gsi);
5320 
5321       if (is_gimple_call (stmt)
5322 	  && !gimple_call_internal_p (stmt))
5323 	inlined |= expand_call_inline (bb, stmt, id, to_purge);
5324     }
5325 
5326   return inlined;
5327 }
5328 
5329 
5330 /* Walk all basic blocks created after FIRST and try to fold every statement
5331    in the STATEMENTS pointer set.  */
5332 
5333 static void
5334 fold_marked_statements (int first, hash_set<gimple *> *statements)
5335 {
5336   auto_bitmap to_purge;
5337 
5338   auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5339   auto_sbitmap visited (last_basic_block_for_fn (cfun));
5340   bitmap_clear (visited);
5341 
5342   stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5343   while (!stack.is_empty ())
5344     {
5345       /* Look at the edge on the top of the stack.  */
5346       edge e = stack.pop ();
5347       basic_block dest = e->dest;
5348 
5349       if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5350 	  || bitmap_bit_p (visited, dest->index))
5351 	continue;
5352 
5353       bitmap_set_bit (visited, dest->index);
5354 
5355       if (dest->index >= first)
5356 	for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5357 	     !gsi_end_p (gsi); gsi_next (&gsi))
5358 	  {
5359 	    if (!statements->contains (gsi_stmt (gsi)))
5360 	      continue;
5361 
5362 	    gimple *old_stmt = gsi_stmt (gsi);
5363 	    tree old_decl = (is_gimple_call (old_stmt)
5364 			     ? gimple_call_fndecl (old_stmt) : 0);
5365 	    if (old_decl && fndecl_built_in_p (old_decl))
5366 	      {
5367 		/* Folding builtins can create multiple instructions;
5368 		   we need to look at all of them.  */
5369 		gimple_stmt_iterator i2 = gsi;
5370 		gsi_prev (&i2);
5371 		if (fold_stmt (&gsi))
5372 		  {
5373 		    gimple *new_stmt;
5374 		    /* If a builtin at the end of a bb folded into nothing,
5375 		       the following loop won't work.  */
5376 		    if (gsi_end_p (gsi))
5377 		      {
5378 			cgraph_update_edges_for_call_stmt (old_stmt,
5379 							   old_decl, NULL);
5380 			break;
5381 		      }
5382 		    if (gsi_end_p (i2))
5383 		      i2 = gsi_start_bb (dest);
5384 		    else
5385 		      gsi_next (&i2);
5386 		    while (1)
5387 		      {
5388 			new_stmt = gsi_stmt (i2);
5389 			update_stmt (new_stmt);
5390 			cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5391 							   new_stmt);
5392 
5393 			if (new_stmt == gsi_stmt (gsi))
5394 			  {
5395 			    /* It is okay to check only for the very last
5396 			       of these statements.  If it is a throwing
5397 			       statement nothing will change.  If it isn't,
5398 			       this can remove EH edges.  The only way that
5399 			       could be wrong is if some intermediate stmts
5400 			       throw, but not the last one.  That would mean
5401 			       we'd have to split the block, which we can't
5402 			       do here and we'd lose anyway.  And as builtins
5403 			       probably never throw, this all
5404 			       is moot anyway.  */
5405 			    if (maybe_clean_or_replace_eh_stmt (old_stmt,
5406 								new_stmt))
5407 			      bitmap_set_bit (to_purge, dest->index);
5408 			    break;
5409 			  }
5410 			gsi_next (&i2);
5411 		      }
5412 		  }
5413 	      }
5414 	    else if (fold_stmt (&gsi))
5415 	      {
5416 		/* Re-read the statement from GSI as fold_stmt() may
5417 		   have changed it.  */
5418 		gimple *new_stmt = gsi_stmt (gsi);
5419 		update_stmt (new_stmt);
5420 
5421 		if (is_gimple_call (old_stmt)
5422 		    || is_gimple_call (new_stmt))
5423 		  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5424 						     new_stmt);
5425 
5426 		if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5427 		  bitmap_set_bit (to_purge, dest->index);
5428 	      }
5429 	  }
5430 
5431       if (EDGE_COUNT (dest->succs) > 0)
5432 	{
5433 	  /* Avoid warnings emitted from folding statements that
5434 	     became unreachable because of inlined function parameter
5435 	     propagation.  */
5436 	  e = find_taken_edge (dest, NULL_TREE);
5437 	  if (e)
5438 	    stack.quick_push (e);
5439 	  else
5440 	    {
5441 	      edge_iterator ei;
5442 	      FOR_EACH_EDGE (e, ei, dest->succs)
5443 		stack.safe_push (e);
5444 	    }
5445 	}
5446     }
5447 
5448   gimple_purge_all_dead_eh_edges (to_purge);
5449 }
5450 
5451 /* Expand calls to inline functions in the body of FN.  */
5452 
5453 unsigned int
5454 optimize_inline_calls (tree fn)
5455 {
5456   copy_body_data id;
5457   basic_block bb;
5458   int last = n_basic_blocks_for_fn (cfun);
5459   bool inlined_p = false;
5460 
5461   /* Clear out ID.  */
5462   memset (&id, 0, sizeof (id));
5463 
5464   id.src_node = id.dst_node = cgraph_node::get (fn);
5465   gcc_assert (id.dst_node->definition);
5466   id.dst_fn = fn;
5467   /* Or any functions that aren't finished yet.  */
5468   if (current_function_decl)
5469     id.dst_fn = current_function_decl;
5470 
5471   id.copy_decl = copy_decl_maybe_to_var;
5472   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5473   id.transform_new_cfg = false;
5474   id.transform_return_to_modify = true;
5475   id.transform_parameter = true;
5476   id.statements_to_fold = new hash_set<gimple *>;
5477 
5478   push_gimplify_context ();
5479 
5480   /* We make no attempts to keep dominance info up-to-date.  */
5481   free_dominance_info (CDI_DOMINATORS);
5482   free_dominance_info (CDI_POST_DOMINATORS);
5483 
5484   /* Register specific gimple functions.  */
5485   gimple_register_cfg_hooks ();
5486 
5487   /* Reach the trees by walking over the CFG, and note the
5488      enclosing basic-blocks in the call edges.  */
5489   /* We walk the blocks going forward, because inlined function bodies
5490      will split id->current_basic_block, and the new blocks will
5491      follow it; we'll trudge through them, processing their CALL_EXPRs
5492      along the way.  */
5493   auto_bitmap to_purge;
5494   FOR_EACH_BB_FN (bb, cfun)
5495     inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5496 
5497   pop_gimplify_context (NULL);
5498 
5499   if (flag_checking)
5500     {
5501       struct cgraph_edge *e;
5502 
5503       id.dst_node->verify ();
5504 
5505       /* Double check that we inlined everything we are supposed to inline.  */
5506       for (e = id.dst_node->callees; e; e = e->next_callee)
5507 	gcc_assert (e->inline_failed);
5508     }
5509 
5510   /* If we didn't inline into the function there is nothing to do.  */
5511   if (!inlined_p)
5512     {
5513       delete id.statements_to_fold;
5514       return 0;
5515     }
5516 
5517   /* Fold queued statements.  */
5518   update_max_bb_count ();
5519   fold_marked_statements (last, id.statements_to_fold);
5520   delete id.statements_to_fold;
5521 
5522   /* Finally purge EH and abnormal edges from the call stmts we inlined.
5523      We need to do this after fold_marked_statements since that may walk
5524      the SSA use-def chain.  */
5525   unsigned i;
5526   bitmap_iterator bi;
5527   EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5528     {
5529       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5530       if (bb)
5531 	{
5532 	  gimple_purge_dead_eh_edges (bb);
5533 	  gimple_purge_dead_abnormal_call_edges (bb);
5534 	}
5535     }
5536 
5537   gcc_assert (!id.debug_stmts.exists ());
5538 
5539   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5540   number_blocks (fn);
5541 
5542   delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5543   id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5544 
5545   if (flag_checking)
5546     id.dst_node->verify ();
5547 
5548   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5549      not possible yet - the IPA passes might make various functions not
5550      throw and they don't care to proactively update local EH info.  This is
5551      done later in the fixup_cfg pass, which also executes the verification.  */
5552   return (TODO_update_ssa
5553 	  | TODO_cleanup_cfg
5554 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5555 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5556 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5557 	     ? TODO_rebuild_frequencies : 0));
5558 }
5559 
5560 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5561 
5562 tree
5563 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5564 {
5565   enum tree_code code = TREE_CODE (*tp);
5566   enum tree_code_class cl = TREE_CODE_CLASS (code);
5567 
5568   /* We make copies of most nodes.  */
5569   if (IS_EXPR_CODE_CLASS (cl)
5570       || code == TREE_LIST
5571       || code == TREE_VEC
5572       || code == TYPE_DECL
5573       || code == OMP_CLAUSE)
5574     {
5575       /* Because the chain gets clobbered when we make a copy, we save it
5576 	 here.  */
5577       tree chain = NULL_TREE, new_tree;
5578 
5579       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5580 	chain = TREE_CHAIN (*tp);
5581 
5582       /* Copy the node.  */
5583       new_tree = copy_node (*tp);
5584 
5585       *tp = new_tree;
5586 
5587       /* Now, restore the chain, if appropriate.  That will cause
5588 	 walk_tree to walk into the chain as well.  */
5589       if (code == PARM_DECL
5590 	  || code == TREE_LIST
5591 	  || code == OMP_CLAUSE)
5592 	TREE_CHAIN (*tp) = chain;
5593 
5594       /* For now, we don't update BLOCKs when we make copies.  So, we
5595 	 have to nullify all BIND_EXPRs.  */
5596       if (TREE_CODE (*tp) == BIND_EXPR)
5597 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5598     }
5599   else if (code == CONSTRUCTOR)
5600     {
5601       /* CONSTRUCTOR nodes need special handling because
5602          we need to duplicate the vector of elements.  */
5603       tree new_tree;
5604 
5605       new_tree = copy_node (*tp);
5606       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5607       *tp = new_tree;
5608     }
5609   else if (code == STATEMENT_LIST)
5610     /* We used to just abort on STATEMENT_LIST, but we can run into them
5611        with statement-expressions (c++/40975).  */
5612     copy_statement_list (tp);
5613   else if (TREE_CODE_CLASS (code) == tcc_type)
5614     *walk_subtrees = 0;
5615   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5616     *walk_subtrees = 0;
5617   else if (TREE_CODE_CLASS (code) == tcc_constant)
5618     *walk_subtrees = 0;
5619   return NULL_TREE;
5620 }
5621 
5622 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5623    information indicating to what new SAVE_EXPR this one should be mapped,
5624    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5625    the function into which the copy will be placed.  */
5626 
5627 static void
5628 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5629 {
5630   tree *n;
5631   tree t;
5632 
5633   /* See if we already encountered this SAVE_EXPR.  */
5634   n = st->get (*tp);
5635 
5636   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5637   if (!n)
5638     {
5639       t = copy_node (*tp);
5640 
5641       /* Remember this SAVE_EXPR.  */
5642       st->put (*tp, t);
5643       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5644       st->put (t, t);
5645     }
5646   else
5647     {
5648       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5649       *walk_subtrees = 0;
5650       t = *n;
5651     }
5652 
5653   /* Replace this SAVE_EXPR with the copy.  */
5654   *tp = t;
5655 }
5656 
5657 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5658    label, copies the declaration and enters it in the decl map in DATA (which
5659    is really a 'copy_body_data *').  */
5660 
5661 static tree
5662 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5663 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5664 		        struct walk_stmt_info *wi)
5665 {
5666   copy_body_data *id = (copy_body_data *) wi->info;
5667   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5668 
5669   if (stmt)
5670     {
5671       tree decl = gimple_label_label (stmt);
5672 
5673       /* Copy the decl and remember the copy.  */
5674       insert_decl_map (id, decl, id->copy_decl (decl, id));
5675     }
5676 
5677   return NULL_TREE;
5678 }
5679 
5680 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5681 						  struct walk_stmt_info *wi);
5682 
5683 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5684    Using the decl map of the copy_body_data passed through WI, remaps all
5685    local declarations to appropriate replacements in gimple operands.  */
5687 
5688 static tree
5689 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5690 {
5691   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5692   copy_body_data *id = (copy_body_data *) wi->info;
5693   hash_map<tree, tree> *st = id->decl_map;
5694   tree *n;
5695   tree expr = *tp;
5696 
5697   /* For recursive invocations this is no longer the LHS itself.  */
5698   bool is_lhs = wi->is_lhs;
5699   wi->is_lhs = false;
5700 
5701   if (TREE_CODE (expr) == SSA_NAME)
5702     {
5703       *tp = remap_ssa_name (*tp, id);
5704       *walk_subtrees = 0;
5705       if (is_lhs)
5706 	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5707     }
5708   /* Only a local declaration (variable or label).  */
5709   else if ((VAR_P (expr) && !TREE_STATIC (expr))
5710 	   || TREE_CODE (expr) == LABEL_DECL)
5711     {
5712       /* Lookup the declaration.  */
5713       n = st->get (expr);
5714 
5715       /* If it's there, remap it.  */
5716       if (n)
5717 	*tp = *n;
5718       *walk_subtrees = 0;
5719     }
5720   else if (TREE_CODE (expr) == STATEMENT_LIST
5721 	   || TREE_CODE (expr) == BIND_EXPR
5722 	   || TREE_CODE (expr) == SAVE_EXPR)
5723     gcc_unreachable ();
5724   else if (TREE_CODE (expr) == TARGET_EXPR)
5725     {
5726       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5727          It's OK for this to happen if it was part of a subtree that
5728          isn't immediately expanded, such as operand 2 of another
5729          TARGET_EXPR.  */
5730       if (!TREE_OPERAND (expr, 1))
5731 	{
5732 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5733 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5734 	}
5735     }
5736   else if (TREE_CODE (expr) == OMP_CLAUSE)
5737     {
5738       /* Before the omplower pass completes, some OMP clauses can contain
5739 	 sequences that are neither copied by gimple_seq_copy nor walked by
5740 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5741 	 in those situations, we have to copy and process them explicitly.  */
5742 
5743       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5744 	{
5745 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5746 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5747 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5748 	}
5749       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5750 	{
5751 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5752 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5753 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5754 	}
5755       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5756 	{
5757 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5758 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5759 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5760 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5761 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5762 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5763 	}
5764     }
5765 
5766   /* Keep iterating.  */
5767   return NULL_TREE;
5768 }
5769 
5770 
5771 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5772    Using the decl map of the copy_body_data passed through WI, remaps all
5773    local declarations to appropriate replacements in gimple statements.  */
5775 
5776 static tree
5777 replace_locals_stmt (gimple_stmt_iterator *gsip,
5778 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5779 		     struct walk_stmt_info *wi)
5780 {
5781   copy_body_data *id = (copy_body_data *) wi->info;
5782   gimple *gs = gsi_stmt (*gsip);
5783 
5784   if (gbind *stmt = dyn_cast <gbind *> (gs))
5785     {
5786       tree block = gimple_bind_block (stmt);
5787 
5788       if (block)
5789 	{
5790 	  remap_block (&block, id);
5791 	  gimple_bind_set_block (stmt, block);
5792 	}
5793 
5794       /* This will remap a lot of the same decls again, but this should be
5795 	 harmless.  */
5796       if (gimple_bind_vars (stmt))
5797 	{
5798 	  tree old_var, decls = gimple_bind_vars (stmt);
5799 
5800 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5801 	    if (!can_be_nonlocal (old_var, id)
5802 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5803 	      remap_decl (old_var, id);
5804 
5805 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5806 	  id->prevent_decl_creation_for_types = true;
5807 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5808 	  id->prevent_decl_creation_for_types = false;
5809 	}
5810     }
5811 
5812   /* Keep iterating.  */
5813   return NULL_TREE;
5814 }
5815 
5816 /* Create a copy of SEQ and remap all decls in it.  */
5817 
5818 static gimple_seq
5819 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5820 {
5821   if (!seq)
5822     return NULL;
5823 
5824   /* If there are any labels in OMP sequences, they can only be referred to in
5825      the sequence itself, and therefore we can do both here.  */
5826   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5827   gimple_seq copy = gimple_seq_copy (seq);
5828   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5829   return copy;
5830 }
5831 
5832 /* Copies everything in SEQ and replaces variables and labels local to
5833    current_function_decl.  */
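/* A typical use (informal sketch):

       gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   produces a copy of SEQ in which local labels, local variables and SSA names
   have been replaced by fresh copies, so the copy can be emitted alongside
   the original without clashing with it.  */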
5834 
5835 gimple_seq
5836 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5837 {
5838   copy_body_data id;
5839   struct walk_stmt_info wi;
5840   gimple_seq copy;
5841 
5842   /* There's nothing to do for NULL_TREE.  */
5843   if (seq == NULL)
5844     return seq;
5845 
5846   /* Set up ID.  */
5847   memset (&id, 0, sizeof (id));
5848   id.src_fn = current_function_decl;
5849   id.dst_fn = current_function_decl;
5850   id.src_cfun = cfun;
5851   id.decl_map = new hash_map<tree, tree>;
5852   id.debug_map = NULL;
5853 
5854   id.copy_decl = copy_decl_no_change;
5855   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5856   id.transform_new_cfg = false;
5857   id.transform_return_to_modify = false;
5858   id.transform_parameter = false;
5859 
5860   /* Walk the tree once to find local labels.  */
5861   memset (&wi, 0, sizeof (wi));
5862   hash_set<tree> visited;
5863   wi.info = &id;
5864   wi.pset = &visited;
5865   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5866 
5867   copy = gimple_seq_copy (seq);
5868 
5869   /* Walk the copy, remapping decls.  */
5870   memset (&wi, 0, sizeof (wi));
5871   wi.info = &id;
5872   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5873 
5874   /* Clean up.  */
5875   delete id.decl_map;
5876   if (id.debug_map)
5877     delete id.debug_map;
5878   if (id.dependence_map)
5879     {
5880       delete id.dependence_map;
5881       id.dependence_map = NULL;
5882     }
5883 
5884   return copy;
5885 }
5886 
5887 
5888 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5889 
5890 static tree
5891 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5892 {
5893   if (*tp == data)
5894     return (tree) data;
5895   else
5896     return NULL;
5897 }
5898 
5899 DEBUG_FUNCTION bool
5900 debug_find_tree (tree top, tree search)
5901 {
5902   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5903 }
5904 
5905 
5906 /* Declare the variables created by the inliner.  Add all the variables in
5907    VARS to BLOCK.  */
5908 
5909 static void
5910 declare_inline_vars (tree block, tree vars)
5911 {
5912   tree t;
5913   for (t = vars; t; t = DECL_CHAIN (t))
5914     {
5915       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5916       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5917       add_local_decl (cfun, t);
5918     }
5919 
5920   if (block)
5921     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5922 }
5923 
5924 /* Finish the copy COPY of DECL.  The DECL originally was in ID->src_fn,
5925    but the copy will be in ID->dst_fn; adjust its context, abstract origin
5926    and debug-related flags accordingly.  */
5927 
5928 tree
5929 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5930 {
5931   /* Don't generate debug information for the copy if we wouldn't have
5932      generated it for the original decl either.  */
5933   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5934   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5935 
5936   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5937      declaration inspired this copy.  */
5938   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5939 
5940   /* The new variable/label has no RTL, yet.  */
5941   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5942       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5943     SET_DECL_RTL (copy, 0);
5944   /* For vector typed decls make sure to update DECL_MODE according
5945      to the new function context.  */
5946   if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5947     SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5948 
5949   /* These args would always appear unused, if not for this.  */
5950   TREE_USED (copy) = 1;
5951 
5952   /* Set the context for the new declaration.  */
5953   if (!DECL_CONTEXT (decl))
5954     /* Globals stay global.  */
5955     ;
5956   else if (DECL_CONTEXT (decl) != id->src_fn)
5957     /* Things that weren't in the scope of the function we're inlining
5958        from aren't in the scope we're inlining to, either.  */
5959     ;
5960   else if (TREE_STATIC (decl))
5961     /* Function-scoped static variables should stay in the original
5962        function.  */
5963     ;
5964   else
5965     {
5966       /* Ordinary automatic local variables are now in the scope of the
5967 	 new function.  */
5968       DECL_CONTEXT (copy) = id->dst_fn;
5969       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5970 	{
5971 	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5972 	    DECL_ATTRIBUTES (copy)
5973 	      = tree_cons (get_identifier ("omp simt private"), NULL,
5974 			   DECL_ATTRIBUTES (copy));
5975 	  id->dst_simt_vars->safe_push (copy);
5976 	}
5977     }
5978 
5979   return copy;
5980 }
5981 
5982 /* Create a new VAR_DECL that is identical in all respects to DECL except that
5983    DECL can be either a VAR_DECL, a PARM_DECL or RESULT_DECL.  The original
5984    DECL must come from ID->src_fn and the copy will be part of ID->dst_fn.  */
5985 
5986 tree
5987 copy_decl_to_var (tree decl, copy_body_data *id)
5988 {
5989   tree copy, type;
5990 
5991   gcc_assert (TREE_CODE (decl) == PARM_DECL
5992 	      || TREE_CODE (decl) == RESULT_DECL);
5993 
5994   type = TREE_TYPE (decl);
5995 
5996   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5997 		     VAR_DECL, DECL_NAME (decl), type);
5998   if (DECL_PT_UID_SET_P (decl))
5999     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6000   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6001   TREE_READONLY (copy) = TREE_READONLY (decl);
6002   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6003   DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
6004   DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
6005 
6006   return copy_decl_for_dup_finish (id, decl, copy);
6007 }
6008 
6009 /* Like copy_decl_to_var, but create a return slot object instead of a
6010    pointer variable for return by invisible reference.  */
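/* E.g. (a sketch): for a function returning 'struct S' in memory, the
   RESULT_DECL has DECL_BY_REFERENCE set and its type is a reference to
   'struct S'; copy_decl_to_var would yield a pointer variable, whereas this
   function strips the reference and creates a VAR_DECL of type 'struct S'
   instead.  */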
6011 
6012 static tree
6013 copy_result_decl_to_var (tree decl, copy_body_data *id)
6014 {
6015   tree copy, type;
6016 
6017   gcc_assert (TREE_CODE (decl) == PARM_DECL
6018 	      || TREE_CODE (decl) == RESULT_DECL);
6019 
6020   type = TREE_TYPE (decl);
6021   if (DECL_BY_REFERENCE (decl))
6022     type = TREE_TYPE (type);
6023 
6024   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6025 		     VAR_DECL, DECL_NAME (decl), type);
6026   if (DECL_PT_UID_SET_P (decl))
6027     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6028   TREE_READONLY (copy) = TREE_READONLY (decl);
6029   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6030   if (!DECL_BY_REFERENCE (decl))
6031     {
6032       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6033       DECL_NOT_GIMPLE_REG_P (copy)
6034 	= (DECL_NOT_GIMPLE_REG_P (decl)
6035 	   /* RESULT_DECLs are treated specially by needs_to_live_in_memory,
6036 	      mirror that to the created VAR_DECL.  */
6037 	   || (TREE_CODE (decl) == RESULT_DECL
6038 	       && aggregate_value_p (decl, id->src_fn)));
6039     }
6040 
6041   return copy_decl_for_dup_finish (id, decl, copy);
6042 }
6043 
6044 tree
6045 copy_decl_no_change (tree decl, copy_body_data *id)
6046 {
6047   tree copy;
6048 
6049   copy = copy_node (decl);
6050 
6051   /* The COPY is not abstract; it will be generated in DST_FN.  */
6052   DECL_ABSTRACT_P (copy) = false;
6053   lang_hooks.dup_lang_specific_decl (copy);
6054 
6055   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6056      been taken; it's for internal bookkeeping in expand_goto_internal.  */
6057   if (TREE_CODE (copy) == LABEL_DECL)
6058     {
6059       TREE_ADDRESSABLE (copy) = 0;
6060       LABEL_DECL_UID (copy) = -1;
6061     }
6062 
6063   return copy_decl_for_dup_finish (id, decl, copy);
6064 }
6065 
6066 static tree
6067 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6068 {
6069   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6070     return copy_decl_to_var (decl, id);
6071   else
6072     return copy_decl_no_change (decl, id);
6073 }
6074 
6075 /* Return a copy of the function's argument tree without any modifications.  */
6076 
6077 static tree
6078 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6079 {
6080   tree arg, *parg;
6081   tree new_parm = NULL;
6082 
6083   parg = &new_parm;
6084   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6085     {
6086       tree new_tree = remap_decl (arg, id);
6087       if (TREE_CODE (new_tree) != PARM_DECL)
6088 	new_tree = id->copy_decl (arg, id);
6089       lang_hooks.dup_lang_specific_decl (new_tree);
6090       *parg = new_tree;
6091       parg = &DECL_CHAIN (new_tree);
6092     }
6093   return new_parm;
6094 }
6095 
6096 /* Return a copy of the function's static chain.  */
6097 static tree
6098 copy_static_chain (tree static_chain, copy_body_data * id)
6099 {
6100   tree *chain_copy, *pvar;
6101 
6102   chain_copy = &static_chain;
6103   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6104     {
6105       tree new_tree = remap_decl (*pvar, id);
6106       lang_hooks.dup_lang_specific_decl (new_tree);
6107       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6108       *pvar = new_tree;
6109     }
6110   return static_chain;
6111 }
6112 
6113 /* Return true if the function is allowed to be versioned.
6114    This is a guard for the versioning functionality.  */
6115 
6116 bool
6117 tree_versionable_function_p (tree fndecl)
6118 {
6119   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6120 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6121 }
6122 
6123 /* Update clone info after duplication.  */
6124 
6125 static void
6126 update_clone_info (copy_body_data * id)
6127 {
6128   struct cgraph_node *this_node = id->dst_node;
6129   if (!this_node->clones)
6130     return;
6131   for (cgraph_node *node = this_node->clones; node != this_node;)
6132     {
6133       /* First update replace maps to match the new body.  */
6134       clone_info *info = clone_info::get (node);
6135       if (info && info->tree_map)
6136 	{
6137 	  unsigned int i;
6138 	  for (i = 0; i < vec_safe_length (info->tree_map); i++)
6139 	    {
6140 	      struct ipa_replace_map *replace_info;
6141 	      replace_info = (*info->tree_map)[i];
6142 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6143 	    }
6144 	}
6145 
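      /* Advance NODE in a preorder walk of the clone tree: descend into the
	 node's own clones first, otherwise move to the next sibling clone,
	 otherwise climb back up until a sibling is found or we return to
	 ID->dst_node.  */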
6146       if (node->clones)
6147 	node = node->clones;
6148       else if (node->next_sibling_clone)
6149 	node = node->next_sibling_clone;
6150       else
6151 	{
6152 	  while (node != id->dst_node && !node->next_sibling_clone)
6153 	    node = node->clone_of;
6154 	  if (node != id->dst_node)
6155 	    node = node->next_sibling_clone;
6156 	}
6157     }
6158 }
6159 
6160 /* Create a copy of a function's tree.
6161    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6162    of the original function and the new copied function
6163    respectively.  In case we want to replace a DECL
6164    tree with another tree while duplicating the function's
6165    body, TREE_MAP represents the mapping between these
6166    trees. If UPDATE_CLONES is set, the call_stmt fields
6167    of edges of clones of the function will be updated.
6168 
6169    If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6170    the function parameters and return value) should be modified.
6171    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6172    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6173 */
6174 void
6175 tree_function_versioning (tree old_decl, tree new_decl,
6176 			  vec<ipa_replace_map *, va_gc> *tree_map,
6177 			  ipa_param_adjustments *param_adjustments,
6178 			  bool update_clones, bitmap blocks_to_copy,
6179 			  basic_block new_entry)
6180 {
6181   struct cgraph_node *old_version_node;
6182   struct cgraph_node *new_version_node;
6183   copy_body_data id;
6184   tree p;
6185   unsigned i;
6186   struct ipa_replace_map *replace_info;
6187   basic_block old_entry_block, bb;
6188   auto_vec<gimple *, 10> init_stmts;
6189   tree vars = NULL_TREE;
6190 
6191   /* We can get called recursively from expand_call_inline via clone
6192      materialization.  While expand_call_inline maintains input_location
6193      we must not let it leak into the materialized clone.  */
6194   location_t saved_location = input_location;
6195   input_location = UNKNOWN_LOCATION;
6196 
6197   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6198 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
6199   DECL_POSSIBLY_INLINED (old_decl) = 1;
6200 
6201   old_version_node = cgraph_node::get (old_decl);
6202   gcc_checking_assert (old_version_node);
6203   new_version_node = cgraph_node::get (new_decl);
6204   gcc_checking_assert (new_version_node);
6205 
6206   /* Copy over debug args.  */
6207   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6208     {
6209       vec<tree, va_gc> **new_debug_args, **old_debug_args;
6210       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6211       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6212       old_debug_args = decl_debug_args_lookup (old_decl);
6213       if (old_debug_args)
6214 	{
6215 	  new_debug_args = decl_debug_args_insert (new_decl);
6216 	  *new_debug_args = vec_safe_copy (*old_debug_args);
6217 	}
6218     }
6219 
6220   /* Output the inlining info for this abstract function, since it has been
6221      inlined.  If we don't do this now, we can lose the information about the
6222      variables in the function when the blocks get blown away as soon as we
6223      remove the cgraph node.  */
6224   (*debug_hooks->outlining_inline_function) (old_decl);
6225 
6226   DECL_ARTIFICIAL (new_decl) = 1;
6227   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6228   if (DECL_ORIGIN (old_decl) == old_decl)
6229     old_version_node->used_as_abstract_origin = true;
6230   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6231 
6232   /* Prepare the data structures for the tree copy.  */
6233   memset (&id, 0, sizeof (id));
6234 
6235   /* Set up the set of statements to fold after the body is copied.  */
6236   id.statements_to_fold = new hash_set<gimple *>;
6237 
6238   id.decl_map = new hash_map<tree, tree>;
6239   id.debug_map = NULL;
6240   id.src_fn = old_decl;
6241   id.dst_fn = new_decl;
6242   id.src_node = old_version_node;
6243   id.dst_node = new_version_node;
6244   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6245   id.blocks_to_copy = blocks_to_copy;
6246 
6247   id.copy_decl = copy_decl_no_change;
6248   id.transform_call_graph_edges
6249     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6250   id.transform_new_cfg = true;
6251   id.transform_return_to_modify = false;
6252   id.transform_parameter = false;
6253 
6254   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (old_decl));
6255   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6256   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6257   initialize_cfun (new_decl, old_decl,
6258 		   new_entry ? new_entry->count : old_entry_block->count);
6259   new_version_node->calls_declare_variant_alt
6260     = old_version_node->calls_declare_variant_alt;
6261   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6262     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6263       = id.src_cfun->gimple_df->ipa_pta;
6264 
6265   /* Copy the function's static chain.  */
6266   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6267   if (p)
6268     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6269       = copy_static_chain (p, &id);
6270 
6271   auto_vec<int, 16> new_param_indices;
6272   clone_info *info = clone_info::get (old_version_node);
6273   ipa_param_adjustments *old_param_adjustments
6274     = info ? info->param_adjustments : NULL;
6275   if (old_param_adjustments)
6276     old_param_adjustments->get_updated_indices (&new_param_indices);
6277 
6278   /* If there's a tree_map, prepare for substitution.  */
6279   if (tree_map)
6280     for (i = 0; i < tree_map->length (); i++)
6281       {
6282 	gimple *init;
6283 	replace_info = (*tree_map)[i];
6284 
6285 	int p = replace_info->parm_num;
6286 	if (old_param_adjustments)
6287 	  p = new_param_indices[p];
6288 
6289 	tree parm;
6290 	for (parm = DECL_ARGUMENTS (old_decl); p;
6291 	     parm = DECL_CHAIN (parm))
6292 	  p--;
6293 	gcc_assert (parm);
6294 	init = setup_one_parameter (&id, parm, replace_info->new_tree,
6295 				    id.src_fn, NULL, &vars);
6296 	if (init)
6297 	  init_stmts.safe_push (init);
6298       }
6299 
6300   ipa_param_body_adjustments *param_body_adjs = NULL;
6301   if (param_adjustments)
6302     {
6303       param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6304 							new_decl, old_decl,
6305 							&id, &vars, tree_map);
6306       id.param_body_adjs = param_body_adjs;
6307       DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6308     }
6309   else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6310     DECL_ARGUMENTS (new_decl)
6311       = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6312 
6313   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6314   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6315 
6316   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6317 
6318   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6319     /* Add local vars.  */
6320     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6321 
6322   if (DECL_RESULT (old_decl) == NULL_TREE)
6323     ;
6324   else if (param_adjustments && param_adjustments->m_skip_return
6325 	   && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6326     {
6327       tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6328 						   &id);
6329       declare_inline_vars (NULL, resdecl_repl);
6330       if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
6331 	resdecl_repl = build_fold_addr_expr (resdecl_repl);
6332       insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6333 
6334       DECL_RESULT (new_decl)
6335 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6336 		      RESULT_DECL, NULL_TREE, void_type_node);
6337       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6338       DECL_IS_MALLOC (new_decl) = false;
6339       cfun->returns_struct = 0;
6340       cfun->returns_pcc_struct = 0;
6341     }
6342   else
6343     {
6344       tree old_name;
6345       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6346       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6347       if (gimple_in_ssa_p (id.src_cfun)
6348 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6349 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6350 	{
6351 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6352 	  insert_decl_map (&id, old_name, new_name);
6353 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6354 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6355 	}
6356     }
6357 
6358   /* Set up the destination function's loop tree.  */
6359   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6360     {
6361       cfun->curr_properties &= ~PROP_loops;
6362       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6363       cfun->curr_properties |= PROP_loops;
6364     }
6365 
6366   /* Copy the function's body.  */
6367   copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6368 	     new_entry);
6369 
6370   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6371   number_blocks (new_decl);
6372 
6373   /* We want to create the BB unconditionally, so that the addition of
6374      debug stmts doesn't affect BB count, which may in the end cause
6375      codegen differences.  */
6376   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6377   while (init_stmts.length ())
6378     insert_init_stmt (&id, bb, init_stmts.pop ());
6379   update_clone_info (&id);
6380 
6381   /* Remap the nonlocal_goto_save_area, if any.  */
6382   if (cfun->nonlocal_goto_save_area)
6383     {
6384       struct walk_stmt_info wi;
6385 
6386       memset (&wi, 0, sizeof (wi));
6387       wi.info = &id;
6388       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6389     }
6390 
6391   /* Clean up.  */
6392   delete id.decl_map;
6393   if (id.debug_map)
6394     delete id.debug_map;
6395   free_dominance_info (CDI_DOMINATORS);
6396   free_dominance_info (CDI_POST_DOMINATORS);
6397 
6398   update_max_bb_count ();
6399   fold_marked_statements (0, id.statements_to_fold);
6400   delete id.statements_to_fold;
6401   delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6402   if (id.dst_node->definition)
6403     cgraph_edge::rebuild_references ();
6404   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6405     {
6406       calculate_dominance_info (CDI_DOMINATORS);
6407       fix_loop_structure (NULL);
6408     }
6409   update_ssa (TODO_update_ssa);
6410 
6411   /* After partial cloning we need to rescale frequencies, so they are
6412      within proper range in the cloned function.  */
6413   if (new_entry)
6414     {
6415       struct cgraph_edge *e;
6416       rebuild_frequencies ();
6417 
6418       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6419       for (e = new_version_node->callees; e; e = e->next_callee)
6420 	{
6421 	  basic_block bb = gimple_bb (e->call_stmt);
6422 	  e->count = bb->count;
6423 	}
6424       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6425 	{
6426 	  basic_block bb = gimple_bb (e->call_stmt);
6427 	  e->count = bb->count;
6428 	}
6429     }
6430 
6431   if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6432     {
6433       vec<tree, va_gc> **debug_args = NULL;
6434       unsigned int len = 0;
6435       unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6436 
6437       for (i = 0; i < reset_len; i++)
6438 	{
6439 	  tree parm = param_body_adjs->m_reset_debug_decls[i];
6440 	  gcc_assert (is_gimple_reg (parm));
6441 	  tree ddecl;
6442 
6443 	  if (debug_args == NULL)
6444 	    {
6445 	      debug_args = decl_debug_args_insert (new_decl);
6446 	      len = vec_safe_length (*debug_args);
6447 	    }
6448 	  ddecl = build_debug_expr_decl (TREE_TYPE (parm));
6449 	  /* FIXME: Is setting the mode really necessary? */
6450 	  SET_DECL_MODE (ddecl, DECL_MODE (parm));
6451 	  vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6452 	  vec_safe_push (*debug_args, ddecl);
6453 	}
6454       if (debug_args != NULL)
6455 	{
6456 	  /* On the callee side, add
6457 	     DEBUG D#Y s=> parm
6458 	     DEBUG var => D#Y
6459 	     stmts to the first bb where var is a VAR_DECL created for the
6460 	     optimized away parameter in DECL_INITIAL block.  This hints
6461 	     in the debug info that var (whose DECL_ORIGIN is the parm
6462 	     PARM_DECL) is optimized away, but could be looked up at the
6463 	     call site as value of D#X there.  */
6464 	  gimple_stmt_iterator cgsi
6465 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6466 	  gimple *def_temp;
6467 	  tree var = vars;
6468 	  i = vec_safe_length (*debug_args);
6469 	  do
6470 	    {
6471 	      tree vexpr = NULL_TREE;
6472 	      i -= 2;
6473 	      while (var != NULL_TREE
6474 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6475 		var = TREE_CHAIN (var);
6476 	      if (var == NULL_TREE)
6477 		break;
6478 	      tree parm = (**debug_args)[i];
6479 	      if (tree parm_ddef = ssa_default_def (id.src_cfun, parm))
6480 		if (tree *d
6481 		    = param_body_adjs->m_dead_ssa_debug_equiv.get (parm_ddef))
6482 		  vexpr = *d;
6483 	      if (!vexpr)
6484 		{
6485 		  vexpr = build_debug_expr_decl (TREE_TYPE (parm));
6486 		  /* FIXME: Is setting the mode really necessary? */
6487 		  SET_DECL_MODE (vexpr, DECL_MODE (parm));
6488 		}
6489 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6490 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6491 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6492 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6493 	    }
6494 	  while (i > len);
6495 	}
6496     }
6497   delete param_body_adjs;
6498   free_dominance_info (CDI_DOMINATORS);
6499   free_dominance_info (CDI_POST_DOMINATORS);
6500 
6501   gcc_assert (!id.debug_stmts.exists ());
6502   pop_cfun ();
6503   input_location = saved_location;
6504   return;
6505 }
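
/* A caller-side sketch (illustrative only; names and context are
   hypothetical): a pass that has already created NEW_DECL as a copy of
   OLD_DECL's declaration and registered it with the callgraph could
   request a plain full-body copy with

     tree_function_versioning (old_decl, new_decl,
			       NULL, NULL,   // no tree_map, no param_adjustments
			       false,        // do not update clones
			       NULL, NULL);  // copy all blocks, keep the old entry

   supplying TREE_MAP and/or PARAM_ADJUSTMENTS instead yields a specialized
   clone with substituted parameter values and a modified prototype.  */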
6506 
6507 /* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6508    the callee and return the inlined body on success.  */
6509 
6510 tree
6511 maybe_inline_call_in_expr (tree exp)
6512 {
6513   tree fn = get_callee_fndecl (exp);
6514 
6515   /* We can only try to inline "const" functions.  */
6516   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6517     {
6518       call_expr_arg_iterator iter;
6519       copy_body_data id;
6520       tree param, arg, t;
6521       hash_map<tree, tree> decl_map;
6522 
6523       /* Remap the parameters.  */
6524       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6525 	   param;
6526 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6527 	decl_map.put (param, arg);
6528 
6529       memset (&id, 0, sizeof (id));
6530       id.src_fn = fn;
6531       id.dst_fn = current_function_decl;
6532       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6533       id.decl_map = &decl_map;
6534 
6535       id.copy_decl = copy_decl_no_change;
6536       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6537       id.transform_new_cfg = false;
6538       id.transform_return_to_modify = true;
6539       id.transform_parameter = true;
6540 
6541       /* Make sure not to unshare trees behind the front-end's back
6542 	 since front-end specific mechanisms may rely on sharing.  */
6543       id.regimplify = false;
6544       id.do_not_unshare = true;
6545 
6546       /* We're not inside any EH region.  */
6547       id.eh_lp_nr = 0;
6548 
6549       t = copy_tree_body (&id);
6550 
6551       /* We can only return something suitable for use in a GENERIC
6552 	 expression tree.  */
6553       if (TREE_CODE (t) == MODIFY_EXPR)
6554 	return TREE_OPERAND (t, 1);
6555     }
6556 
6557    return NULL_TREE;
6558 }
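
/* For illustration (hypothetical example, assuming SQ is marked "const"
   and its GENERIC body, return i * i;, is still in DECL_SAVED_TREE):
   a call such as sq (3) in a GENERIC expression can be folded by copying
   the body with the parameter I remapped to 3 and returning the
   right-hand side of the resulting MODIFY_EXPR, i.e. the expression 3 * 3.  */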
6559 
6560 /* Duplicate a type, fields and all.  */
6561 
6562 tree
6563 build_duplicate_type (tree type)
6564 {
6565   struct copy_body_data id;
6566 
6567   memset (&id, 0, sizeof (id));
6568   id.src_fn = current_function_decl;
6569   id.dst_fn = current_function_decl;
6570   id.src_cfun = cfun;
6571   id.decl_map = new hash_map<tree, tree>;
6572   id.debug_map = NULL;
6573   id.copy_decl = copy_decl_no_change;
6574 
6575   type = remap_type_1 (type, &id);
6576 
6577   delete id.decl_map;
6578   if (id.debug_map)
6579     delete id.debug_map;
6580 
6581   TYPE_CANONICAL (type) = type;
6582 
6583   return type;
6584 }
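
/* Illustrative only:

     tree copy = build_duplicate_type (orig);

   yields a structurally identical type whose FIELD_DECLs have been
   remapped and whose TYPE_CANONICAL is the copy itself, so it is not
   treated as the same canonical type as ORIG.  */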
6585 
6586 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6587    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6588    evaluation.  */
6589 
6590 tree
6591 copy_fn (tree fn, tree& parms, tree& result)
6592 {
6593   copy_body_data id;
6594   tree param;
6595   hash_map<tree, tree> decl_map;
6596 
6597   tree *p = &parms;
6598   *p = NULL_TREE;
6599 
6600   memset (&id, 0, sizeof (id));
6601   id.src_fn = fn;
6602   id.dst_fn = current_function_decl;
6603   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6604   id.decl_map = &decl_map;
6605 
6606   id.copy_decl = copy_decl_no_change;
6607   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6608   id.transform_new_cfg = false;
6609   id.transform_return_to_modify = false;
6610   id.transform_parameter = true;
6611 
6612   /* Make sure not to unshare trees behind the front-end's back
6613      since front-end specific mechanisms may rely on sharing.  */
6614   id.regimplify = false;
6615   id.do_not_unshare = true;
6616   id.do_not_fold = true;
6617 
6618   /* We're not inside any EH region.  */
6619   id.eh_lp_nr = 0;
6620 
6621   /* Remap the parameters and result and return them to the caller.  */
6622   for (param = DECL_ARGUMENTS (fn);
6623        param;
6624        param = DECL_CHAIN (param))
6625     {
6626       *p = remap_decl (param, &id);
6627       p = &DECL_CHAIN (*p);
6628     }
6629 
6630   if (DECL_RESULT (fn))
6631     result = remap_decl (DECL_RESULT (fn), &id);
6632   else
6633     result = NULL_TREE;
6634 
6635   return copy_tree_body (&id);
6636 }
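
/* A rough sketch of the expected use from the C++ constexpr evaluator
   (hypothetical, for illustration only):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);
     // bind the actual arguments to the decls chained on PARMS,
     // evaluate BODY, then read the returned value out of RESULT.
*/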
6637