/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being placed into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
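
/* An illustrative sketch (editorial addition, not GCC source): at the
   source level, the inlining rewrite described above turns

     static int add1 (int x) { return x + 1; }

     int caller (int a) { return add1 (a); }

   into roughly

     int
     caller (int a)
     {
       int x.1 = a;          <-- PARM_DECL remapped to a VAR_DECL
       int retval.2;
       retval.2 = x.1 + 1;   <-- RETURN_EXPR became a MODIFY_EXPR
       return retval.2;
     }

   where x.1 and retval.2 are hypothetical names for the remapped
   parameter and the dedicated returned-value variable.  */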

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
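
/* For illustration (editorial): if the map sends PARM_DECL X to a new
   VAR_DECL X.1, the table afterwards holds both

     X   -> X.1
     X.1 -> X.1

   so a later walk that reaches X.1 through some shared tree maps it
   to itself instead of duplicating it a second time.  */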

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
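
/* A minimal sketch of the intended protocol (editorial; the consumer
   is the debug-statement copying code later in this file):

     processing_debug_stmt = 1;
     ... walk the operands of the inlined debug stmt; remap_decl and
         remap_ssa_name set the flag to -1 when they hit a reference
         that has no mapping in the inlined context ...
     if (processing_debug_stmt < 0)
       ... drop the bound value, it cannot be expressed here ...
     processing_debug_stmt = 0;
*/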

/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet, as the statement has not been copied yet.
     We do that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI
     node when the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
	{
	  /* By inlining a function that has an uninitialized variable,
	     we might extend its lifetime (the variable might get
	     reused).  This causes an ICE when we end up extending the
	     lifetime of an SSA name across an abnormal edge, and it
	     also increases register pressure.

	     We simply initialize all uninitialized vars to 0, except
	     when we are inlining into the very first BB.  We could
	     avoid this for all BBs that are not inside strongly
	     connected regions of the CFG, but this is expensive to
	     test.  */
	  if (id->entry_bb
	      && is_gimple_reg (SSA_NAME_VAR (name))
	      && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;

	      init_stmt = gimple_build_assign (new_tree,
		                               fold_convert (TREE_TYPE (new_tree),
					       		    integer_zero_node));
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
		  == name)
	        set_default_def (SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
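
/* Illustrative example (editorial): when inlining

     int f (void) { int x; return x; }

   (x is never initialized) into the middle of a loop, the copied SSA
   name for x would otherwise live across the loop's back edge.  The
   zero-initialization above gives the name a real definition in
   ID->entry_bb, so its lifetime is not extended across edges inside
   strongly connected regions of the CFG.  */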

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      if (cfun && gimple_in_ssa_p (cfun)
	  && (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
	{
	  get_var_ann (t);
	  add_referenced_var (t);
	}
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					    TYPE_MODE (type),
					    TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    TREE_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
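
/* For illustration (editorial): a type is variably modified when its
   size or bounds depend on a value in the source function, e.g.

     void
     src_fn (int n)
     {
       int vla[n];       <-- the array type's bound uses PARM_DECL n
     }

   When the body is copied, n is remapped, so the type of vla (and its
   TYPE_SIZE trees) must be remapped along with it; a constant-sized
   type such as int[10] is simply entered into the identity map and
   shared unchanged.  */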

/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}
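
/* Illustrative example (editorial):

     int
     src_fn (void)
     {
       static int counter;   <-- not auto_var_in_fn_p: stays nonlocal
       int i = 0;            <-- auto variable: remapped to a copy
       return counter + i;
     }

   Duplicating 'counter' for every inline copy would create multiple
   declarations of a single object, so it is recorded in
   BLOCK_NONLOCALIZED_VARS instead of being copied.  */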

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var)
	      && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
	    cfun->local_decls = tree_cons (NULL_TREE, old_var,
						   cfun->local_decls);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
        {
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  TREE_CHAIN (new_var) = new_decls;
	  new_decls = new_var;
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
  					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier
     by not swapping their order when producing the copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  tree t;
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    {
      /* This will remap a lot of the same decls again, but this should be
	 harmless.  */
      BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);

      /* Also copy value-expressions.  */
      for (t = BIND_EXPR_VARS (*tp); t; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == VAR_DECL
	    && DECL_HAS_VALUE_EXPR_P (t))
	  {
	    tree tem = DECL_VALUE_EXPR (t);
	    walk_tree (&tem, copy_tree_body_r, id, NULL);
	    SET_DECL_VALUE_EXPR (t, tem);
	  }
    }
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}


/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the child nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree type, new_tree, old;

	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the
		 INDIRECT_REF, but we absolutely rely on that.  As
		 fold_indirect_ref does other useful transformations,
		 try that first, though.  */
	      type = TREE_TYPE (TREE_TYPE (*n));
	      new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (!*tp)
	        {
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
	          else
		    {
	              *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
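      /* For illustration (editorial): the INDIRECT_REF case above cleans
	 up the *& chains that inline substitution creates.  When

	   static int deref (int *p) { return *p; }

	 is inlined into the call 'deref (&x)', the mapping p -> &x
	 would turn *p into *&x; folding that back to plain 'x' here
	 keeps the IL simple.  */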

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
	 vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
	  && TREE_CODE (*tp) == VAR_DECL
	  && id->remapping_type_depth == 0
	  && !processing_debug_stmt)
	add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
	gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the INDIRECT_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  But make sure
	     to not improperly set TREE_BLOCK on some sub-expressions.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  tree block = id->block;
	  id->block = NULL_TREE;
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
	  id->block = block;

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree new_tree;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
	         it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
	         does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE (*n));
	      if (id->do_not_unshare)
		new_tree = *n;
	      else
		new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (! *tp)
	        {
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
	          else
		    {
	              *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
	 vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
	  && TREE_CODE (*tp) == VAR_DECL
	  && id->remapping_type_depth == 0
	  && !processing_debug_stmt)
	add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed
	 block.  When inlining, we want EXPRs without a block to appear
	 in the block of the function call, provided we are not
	 remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      gcc_assert (n);
	      new_block = *n;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
	  copy = gimple_build_assign (id->retvar, retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (stmt, id);
	  break;

	case GIMPLE_CATCH:
	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_parallel
	           (s1,
		    gimple_omp_parallel_clauses (stmt),
		    gimple_omp_parallel_child_fn (stmt),
		    gimple_omp_parallel_data_arg (stmt));
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
	           (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
	           (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
	           (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy
	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return gimple_build_nop ();
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
					  gimple_debug_bind_get_value (stmt),
					  stmt);
	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
	  return copy;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      int r = gimple_resx_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (copy, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      int r = gimple_eh_dispatch_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (copy, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}


/* Copy basic block, scaling the profile accordingly.  Edges will be
   taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance, driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
	continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
	 non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == NOP_EXPR
	  && !is_gimple_val (gimple_assign_rhs1 (stmt)))
	{
	  tree new_rhs;
	  new_rhs = force_gimple_operand_gsi (&seq_gsi,
					      gimple_assign_rhs1 (stmt),
					      true, NULL, false,
					      GSI_CONTINUE_LINKING);
	  gimple_assign_set_rhs1 (stmt, new_rhs);
	  id->regimplify = false;
	}

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
	gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements, we
	 need to process all of them.  */
      do
	{
	  tree fn;

	  stmt = gsi_stmt (copy_gsi);
	  if (is_gimple_call (stmt)
	      && gimple_call_va_arg_pack_p (stmt)
	      && id->gimple_call)
	    {
	      /* __builtin_va_arg_pack () should be replaced by
		 all arguments corresponding to ... in the caller.  */
	      tree p;
	      gimple new_call;
	      VEC(tree, heap) *argarray;
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      size_t n;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
		nargs--;

	      /* Create the new array of arguments.  */
	      n = nargs + gimple_call_num_args (stmt);
	      argarray = VEC_alloc (tree, heap, n);
	      VEC_safe_grow (tree, heap, argarray, n);

	      /* Copy all the arguments before '...'  */
	      memcpy (VEC_address (tree, argarray),
		      gimple_call_arg_ptr (stmt, 0),
		      gimple_call_num_args (stmt) * sizeof (tree));

	      /* Append the arguments passed in '...'  */
	      memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
		      gimple_call_arg_ptr (id->gimple_call, 0)
			+ (gimple_call_num_args (id->gimple_call) - nargs),
		      nargs * sizeof (tree));

	      new_call = gimple_build_call_vec (gimple_call_fn (stmt),
						argarray);

	      VEC_free (tree, heap, argarray);

	      /* Copy all GIMPLE_CALL flags, location and block, except
		 GF_CALL_VA_ARG_PACK.  */
	      gimple_call_copy_flags (new_call, stmt);
	      gimple_call_set_va_arg_pack (new_call, false);
	      gimple_set_location (new_call, gimple_location (stmt));
	      gimple_set_block (new_call, gimple_block (stmt));
	      gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

	      gsi_replace (&copy_gsi, new_call, false);
	      stmt = new_call;
	    }
	  else if (is_gimple_call (stmt)
		   && id->gimple_call
		   && (decl = gimple_call_fndecl (stmt))
		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
	    {
	      /* __builtin_va_arg_pack_len () should be replaced by
		 the number of anonymous arguments.  */
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      tree count, p;
	      gimple new_stmt;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
		nargs--;

	      count = build_int_cst (integer_type_node, nargs);
	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
	      gsi_replace (&copy_gsi, new_stmt, false);
	      stmt = new_stmt;
	    }
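	  /* Illustrative example (editorial) of the two builtins handled
	     above:

	       static inline int __attribute__ ((always_inline))
	       log_int (const char *fmt, ...)
	       {
		 if (__builtin_va_arg_pack_len () == 0)
		   return 0;
		 return printf (fmt, __builtin_va_arg_pack ());
	       }

	     When 'log_int ("%d %d", 1, 2)' is inlined, the first call is
	     replaced by the constant 2 and the second becomes
	     'printf ("%d %d", 1, 2)': the caller's anonymous arguments
	     are spliced directly into the forwarded call.  */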

	  /* Statements produced by inlining can be unfolded, especially
	     when we have constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark statements for later folding.  We mark
	     all new statements, instead of just the statements that have
	     changed by some nontrivial substitution, so that even
	     statements made foldable indirectly are updated.  If this
	     turns out to be expensive, copy_body can be told to watch
	     for nontrivial changes.  */
	  if (id->statements_to_fold)
	    pointer_set_insert (id->statements_to_fold, stmt);
1626 
1627 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
1628 	     callgraph edges and update or duplicate them.  */
1629 	  if (is_gimple_call (stmt))
1630 	    {
1631 	      struct cgraph_edge *edge;
1632 	      int flags;
1633 
1634 	      switch (id->transform_call_graph_edges)
1635 		{
1636 		case CB_CGE_DUPLICATE:
1637 		  edge = cgraph_edge (id->src_node, orig_stmt);
1638 		  if (edge)
1639 		    {
1640 		      int edge_freq = edge->frequency;
1641 		      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1642 					        gimple_uid (stmt),
1643 					        REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1644 					        edge->frequency, true);
1645 		      /* We could also just rescale the frequency, but
1646 		         doing so would introduce roundoff errors and make
1647 			 the verifier unhappy.  */
1648 		      edge->frequency
1649 		        = compute_call_stmt_bb_frequency (id->dst_node->decl,
1650 							  copy_basic_block);
1651 		      if (dump_file
1652 		      	  && profile_status_for_function (cfun) != PROFILE_ABSENT
1653 			  && (edge_freq > edge->frequency + 10
1654 			      || edge_freq < edge->frequency - 10))
1655 			{
1656 			  fprintf (dump_file, "Edge frequency estimated by "
1657 			           "cgraph %i diverges from inliner's estimate %i\n",
1658 			  	   edge_freq,
1659 				   edge->frequency);
1660 			  fprintf (dump_file,
1661 			  	   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1662 				   bb->index,
1663 				   bb->frequency,
1664 				   copy_basic_block->frequency);
1665 			}
1666 		      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1667 		    }
1668 		  break;
1669 
1670 		case CB_CGE_MOVE_CLONES:
1671 		  cgraph_set_call_stmt_including_clones (id->dst_node,
1672 							 orig_stmt, stmt);
1673 		  edge = cgraph_edge (id->dst_node, stmt);
1674 		  break;
1675 
1676 		case CB_CGE_MOVE:
1677 		  edge = cgraph_edge (id->dst_node, orig_stmt);
1678 		  if (edge)
1679 		    cgraph_set_call_stmt (edge, stmt);
1680 		  break;
1681 
1682 		default:
1683 		  gcc_unreachable ();
1684 		}
1685 
1686 	      /* Constant propagation on arguments done during inlining
1687 		 may create a new direct call.  Produce an edge for it.  */
1688 	      if ((!edge
1689 		   || (edge->indirect_call
1690 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1691 		  && is_gimple_call (stmt)
1692 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
1693 		{
1694 		  struct cgraph_node *dest = cgraph_node (fn);
1695 
1696 		  /* We have a missing edge in the callgraph.  This can happen
1697 		     when previous inlining turned an indirect call into a
1698 		     direct call by constant-propagating arguments, or we are
1699 		     producing a dead clone (for further cloning).  In all
1700 		     other cases we hit a bug (incorrect node sharing is the
1701 		     most common reason for missing edges).  */
1702 		  gcc_assert (dest->needed || !dest->analyzed
1703 		  	      || !id->src_node->analyzed);
1704 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1705 		    cgraph_create_edge_including_clones
1706 		      (id->dst_node, dest, orig_stmt, stmt, bb->count,
1707 		       compute_call_stmt_bb_frequency (id->dst_node->decl,
1708 		       				       copy_basic_block),
1709 		       bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1710 		  else
1711 		    cgraph_create_edge (id->dst_node, dest, stmt,
1712 					bb->count,
1713 					compute_call_stmt_bb_frequency
1714 					  (id->dst_node->decl, copy_basic_block),
1715 					bb->loop_depth)->inline_failed
1716 		      = CIF_ORIGINALLY_INDIRECT_CALL;
1717 		  if (dump_file)
1718 		    {
1719 		      fprintf (dump_file, "Created new direct edge to %s",
1720 			       cgraph_node_name (dest));
1721 		    }
1722 		}
1723 
1724 	      flags = gimple_call_flags (stmt);
1725 	      if (flags & ECF_MAY_BE_ALLOCA)
1726 		cfun->calls_alloca = true;
1727 	      if (flags & ECF_RETURNS_TWICE)
1728 		cfun->calls_setjmp = true;
1729 	    }
1730 
1731 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1732 				      id->eh_map, id->eh_lp_nr);
1733 
1734 	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1735 	    {
1736 	      ssa_op_iter i;
1737 	      tree def;
1738 
1739 	      find_new_referenced_vars (gsi_stmt (copy_gsi));
1740 	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1741 		if (TREE_CODE (def) == SSA_NAME)
1742 		  SSA_NAME_DEF_STMT (def) = stmt;
1743 	    }
1744 
1745 	  gsi_next (&copy_gsi);
1746 	}
1747       while (!gsi_end_p (copy_gsi));
1748 
1749       copy_gsi = gsi_last_bb (copy_basic_block);
1750     }
1751 
1752   return copy_basic_block;
1753 }
1754 
1755 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1756    form is quite easy, since the dominator relationship for the old basic
1757    blocks does not change.
1758 
1759    There is, however, an exception where inlining might change the dominator
1760    relation across EH edges from basic blocks within the inlined function
1761    to landing pads in the function we inline into.
1762 
1763    The function fills in PHI_RESULTs of such PHI nodes if they refer
1764    to gimple regs.  Otherwise, the function marks PHI_RESULTs of such
1765    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
1766    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1767    set, and this means that there will be no overlapping live ranges
1768    for the underlying symbol.
1769 
1770    This might change in the future if we allow redirecting of EH edges and
1771    we might then want to change the way we build the CFG pre-inlining to
1772    include all the possible edges.  */
1773 static void
1774 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1775 				  bool can_throw, bool nonlocal_goto)
1776 {
1777   edge e;
1778   edge_iterator ei;
1779 
1780   FOR_EACH_EDGE (e, ei, bb->succs)
1781     if (!e->dest->aux
1782 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1783       {
1784 	gimple phi;
1785 	gimple_stmt_iterator si;
1786 
1787 	if (!nonlocal_goto)
1788 	  gcc_assert (e->flags & EDGE_EH);
1789 
1790 	if (!can_throw)
1791 	  gcc_assert (!(e->flags & EDGE_EH));
1792 
1793 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1794 	  {
1795 	    edge re;
1796 
1797 	    phi = gsi_stmt (si);
1798 
1799 	    /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
1800 	    gcc_assert (!e->dest->aux);
1801 
1802 	    gcc_assert ((e->flags & EDGE_EH)
1803 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1804 
1805 	    if (!is_gimple_reg (PHI_RESULT (phi)))
1806 	      {
1807 		mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1808 		continue;
1809 	      }
1810 
1811 	    re = find_edge (ret_bb, e->dest);
1812 	    gcc_assert (re);
1813 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1814 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1815 
1816 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1817 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1818 	  }
1819       }
1820 }
1821 
1822 
1823 /* Copy edges from BB into its copy constructed earlier, scale profile
1824    accordingly.  EH edges will be taken care of later.  Assume the aux
1825    pointers point to the copies of each BB.  Return true if any
1826    debug stmts are left after a statement that must end the basic block.  */
1827 
1828 static bool
1829 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1830 {
1831   basic_block new_bb = (basic_block) bb->aux;
1832   edge_iterator ei;
1833   edge old_edge;
1834   gimple_stmt_iterator si;
1835   int flags;
1836   bool need_debug_cleanup = false;
1837 
1838   /* Use the indices from the original blocks to create edges for the
1839      new ones.  */
1840   FOR_EACH_EDGE (old_edge, ei, bb->succs)
1841     if (!(old_edge->flags & EDGE_EH))
1842       {
1843 	edge new_edge;
1844 
1845 	flags = old_edge->flags;
1846 
1847 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
1848 	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1849 	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
1850 	  flags |= EDGE_FALLTHRU;
1851 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1852 	new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1853 	new_edge->probability = old_edge->probability;
1854       }
1855 
1856   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1857     return false;
1858 
1859   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1860     {
1861       gimple copy_stmt;
1862       bool can_throw, nonlocal_goto;
1863 
1864       copy_stmt = gsi_stmt (si);
1865       if (!is_gimple_debug (copy_stmt))
1866 	{
1867 	  update_stmt (copy_stmt);
1868 	  if (gimple_in_ssa_p (cfun))
1869 	    mark_symbols_for_renaming (copy_stmt);
1870 	}
1871 
1872       /* Do this before the possible split_block.  */
1873       gsi_next (&si);
1874 
1875       /* If this tree could throw an exception, there are two
1876          cases where we need to add abnormal edge(s): the
1877          tree wasn't in a region and there is a "current
1878          region" in the caller; or the original tree had
1879          EH edges.  In both cases split the block after the tree,
1880          and add abnormal edge(s) as needed; we need both
1881          those from the callee and the caller.
1882          We check whether the copy can throw, because the const
1883          propagation can change an INDIRECT_REF which throws
1884          into a COMPONENT_REF which doesn't.  If the copy
1885          can throw, the original could also throw.  */
1886       can_throw = stmt_can_throw_internal (copy_stmt);
1887       nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1888 
1889       if (can_throw || nonlocal_goto)
1890 	{
1891 	  if (!gsi_end_p (si))
1892 	    {
1893 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1894 		gsi_next (&si);
1895 	      if (gsi_end_p (si))
1896 		need_debug_cleanup = true;
1897 	    }
1898 	  if (!gsi_end_p (si))
1899 	    /* Note that bb's predecessor edges aren't necessarily
1900 	       right at this point; split_block doesn't care.  */
1901 	    {
1902 	      edge e = split_block (new_bb, copy_stmt);
1903 
1904 	      new_bb = e->dest;
1905 	      new_bb->aux = e->src->aux;
1906 	      si = gsi_start_bb (new_bb);
1907 	    }
1908 	}
1909 
1910       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1911 	make_eh_dispatch_edges (copy_stmt);
1912       else if (can_throw)
1913 	make_eh_edges (copy_stmt);
1914 
1915       if (nonlocal_goto)
1916 	make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1917 
1918       if ((can_throw || nonlocal_goto)
1919 	  && gimple_in_ssa_p (cfun))
1920 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1921 					  can_throw, nonlocal_goto);
1922     }
1923   return need_debug_cleanup;
1924 }
1925 
1926 /* Copy the PHIs.  All blocks and edges are copied; some blocks
1927    were possibly split and new outgoing EH edges inserted.
1928    BB points to the block of the original function and AUX pointers
1929    link the original and newly copied blocks.  */
1930 
1931 static void
1932 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1933 {
1934   basic_block const new_bb = (basic_block) bb->aux;
1935   edge_iterator ei;
1936   gimple phi;
1937   gimple_stmt_iterator si;
1938   edge new_edge;
1939   bool inserted = false;
1940 
1941   for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1942     {
1943       tree res, new_res;
1944       gimple new_phi;
1945 
1946       phi = gsi_stmt (si);
1947       res = PHI_RESULT (phi);
1948       new_res = res;
1949       if (is_gimple_reg (res))
1950 	{
1951 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
1952 	  SSA_NAME_DEF_STMT (new_res)
1953 	    = new_phi = create_phi_node (new_res, new_bb);
1954 	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1955 	    {
1956 	      edge const old_edge
1957 		= find_edge ((basic_block) new_edge->src->aux, bb);
1958 	      tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1959 	      tree new_arg = arg;
1960 	      tree block = id->block;
1961 	      id->block = NULL_TREE;
1962 	      walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1963 	      id->block = block;
1964 	      gcc_assert (new_arg);
1965 	      /* With return slot optimization we can end up with
1966 	         non-gimple (foo *)&this->m, fix that here.  */
1967 	      if (TREE_CODE (new_arg) != SSA_NAME
1968 		  && TREE_CODE (new_arg) != FUNCTION_DECL
1969 		  && !is_gimple_val (new_arg))
1970 		{
1971 		  gimple_seq stmts = NULL;
1972 		  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1973 		  gsi_insert_seq_on_edge (new_edge, stmts);
1974 		  inserted = true;
1975 		}
1976 	      add_phi_arg (new_phi, new_arg, new_edge,
1977 			   gimple_phi_arg_location_from_edge (phi, old_edge));
1978 	    }
1979 	}
1980     }
1981 
1982   /* Commit the delayed edge insertions.  */
1983   if (inserted)
1984     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1985       gsi_commit_one_edge_insert (new_edge, NULL);
1986 }
1987 
1988 
1989 /* Wrapper for remap_decl so it can be used as a callback.  */
1990 
1991 static tree
1992 remap_decl_1 (tree decl, void *data)
1993 {
1994   return remap_decl (decl, (copy_body_data *) data);
1995 }
1996 
1997 /* Build the struct function and associated data structures for the new
1998    clone NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */
1999 
2000 static void
2001 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2002 {
2003   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2004   gcov_type count_scale;
2005 
2006   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2007     count_scale = (REG_BR_PROB_BASE * count
2008 		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2009   else
2010     count_scale = REG_BR_PROB_BASE;
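  /* Worked example (illustrative numbers): REG_BR_PROB_BASE is 10000,
     so for an entry-block count of 200 and a call-site COUNT of 50,
     count_scale is 10000 * 50 / 200 == 2500, and every copied count
     below is multiplied by 2500 / 10000 == 1/4.  */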
2011 
2012   /* Register specific tree functions.  */
2013   gimple_register_cfg_hooks ();
2014 
2015   /* Get clean struct function.  */
2016   push_struct_function (new_fndecl);
2017 
2018   /* We will rebuild these, so just sanity check that they are empty.  */
2019   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2020   gcc_assert (cfun->local_decls == NULL);
2021   gcc_assert (cfun->cfg == NULL);
2022   gcc_assert (cfun->decl == new_fndecl);
2023 
2024   /* Copy items we preserve during cloning.  */
2025   cfun->static_chain_decl = src_cfun->static_chain_decl;
2026   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2027   cfun->function_end_locus = src_cfun->function_end_locus;
2028   cfun->curr_properties = src_cfun->curr_properties;
2029   cfun->last_verified = src_cfun->last_verified;
2030   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2031   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2032   cfun->function_frequency = src_cfun->function_frequency;
2033   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2034   cfun->stdarg = src_cfun->stdarg;
2035   cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2036   cfun->after_inlining = src_cfun->after_inlining;
2037   cfun->returns_struct = src_cfun->returns_struct;
2038   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2039   cfun->after_tree_profile = src_cfun->after_tree_profile;
2040 
2041   init_empty_tree_cfg ();
2042 
2043   profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2044   ENTRY_BLOCK_PTR->count =
2045     (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2046      REG_BR_PROB_BASE);
2047   ENTRY_BLOCK_PTR->frequency
2048     = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2049   EXIT_BLOCK_PTR->count =
2050     (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2051      REG_BR_PROB_BASE);
2052   EXIT_BLOCK_PTR->frequency =
2053     EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2054   if (src_cfun->eh)
2055     init_eh_for_function ();
2056 
2057   if (src_cfun->gimple_df)
2058     {
2059       init_tree_ssa (cfun);
2060       cfun->gimple_df->in_ssa_p = true;
2061       init_ssa_operands ();
2062     }
2063   pop_cfun ();
2064 }
2065 
2066 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2067    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2068    successor has multiple predecessors, reset the debug stmts' values;
2069    otherwise keep them.  */
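
/* E.g. (illustrative): if NEW_BB ends in

     call ();          <-- can throw internally
     # DEBUG y => x

   the debug bind cannot stay after the statement that must end the
   block, so it is moved, or copied, to the destinations of NEW_BB's
   outgoing edges.  */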
2070 
2071 static void
2072 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2073 {
2074   edge e;
2075   edge_iterator ei;
2076   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2077 
2078   if (gsi_end_p (si)
2079       || gsi_one_before_end_p (si)
2080       || !(stmt_can_throw_internal (gsi_stmt (si))
2081 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2082     return;
2083 
2084   FOR_EACH_EDGE (e, ei, new_bb->succs)
2085     {
2086       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2087       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2088       while (is_gimple_debug (gsi_stmt (ssi)))
2089 	{
2090 	  gimple stmt = gsi_stmt (ssi), new_stmt;
2091 	  tree var;
2092 	  tree value;
2093 
2094 	  /* For the last edge move the debug stmts instead of copying
2095 	     them.  */
2096 	  if (ei_one_before_end_p (ei))
2097 	    {
2098 	      si = ssi;
2099 	      gsi_prev (&ssi);
2100 	      if (!single_pred_p (e->dest))
2101 		gimple_debug_bind_reset_value (stmt);
2102 	      gsi_remove (&si, false);
2103 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2104 	      continue;
2105 	    }
2106 
2107 	  var = gimple_debug_bind_get_var (stmt);
2108 	  if (single_pred_p (e->dest))
2109 	    {
2110 	      value = gimple_debug_bind_get_value (stmt);
2111 	      value = unshare_expr (value);
2112 	    }
2113 	  else
2114 	    value = NULL_TREE;
2115 	  new_stmt = gimple_build_debug_bind (var, value, stmt);
2116 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2117 	  VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2118 	  gsi_prev (&ssi);
2119 	}
2120     }
2121 }
2122 
2123 /* Make a copy of the body of FN so that it can be inserted inline in
2124    another function.  Walks FN via CFG, returns new fndecl.  */
2125 
2126 static tree
2127 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2128 	       basic_block entry_block_map, basic_block exit_block_map)
2129 {
2130   tree callee_fndecl = id->src_fn;
2131   /* Original cfun for the callee, doesn't change.  */
2132   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2133   struct function *cfun_to_copy;
2134   basic_block bb;
2135   tree new_fndecl = NULL;
2136   bool need_debug_cleanup = false;
2137   gcov_type count_scale;
2138   int last;
2139 
2140   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2141     count_scale = (REG_BR_PROB_BASE * count
2142 		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2143   else
2144     count_scale = REG_BR_PROB_BASE;
2145 
2146   /* Register specific tree functions.  */
2147   gimple_register_cfg_hooks ();
2148 
2149   /* Must have a CFG here at this point.  */
2150   gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2151 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2152 
2153   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2154 
2155   ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2156   EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2157   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2158   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2159 
2160   /* Duplicate any exception-handling regions.  */
2161   if (cfun->eh)
2162     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2163 				       remap_decl_1, id);
2164 
2165   /* Use aux pointers to map the original blocks to their copies.  */
2166   FOR_EACH_BB_FN (bb, cfun_to_copy)
2167     {
2168       basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2169       bb->aux = new_bb;
2170       new_bb->aux = bb;
2171     }
2172 
2173   last = last_basic_block;
2174 
2175   /* Now that we've duplicated the blocks, duplicate their edges.  */
2176   FOR_ALL_BB_FN (bb, cfun_to_copy)
2177     need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2178 
2179   if (gimple_in_ssa_p (cfun))
2180     FOR_ALL_BB_FN (bb, cfun_to_copy)
2181       copy_phis_for_bb (bb, id);
2182 
2183   FOR_ALL_BB_FN (bb, cfun_to_copy)
2184     {
2185       if (need_debug_cleanup
2186 	  && bb->index != ENTRY_BLOCK
2187 	  && bb->index != EXIT_BLOCK)
2188 	maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2189       ((basic_block)bb->aux)->aux = NULL;
2190       bb->aux = NULL;
2191     }
2192 
2193   /* Zero out AUX fields of blocks newly created during EH edge
2194      insertion.  */
2195   for (; last < last_basic_block; last++)
2196     {
2197       if (need_debug_cleanup)
2198 	maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2199       BASIC_BLOCK (last)->aux = NULL;
2200     }
2201   entry_block_map->aux = NULL;
2202   exit_block_map->aux = NULL;
2203 
2204   if (id->eh_map)
2205     {
2206       pointer_map_destroy (id->eh_map);
2207       id->eh_map = NULL;
2208     }
2209 
2210   return new_fndecl;
2211 }
2212 
2213 /* Copy the debug STMT using ID.  We deal with these statements in a
2214    special way: if any variable in their VALUE expression wasn't
2215    remapped yet, we won't remap it, because that would get decl uids
2216    out of sync, causing codegen differences between -g and -g0.  If
2217    this arises, we drop the VALUE expression altogether.  */
2218 
2219 static void
2220 copy_debug_stmt (gimple stmt, copy_body_data *id)
2221 {
2222   tree t, *n;
2223   struct walk_stmt_info wi;
2224 
2225   t = id->block;
2226   if (gimple_block (stmt))
2227     {
2228       tree *n;
2229       n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2230       if (n)
2231 	t = *n;
2232     }
2233   gimple_set_block (stmt, t);
2234 
2235   /* Remap all the operands in COPY.  */
2236   memset (&wi, 0, sizeof (wi));
2237   wi.info = id;
2238 
2239   processing_debug_stmt = 1;
2240 
2241   t = gimple_debug_bind_get_var (stmt);
2242 
2243   if (TREE_CODE (t) == PARM_DECL && id->debug_map
2244       && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2245     {
2246       gcc_assert (TREE_CODE (*n) == VAR_DECL);
2247       t = *n;
2248     }
2249   else if (TREE_CODE (t) == VAR_DECL
2250 	   && !TREE_STATIC (t)
2251 	   && gimple_in_ssa_p (cfun)
2252 	   && !pointer_map_contains (id->decl_map, t)
2253 	   && !var_ann (t))
2254     /* T is a non-localized variable.  */;
2255   else
2256     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2257 
2258   gimple_debug_bind_set_var (stmt, t);
2259 
2260   if (gimple_debug_bind_has_value_p (stmt))
2261     walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2262 	       remap_gimple_op_r, &wi, NULL);
2263 
2264   /* Punt if any decl couldn't be remapped.  */
2265   if (processing_debug_stmt < 0)
2266     gimple_debug_bind_reset_value (stmt);
2267 
2268   processing_debug_stmt = 0;
2269 
2270   update_stmt (stmt);
2271   if (gimple_in_ssa_p (cfun))
2272     mark_symbols_for_renaming (stmt);
2273 }
2274 
2275 /* Process deferred debug stmts.  In order to give values better odds
2276    of being successfully remapped, we delay the processing of debug
2277    stmts until all other stmts that might require remapping are
2278    processed.  */
2279 
2280 static void
2281 copy_debug_stmts (copy_body_data *id)
2282 {
2283   size_t i;
2284   gimple stmt;
2285 
2286   if (!id->debug_stmts)
2287     return;
2288 
2289   for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2290     copy_debug_stmt (stmt, id);
2291 
2292   VEC_free (gimple, heap, id->debug_stmts);
2293 }
2294 
2295 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2296    another function.  */
2297 
2298 static tree
2299 copy_tree_body (copy_body_data *id)
2300 {
2301   tree fndecl = id->src_fn;
2302   tree body = DECL_SAVED_TREE (fndecl);
2303 
2304   walk_tree (&body, copy_tree_body_r, id, NULL);
2305 
2306   return body;
2307 }
2308 
2309 /* Make a copy of the body of FN so that it can be inserted inline in
2310    another function.  */
2311 
2312 static tree
2313 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2314 	   basic_block entry_block_map, basic_block exit_block_map)
2315 {
2316   tree fndecl = id->src_fn;
2317   tree body;
2318 
2319   /* If this body has a CFG, walk CFG and copy.  */
2320   gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2321   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2322   copy_debug_stmts (id);
2323 
2324   return body;
2325 }
2326 
2327 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2328    defined in function FN, or of a data member thereof.  */
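
/* E.g. (hypothetical): for "int x;" declared inside FN, both "&x" and
   "&x_struct.field" for a local "x_struct" satisfy this predicate,
   while "&some_global" does not.  */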
2329 
2330 static bool
2331 self_inlining_addr_expr (tree value, tree fn)
2332 {
2333   tree var;
2334 
2335   if (TREE_CODE (value) != ADDR_EXPR)
2336     return false;
2337 
2338   var = get_base_address (TREE_OPERAND (value, 0));
2339 
2340   return var && auto_var_in_fn_p (var, fn);
2341 }
2342 
2343 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2344    lexical block and line number information from BASE_STMT, if given,
2345    or from the last stmt of the block otherwise.  */
2346 
2347 static gimple
2348 insert_init_debug_bind (copy_body_data *id,
2349 			basic_block bb, tree var, tree value,
2350 			gimple base_stmt)
2351 {
2352   gimple note;
2353   gimple_stmt_iterator gsi;
2354   tree tracked_var;
2355 
2356   if (!gimple_in_ssa_p (id->src_cfun))
2357     return NULL;
2358 
2359   if (!MAY_HAVE_DEBUG_STMTS)
2360     return NULL;
2361 
2362   tracked_var = target_for_debug_bind (var);
2363   if (!tracked_var)
2364     return NULL;
2365 
2366   if (bb)
2367     {
2368       gsi = gsi_last_bb (bb);
2369       if (!base_stmt && !gsi_end_p (gsi))
2370 	base_stmt = gsi_stmt (gsi);
2371     }
2372 
2373   note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2374 
2375   if (bb)
2376     {
2377       if (!gsi_end_p (gsi))
2378 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2379       else
2380 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2381     }
2382 
2383   return note;
2384 }
2385 
2386 static void
2387 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2388 {
2389   /* If VAR represents a zero-sized variable, it's possible that the
2390      assignment statement may result in no gimple statements.  */
2391   if (init_stmt)
2392     {
2393       gimple_stmt_iterator si = gsi_last_bb (bb);
2394 
2395       /* We can end up with init statements that store to a non-register
2396          from a rhs with a conversion.  Handle that here by forcing the
2397 	 rhs into a temporary.  gimple_regimplify_operands is not
2398 	 prepared to do this for us.  */
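      /* E.g. (illustrative): "mem.f = (float) i_reg" is rewritten here
	 as "tmp = (float) i_reg; mem.f = tmp".  */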
2399       if (!is_gimple_debug (init_stmt)
2400 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2401 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2402 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2403 	{
2404 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2405 			     gimple_expr_type (init_stmt),
2406 			     gimple_assign_rhs1 (init_stmt));
2407 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2408 					  GSI_NEW_STMT);
2409 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2410 	  gimple_assign_set_rhs1 (init_stmt, rhs);
2411 	}
2412       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2413       gimple_regimplify_operands (init_stmt, &si);
2414       mark_symbols_for_renaming (init_stmt);
2415 
2416       if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2417 	{
2418 	  tree var, def = gimple_assign_lhs (init_stmt);
2419 
2420 	  if (TREE_CODE (def) == SSA_NAME)
2421 	    var = SSA_NAME_VAR (def);
2422 	  else
2423 	    var = def;
2424 
2425 	  insert_init_debug_bind (id, bb, var, def, init_stmt);
2426 	}
2427     }
2428 }
2429 
2430 /* Initialize parameter P with VALUE.  If needed, produce an init
2431    statement at the end of BB.  When BB is NULL, we return the init
2432    statement to be output later.  */
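
/* Illustrative sketch (hypothetical names): when inlining the call
   "f (a + 1)" for "void f (int p)", this creates a local VAR_DECL
   copy of P in the caller and, in the general case, emits
   "p = a + 1" at the end of BB; the special cases below propagate
   the value directly instead.  */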
2433 static gimple
2434 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2435 		     basic_block bb, tree *vars)
2436 {
2437   gimple init_stmt = NULL;
2438   tree var;
2439   tree rhs = value;
2440   tree def = (gimple_in_ssa_p (cfun)
2441 	      ? gimple_default_def (id->src_cfun, p) : NULL);
2442 
2443   if (value
2444       && value != error_mark_node
2445       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2446     {
2447       if (fold_convertible_p (TREE_TYPE (p), value))
2448 	rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2449       else
2450 	/* ???  For valid (GIMPLE) programs we should not end up here.
2451 	   Still, if something has gone wrong and we end up with truly
2452 	   mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2453 	   to not leak invalid GIMPLE to the following passes.  */
2454 	rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2455     }
2456 
2457   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
2458      here since the type of this decl must be visible to the calling
2459      function.  */
2460   var = copy_decl_to_var (p, id);
2461 
2462   /* We're actually using the newly-created var.  */
2463   if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2464     {
2465       get_var_ann (var);
2466       add_referenced_var (var);
2467     }
2468 
2469   /* Declare this new variable.  */
2470   TREE_CHAIN (var) = *vars;
2471   *vars = var;
2472 
2473   /* Make gimplifier happy about this variable.  */
2474   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2475 
2476   /* If the parameter is never assigned to and has no SSA_NAMEs created,
2477      we would not need to create a new variable here at all, if it
2478      weren't for debug info.  Still, we can just use the argument
2479      value.  */
2480   if (TREE_READONLY (p)
2481       && !TREE_ADDRESSABLE (p)
2482       && value && !TREE_SIDE_EFFECTS (value)
2483       && !def)
2484     {
2485       /* We may produce non-gimple trees by adding NOPs or introduce
2486 	 invalid sharing when the operand is not really constant.
2487 	 It is not a big deal to prohibit constant propagation here as
2488 	 we will constant propagate in the DOM1 pass anyway.  */
2489       if (is_gimple_min_invariant (value)
2490 	  && useless_type_conversion_p (TREE_TYPE (p),
2491 						 TREE_TYPE (value))
2492 	  /* We have to be very careful about ADDR_EXPR.  Make sure
2493 	     the base variable isn't a local variable of the inlined
2494 	     function, e.g., when doing recursive inlining, direct or
2495 	     mutually-recursive or whatever, which is why we don't
2496 	     just test whether fn == current_function_decl.  */
2497 	  && ! self_inlining_addr_expr (value, fn))
2498 	{
2499 	  insert_decl_map (id, p, value);
2500 	  insert_debug_decl_map (id, p, var);
2501 	  return insert_init_debug_bind (id, bb, var, value, NULL);
2502 	}
2503     }
2504 
2505   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2506      that way, when the PARM_DECL is encountered, it will be
2507      automatically replaced by the VAR_DECL.  */
2508   insert_decl_map (id, p, var);
2509 
2510   /* Even if P was TREE_READONLY, the new VAR should not be.
2511      In the original code, we would have constructed a
2512      temporary, and then the function body would have never
2513      changed the value of P.  However, now, we will be
2514      constructing VAR directly.  The constructor body may
2515      change its value multiple times as it is being
2516      constructed.  Therefore, it must not be TREE_READONLY;
2517      the back-end assumes that TREE_READONLY variable is
2518      assigned to only once.  */
2519   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2520     TREE_READONLY (var) = 0;
2521 
2522   /* If there is no setup required and we are in SSA, take the easy route
2523      replacing all SSA names representing the function parameter by the
2524      SSA name passed to the function.
2525 
2526      We need to construct a map for the variable anyway, as it might be
2527      used in different SSA names when the parameter is assigned in the function.
2528 
2529      Do the replacement at -O0 for const arguments replaced by a constant.
2530      This is important for builtin_constant_p and other constructs requiring
2531      a constant argument to be visible in the inlined function body.  */
2532   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2533       && (optimize
2534           || (TREE_READONLY (p)
2535 	      && is_gimple_min_invariant (rhs)))
2536       && (TREE_CODE (rhs) == SSA_NAME
2537 	  || is_gimple_min_invariant (rhs))
2538       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2539     {
2540       insert_decl_map (id, def, rhs);
2541       return insert_init_debug_bind (id, bb, var, rhs, NULL);
2542     }
2543 
2544   /* If the value of argument is never used, don't care about initializing
2545      it.  */
2546   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2547     {
2548       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2549       return insert_init_debug_bind (id, bb, var, rhs, NULL);
2550     }
2551 
2552   /* Initialize this VAR_DECL from the equivalent argument.  Convert
2553      the argument to the proper type in case it was promoted.  */
2554   if (value)
2555     {
2556       if (rhs == error_mark_node)
2557 	{
2558 	  insert_decl_map (id, p, var);
2559 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
2560 	}
2561 
2562       STRIP_USELESS_TYPE_CONVERSION (rhs);
2563 
2564       /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2565 	 keep our trees in gimple form.  */
2566       if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2567 	{
2568 	  def = remap_ssa_name (def, id);
2569           init_stmt = gimple_build_assign (def, rhs);
2570 	  SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2571 	  set_default_def (var, NULL);
2572 	}
2573       else
2574         init_stmt = gimple_build_assign (var, rhs);
2575 
2576       if (bb && init_stmt)
2577         insert_init_stmt (id, bb, init_stmt);
2578     }
2579   return init_stmt;
2580 }
2581 
2582 /* Generate code to initialize the parameters of the function at the
2583    top of the stack in ID from the GIMPLE_CALL STMT.  */
2584 
2585 static void
2586 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2587 			       tree fn, basic_block bb)
2588 {
2589   tree parms;
2590   size_t i;
2591   tree p;
2592   tree vars = NULL_TREE;
2593   tree static_chain = gimple_call_chain (stmt);
2594 
2595   /* Figure out what the parameters are.  */
2596   parms = DECL_ARGUMENTS (fn);
2597 
2598   /* Loop through the parameter declarations, replacing each with an
2599      equivalent VAR_DECL, appropriately initialized.  */
2600   for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2601     {
2602       tree val;
2603       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2604       setup_one_parameter (id, p, val, fn, bb, &vars);
2605     }
2606 
2607   /* Initialize the static chain.  */
2608   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2609   gcc_assert (fn != current_function_decl);
2610   if (p)
2611     {
2612       /* No static chain?  Seems like a bug in tree-nested.c.  */
2613       gcc_assert (static_chain);
2614 
2615       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2616     }
2617 
2618   declare_inline_vars (id->block, vars);
2619 }
2620 
2621 
2622 /* Declare a return variable to replace the RESULT_DECL for the
2623    function we are calling.  The returned use expression contains a
2624    use of the new declaration and indicates the return value of the
2625    function.
2626 
2627    RETURN_SLOT, if non-null, is the place where to store the result.  It
2628    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
2629    was the LHS of the MODIFY_EXPR to which this call is the RHS.
2630 
2631    The return value is a (possibly null) value that holds the result
2632    as seen by the caller.  */
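
/* E.g. (illustrative): when inlining "d = f ();", MODIFY_DEST is "d"
   and can often be reused directly as the return variable; for
   "struct S s = f ();" under the return slot optimization,
   RETURN_SLOT is "s" and the result is constructed in place.  */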
2633 
2634 static tree
2635 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2636 {
2637   tree callee = id->src_fn;
2638   tree caller = id->dst_fn;
2639   tree result = DECL_RESULT (callee);
2640   tree callee_type = TREE_TYPE (result);
2641   tree caller_type;
2642   tree var, use;
2643 
2644   /* Handle type-mismatches in the function declaration return type
2645      vs. the call expression.  */
2646   if (modify_dest)
2647     caller_type = TREE_TYPE (modify_dest);
2648   else
2649     caller_type = TREE_TYPE (TREE_TYPE (callee));
2650 
2651   /* We don't need to do anything for functions that don't return
2652      anything.  */
2653   if (!result || VOID_TYPE_P (callee_type))
2654     return NULL_TREE;
2655 
2656   /* If there was a return slot, then the return value is the
2657      dereferenced address of that object.  */
2658   if (return_slot)
2659     {
2660       /* The front end shouldn't have used both return_slot and
2661 	 a modify expression.  */
2662       gcc_assert (!modify_dest);
2663       if (DECL_BY_REFERENCE (result))
2664 	{
2665 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
2666 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2667 
2668 	  /* We are going to construct *&return_slot and we can't do that
2669 	     for variables believed to be not addressable.
2670 
2671 	     FIXME: This check can possibly match, because values returned
2672 	     via return slot optimization are not believed to have their
2673 	     address taken by alias analysis.  */
2674 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2675 	  if (gimple_in_ssa_p (cfun))
2676 	    {
2677 	      HOST_WIDE_INT bitsize;
2678 	      HOST_WIDE_INT bitpos;
2679 	      tree offset;
2680 	      enum machine_mode mode;
2681 	      int unsignedp;
2682 	      int volatilep;
2683 	      tree base;
2684 	      base = get_inner_reference (return_slot, &bitsize, &bitpos,
2685 					  &offset,
2686 					  &mode, &unsignedp, &volatilep,
2687 					  false);
2688 	      if (TREE_CODE (base) == INDIRECT_REF)
2689 		base = TREE_OPERAND (base, 0);
2690 	      if (TREE_CODE (base) == SSA_NAME)
2691 		base = SSA_NAME_VAR (base);
2692 	      mark_sym_for_renaming (base);
2693 	    }
2694 	  var = return_slot_addr;
2695 	}
2696       else
2697 	{
2698 	  var = return_slot;
2699 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
2700 	  TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2701 	}
2702       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2703            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2704 	  && !DECL_GIMPLE_REG_P (result)
2705 	  && DECL_P (var))
2706 	DECL_GIMPLE_REG_P (var) = 0;
2707       use = NULL;
2708       goto done;
2709     }
2710 
2711   /* All types requiring non-trivial constructors should have been handled.  */
2712   gcc_assert (!TREE_ADDRESSABLE (callee_type));
2713 
2714   /* Attempt to avoid creating a new temporary variable.  */
2715   if (modify_dest
2716       && TREE_CODE (modify_dest) != SSA_NAME)
2717     {
2718       bool use_it = false;
2719 
2720       /* We can't use MODIFY_DEST if there's type promotion involved.  */
2721       if (!useless_type_conversion_p (callee_type, caller_type))
2722 	use_it = false;
2723 
2724       /* ??? If we're assigning to a variable sized type, then we must
2725 	 reuse the destination variable, because we've no good way to
2726 	 create variable sized temporaries at this point.  */
2727       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2728 	use_it = true;
2729 
2730       /* If the callee cannot possibly modify MODIFY_DEST, then we can
2731 	 reuse it as the result of the call directly.  Don't do this if
2732 	 it would promote MODIFY_DEST to addressable.  */
2733       else if (TREE_ADDRESSABLE (result))
2734 	use_it = false;
2735       else
2736 	{
2737 	  tree base_m = get_base_address (modify_dest);
2738 
2739 	  /* If the base isn't a decl, then it's a pointer, and we don't
2740 	     know where that's going to go.  */
2741 	  if (!DECL_P (base_m))
2742 	    use_it = false;
2743 	  else if (is_global_var (base_m))
2744 	    use_it = false;
2745 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2746 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2747 		   && !DECL_GIMPLE_REG_P (result)
2748 		   && DECL_GIMPLE_REG_P (base_m))
2749 	    use_it = false;
2750 	  else if (!TREE_ADDRESSABLE (base_m))
2751 	    use_it = true;
2752 	}
2753 
2754       if (use_it)
2755 	{
2756 	  var = modify_dest;
2757 	  use = NULL;
2758 	  goto done;
2759 	}
2760     }
2761 
2762   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2763 
2764   var = copy_result_decl_to_var (result, id);
2765   if (gimple_in_ssa_p (cfun))
2766     {
2767       get_var_ann (var);
2768       add_referenced_var (var);
2769     }
2770 
2771   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2772   DECL_STRUCT_FUNCTION (caller)->local_decls
2773     = tree_cons (NULL_TREE, var,
2774 		 DECL_STRUCT_FUNCTION (caller)->local_decls);
2775 
2776   /* Do not have the rest of GCC warn about this variable as it should
2777      not be visible to the user.  */
2778   TREE_NO_WARNING (var) = 1;
2779 
2780   declare_inline_vars (id->block, var);
2781 
2782   /* Build the use expr.  If the return type of the function was
2783      promoted, convert it back to the expected type.  */
2784   use = var;
2785   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2786     use = fold_convert (caller_type, var);
2787 
2788   STRIP_USELESS_TYPE_CONVERSION (use);
2789 
2790   if (DECL_BY_REFERENCE (result))
2791     {
2792       TREE_ADDRESSABLE (var) = 1;
2793       var = build_fold_addr_expr (var);
2794     }
2795 
2796  done:
2797   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2798      way, when the RESULT_DECL is encountered, it will be
2799      automatically replaced by the VAR_DECL.  */
2800   insert_decl_map (id, result, var);
2801 
2802   /* Remember this so we can ignore it in remap_decls.  */
2803   id->retvar = var;
2804 
2805   return use;
2806 }
2807 
2808 /* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
2809    to a local label.  */
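
/* E.g. (hypothetical GNU C, the computed-goto dispatch idiom):

     void f (void)
     {
       static void *p = &&lab;
       goto *p;
     lab:
       return;
     }

   has the DECL_INITIAL of "p" referring to the local label "lab".  */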
2810 
2811 static tree
2812 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2813 {
2814   tree node = *nodep;
2815   tree fn = (tree) fnp;
2816 
2817   if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2818     return node;
2819 
2820   if (TYPE_P (node))
2821     *walk_subtrees = 0;
2822 
2823   return NULL_TREE;
2824 }
2825 
2826 /* Determine if the function can be copied.  If so return NULL.  If
2827    not return a string describing the reason for failure.  */
2828 
2829 static const char *
2830 copy_forbidden (struct function *fun, tree fndecl)
2831 {
2832   const char *reason = fun->cannot_be_copied_reason;
2833   tree step;
2834 
2835   /* Only examine the function once.  */
2836   if (fun->cannot_be_copied_set)
2837     return reason;
2838 
2839   /* We cannot copy a function that receives a non-local goto
2840      because we cannot remap the destination label used in the
2841      function that is performing the non-local goto.  */
2842   /* ??? Actually, this should be possible, if we work at it.
2843      No doubt there's just a handful of places that simply
2844      assume it doesn't happen and don't substitute properly.  */
2845   if (fun->has_nonlocal_label)
2846     {
2847       reason = G_("function %q+F can never be copied "
2848 		  "because it receives a non-local goto");
2849       goto fail;
2850     }
2851 
2852   for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2853     {
2854       tree decl = TREE_VALUE (step);
2855 
2856       if (TREE_CODE (decl) == VAR_DECL
2857 	  && TREE_STATIC (decl)
2858 	  && !DECL_EXTERNAL (decl)
2859 	  && DECL_INITIAL (decl)
2860 	  && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2861 					   has_label_address_in_static_1,
2862 					   fndecl))
2863 	{
2864 	  reason = G_("function %q+F can never be copied because it saves "
2865 		      "address of local label in a static variable");
2866 	  goto fail;
2867 	}
2868     }
2869 
2870  fail:
2871   fun->cannot_be_copied_reason = reason;
2872   fun->cannot_be_copied_set = true;
2873   return reason;
2874 }
2875 
2876 
2877 static const char *inline_forbidden_reason;
2878 
2879 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
2880    iff a function cannot be inlined.  Also sets the reason why.  */
2881 
2882 static tree
2883 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2884 			 struct walk_stmt_info *wip)
2885 {
2886   tree fn = (tree) wip->info;
2887   tree t;
2888   gimple stmt = gsi_stmt (*gsi);
2889 
2890   switch (gimple_code (stmt))
2891     {
2892     case GIMPLE_CALL:
2893       /* Refuse to inline an alloca call unless the user explicitly forced
2894 	 it, as this may change the program's memory overhead drastically
2895 	 when the function using alloca is called in a loop.  In the GCC
2896 	 included in SPEC2000, inlining into schedule_block caused it to
2897 	 require 2GB of RAM instead of 256MB.  */
2898       if (gimple_alloca_call_p (stmt)
2899 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2900 	{
2901 	  inline_forbidden_reason
2902 	    = G_("function %q+F can never be inlined because it uses "
2903 		 "alloca (override using the always_inline attribute)");
2904 	  *handled_ops_p = true;
2905 	  return fn;
2906 	}
2907 
2908       t = gimple_call_fndecl (stmt);
2909       if (t == NULL_TREE)
2910 	break;
2911 
2912       /* We cannot inline functions that call setjmp.  */
2913       if (setjmp_call_p (t))
2914 	{
2915 	  inline_forbidden_reason
2916 	    = G_("function %q+F can never be inlined because it uses setjmp");
2917 	  *handled_ops_p = true;
2918 	  return t;
2919 	}
2920 
2921       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2922 	switch (DECL_FUNCTION_CODE (t))
2923 	  {
2924 	    /* We cannot inline functions that take a variable number of
2925 	       arguments.  */
2926 	  case BUILT_IN_VA_START:
2927 	  case BUILT_IN_NEXT_ARG:
2928 	  case BUILT_IN_VA_END:
2929 	    inline_forbidden_reason
2930 	      = G_("function %q+F can never be inlined because it "
2931 		   "uses variable argument lists");
2932 	    *handled_ops_p = true;
2933 	    return t;
2934 
2935 	  case BUILT_IN_LONGJMP:
2936 	    /* We can't inline functions that call __builtin_longjmp at
2937 	       all.  The non-local goto machinery really requires the
2938 	       destination be in a different function.  If we allow the
2939 	       function calling __builtin_longjmp to be inlined into the
2940 	       function calling __builtin_setjmp, Things will Go Awry.  */
2941 	    inline_forbidden_reason
2942 	      = G_("function %q+F can never be inlined because "
2943 		   "it uses setjmp-longjmp exception handling");
2944 	    *handled_ops_p = true;
2945 	    return t;
2946 
2947 	  case BUILT_IN_NONLOCAL_GOTO:
2948 	    /* Similarly.  */
2949 	    inline_forbidden_reason
2950 	      = G_("function %q+F can never be inlined because "
2951 		   "it uses non-local goto");
2952 	    *handled_ops_p = true;
2953 	    return t;
2954 
2955 	  case BUILT_IN_RETURN:
2956 	  case BUILT_IN_APPLY_ARGS:
2957 	    /* If a __builtin_apply_args caller were inlined,
2958 	       it would be saving the arguments of the function it has
2959 	       been inlined into.  Similarly, __builtin_return would
2960 	       return from the function the call has been inlined into.  */
2961 	    inline_forbidden_reason
2962 	      = G_("function %q+F can never be inlined because "
2963 		   "it uses __builtin_return or __builtin_apply_args");
2964 	    *handled_ops_p = true;
2965 	    return t;
2966 
2967 	  default:
2968 	    break;
2969 	  }
2970       break;
2971 
2972     case GIMPLE_GOTO:
2973       t = gimple_goto_dest (stmt);
2974 
2975       /* We will not inline a function which uses computed goto.  The
2976 	 addresses of its local labels, which may be tucked into
2977 	 global storage, are of course not constant across
2978 	 instantiations, which causes unexpected behavior.  */
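      /* E.g. (hypothetical): "goto *dispatch[i];", where dispatch is a
	 static table of &&label addresses (see the sketch before
	 has_label_address_in_static_1), is such a computed goto.  */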
2979       if (TREE_CODE (t) != LABEL_DECL)
2980 	{
2981 	  inline_forbidden_reason
2982 	    = G_("function %q+F can never be inlined "
2983 		 "because it contains a computed goto");
2984 	  *handled_ops_p = true;
2985 	  return t;
2986 	}
2987       break;
2988 
2989     default:
2990       break;
2991     }
2992 
2993   *handled_ops_p = false;
2994   return NULL_TREE;
2995 }
2996 
2997 /* Return true if FNDECL is a function that cannot be inlined into
2998    another one.  */
2999 
3000 static bool
3001 inline_forbidden_p (tree fndecl)
3002 {
3003   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3004   struct walk_stmt_info wi;
3005   struct pointer_set_t *visited_nodes;
3006   basic_block bb;
3007   bool forbidden_p = false;
3008 
3009   /* First check for shared reasons not to copy the code.  */
3010   inline_forbidden_reason = copy_forbidden (fun, fndecl);
3011   if (inline_forbidden_reason != NULL)
3012     return true;
3013 
3014   /* Next, walk the statements of the function looking for
3015      constructs we can't handle, or that are non-optimal for inlining.  */
3016   visited_nodes = pointer_set_create ();
3017   memset (&wi, 0, sizeof (wi));
3018   wi.info = (void *) fndecl;
3019   wi.pset = visited_nodes;
3020 
3021   FOR_EACH_BB_FN (bb, fun)
3022     {
3023       gimple ret;
3024       gimple_seq seq = bb_seq (bb);
3025       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3026       forbidden_p = (ret != NULL);
3027       if (forbidden_p)
3028 	break;
3029     }
3030 
3031   pointer_set_destroy (visited_nodes);
3032   return forbidden_p;
3033 }
3034 
3035 /* Returns nonzero if FN is a function that does not have any
3036    fundamental inline-blocking properties.  */
3037 
3038 bool
3039 tree_inlinable_function_p (tree fn)
3040 {
3041   bool inlinable = true;
3042   bool do_warning;
3043   tree always_inline;
3044 
3045   /* If we've already decided this function shouldn't be inlined,
3046      there's no need to check again.  */
3047   if (DECL_UNINLINABLE (fn))
3048     return false;
3049 
3050   /* We only warn for functions declared `inline' by the user.  */
3051   do_warning = (warn_inline
3052 		&& DECL_DECLARED_INLINE_P (fn)
3053 		&& !DECL_NO_INLINE_WARNING_P (fn)
3054 		&& !DECL_IN_SYSTEM_HEADER (fn));
3055 
3056   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3057 
3058   if (flag_no_inline
3059       && always_inline == NULL)
3060     {
3061       if (do_warning)
3062         warning (OPT_Winline, "function %q+F can never be inlined because it "
3063                  "is suppressed using -fno-inline", fn);
3064       inlinable = false;
3065     }
3066 
3067   /* Don't auto-inline anything that might not be bound within
3068      this unit of translation.  */
3069   else if (!DECL_DECLARED_INLINE_P (fn)
3070 	   && decl_replaceable_p (fn))
3071     inlinable = false;
3072 
3073   else if (!function_attribute_inlinable_p (fn))
3074     {
3075       if (do_warning)
3076         warning (OPT_Winline, "function %q+F can never be inlined because it "
3077                  "uses attributes conflicting with inlining", fn);
3078       inlinable = false;
3079     }
3080 
3081   else if (inline_forbidden_p (fn))
3082     {
3083       /* See if we should warn about uninlinable functions.  Previously,
3084 	 some of these warnings would be issued while trying to expand
3085 	 the function inline, but that would cause multiple warnings
3086 	 about functions that would for example call alloca.  But since
3087 	 this a property of the function, just one warning is enough.
3088 	 this is a property of the function, just one warning is enough.
3089 	 function is not inlinable.  */
3090       if (always_inline)
3091 	sorry (inline_forbidden_reason, fn);
3092       else if (do_warning)
3093 	warning (OPT_Winline, inline_forbidden_reason, fn);
3094 
3095       inlinable = false;
3096     }
3097 
3098   /* Squirrel away the result so that we don't have to check again.  */
3099   DECL_UNINLINABLE (fn) = !inlinable;
3100 
3101   return inlinable;
3102 }
3103 
3104 /* Estimate the cost of a memory move.  Use the machine-dependent
3105    word size and take a possible memcpy call into account.  */
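
/* E.g. (illustrative, target-dependent): with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 4, a 24-byte struct costs (24 + 7) / 8 == 3 units,
   while anything over 32 bytes is costed as a memcpy call, i.e. 4.  */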
3106 
3107 int
3108 estimate_move_cost (tree type)
3109 {
3110   HOST_WIDE_INT size;
3111 
3112   gcc_assert (!VOID_TYPE_P (type));
3113 
3114   size = int_size_in_bytes (type);
3115 
3116   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3117     /* Cost of a memcpy call, 3 arguments and the call.  */
3118     return 4;
3119   else
3120     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3121 }
3122 
3123 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3124 
3125 static int
3126 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3127 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3128 {
3129   switch (code)
3130     {
3131     /* These are "free" conversions, or their presumed cost
3132        is folded into other operations.  */
3133     case RANGE_EXPR:
3134     CASE_CONVERT:
3135     case COMPLEX_EXPR:
3136     case PAREN_EXPR:
3137       return 0;
3138 
3139     /* Assign cost of 1 to usual operations.
3140        ??? We may consider mapping RTL costs to this.  */
3141     case COND_EXPR:
3142     case VEC_COND_EXPR:
3143 
3144     case PLUS_EXPR:
3145     case POINTER_PLUS_EXPR:
3146     case MINUS_EXPR:
3147     case MULT_EXPR:
3148 
3149     case ADDR_SPACE_CONVERT_EXPR:
3150     case FIXED_CONVERT_EXPR:
3151     case FIX_TRUNC_EXPR:
3152 
3153     case NEGATE_EXPR:
3154     case FLOAT_EXPR:
3155     case MIN_EXPR:
3156     case MAX_EXPR:
3157     case ABS_EXPR:
3158 
3159     case LSHIFT_EXPR:
3160     case RSHIFT_EXPR:
3161     case LROTATE_EXPR:
3162     case RROTATE_EXPR:
3163     case VEC_LSHIFT_EXPR:
3164     case VEC_RSHIFT_EXPR:
3165 
3166     case BIT_IOR_EXPR:
3167     case BIT_XOR_EXPR:
3168     case BIT_AND_EXPR:
3169     case BIT_NOT_EXPR:
3170 
3171     case TRUTH_ANDIF_EXPR:
3172     case TRUTH_ORIF_EXPR:
3173     case TRUTH_AND_EXPR:
3174     case TRUTH_OR_EXPR:
3175     case TRUTH_XOR_EXPR:
3176     case TRUTH_NOT_EXPR:
3177 
3178     case LT_EXPR:
3179     case LE_EXPR:
3180     case GT_EXPR:
3181     case GE_EXPR:
3182     case EQ_EXPR:
3183     case NE_EXPR:
3184     case ORDERED_EXPR:
3185     case UNORDERED_EXPR:
3186 
3187     case UNLT_EXPR:
3188     case UNLE_EXPR:
3189     case UNGT_EXPR:
3190     case UNGE_EXPR:
3191     case UNEQ_EXPR:
3192     case LTGT_EXPR:
3193 
3194     case CONJ_EXPR:
3195 
3196     case PREDECREMENT_EXPR:
3197     case PREINCREMENT_EXPR:
3198     case POSTDECREMENT_EXPR:
3199     case POSTINCREMENT_EXPR:
3200 
3201     case REALIGN_LOAD_EXPR:
3202 
3203     case REDUC_MAX_EXPR:
3204     case REDUC_MIN_EXPR:
3205     case REDUC_PLUS_EXPR:
3206     case WIDEN_SUM_EXPR:
3207     case WIDEN_MULT_EXPR:
3208     case DOT_PROD_EXPR:
3209 
3210     case VEC_WIDEN_MULT_HI_EXPR:
3211     case VEC_WIDEN_MULT_LO_EXPR:
3212     case VEC_UNPACK_HI_EXPR:
3213     case VEC_UNPACK_LO_EXPR:
3214     case VEC_UNPACK_FLOAT_HI_EXPR:
3215     case VEC_UNPACK_FLOAT_LO_EXPR:
3216     case VEC_PACK_TRUNC_EXPR:
3217     case VEC_PACK_SAT_EXPR:
3218     case VEC_PACK_FIX_TRUNC_EXPR:
3219     case VEC_EXTRACT_EVEN_EXPR:
3220     case VEC_EXTRACT_ODD_EXPR:
3221     case VEC_INTERLEAVE_HIGH_EXPR:
3222     case VEC_INTERLEAVE_LOW_EXPR:
3223 
3224       return 1;
3225 
3226     /* A few special cases of expensive operations.  This is useful
3227        to avoid inlining functions having too many of these.  */
3228     case TRUNC_DIV_EXPR:
3229     case CEIL_DIV_EXPR:
3230     case FLOOR_DIV_EXPR:
3231     case ROUND_DIV_EXPR:
3232     case EXACT_DIV_EXPR:
3233     case TRUNC_MOD_EXPR:
3234     case CEIL_MOD_EXPR:
3235     case FLOOR_MOD_EXPR:
3236     case ROUND_MOD_EXPR:
3237     case RDIV_EXPR:
3238       if (TREE_CODE (op2) != INTEGER_CST)
3239         return weights->div_mod_cost;
3240       return 1;
3241 
3242     default:
3243       /* We expect a copy assignment with no operator.  */
3244       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3245       return 0;
3246     }
3247 }
3248 
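/* For example (informal): the PLUS_EXPR in "a = b + c" is charged 1; the
   TRUNC_DIV_EXPR in "a = b / c" is charged WEIGHTS->div_mod_cost because
   the divisor is not constant; "a = b / 4" is charged only 1, on the
   assumption that division by a constant gets strength-reduced.  */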
3249 
3250 /* Estimate number of instructions that will be created by expanding
3251    the statements in the statement sequence STMTS.
3252    WEIGHTS contains weights attributed to various constructs.  */
3253 
3254 static int
3255 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3256 {
3257   int cost;
3258   gimple_stmt_iterator gsi;
3259 
3260   cost = 0;
3261   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3262     cost += estimate_num_insns (gsi_stmt (gsi), weights);
3263 
3264   return cost;
3265 }
3266 
3267 
3268 /* Estimate number of instructions that will be created by expanding STMT.
3269    WEIGHTS contains weights attributed to various constructs.  */
3270 
3271 int
3272 estimate_num_insns (gimple stmt, eni_weights *weights)
3273 {
3274   unsigned cost, i;
3275   enum gimple_code code = gimple_code (stmt);
3276   tree lhs;
3277   tree rhs;
3278 
3279   switch (code)
3280     {
3281     case GIMPLE_ASSIGN:
3282       /* Try to estimate the cost of assignments.  We have two cases to
3283 	 deal with:
3284 	 1) Simple assignments to registers;
3285 	 2) Stores to things that must live in memory.  This includes
3286 	    "normal" stores to scalars, but also assignments of large
3287 	    structures, or constructors of big arrays.
3288 
3289 	 Let us look at these two cases, assuming we have "a = b + C":
3290 	 <GIMPLE_ASSIGN <var_decl "a">
3291 	        <plus_expr <var_decl "b"> <constant C>>>
3292 	 If "a" is a GIMPLE register, the assignment to it is free on almost
3293 	 any target, because "a" usually ends up in a real register.  Hence
3294 	 the only cost of this expression comes from the PLUS_EXPR, and we
3295 	 can ignore the GIMPLE_ASSIGN.
3296 	 If "a" is not a GIMPLE register, the assignment to "a" will most
3297 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3298 	 of moving something into "a", which we compute using the function
3299 	 estimate_move_cost.  */
3300       lhs = gimple_assign_lhs (stmt);
3301       rhs = gimple_assign_rhs1 (stmt);
3302 
3303       if (is_gimple_reg (lhs))
3304 	cost = 0;
3305       else
3306 	cost = estimate_move_cost (TREE_TYPE (lhs));
3307 
3308       if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3309 	cost += estimate_move_cost (TREE_TYPE (rhs));
3310 
3311       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3312       				      gimple_assign_rhs1 (stmt),
3313 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3314 				      == GIMPLE_BINARY_RHS
3315 				      ? gimple_assign_rhs2 (stmt) : NULL);
3316       break;
3317 
3318     case GIMPLE_COND:
3319       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3320       				         gimple_op (stmt, 0),
3321 				         gimple_op (stmt, 1));
3322       break;
3323 
3324     case GIMPLE_SWITCH:
3325       /* Take into account the cost of the switch + guess 2 conditional
3326          jumps for each case label (for the time estimate we assume the
3327          labels are dispatched through a balanced tree of comparisons,
3328 	 hence the logarithm).  TODO: once the switch expansion logic is
3329 	 sufficiently separated, we can do a better job of estimating the cost.  */
3330       if (weights->time_based)
3331         cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3332       else
3333         cost = gimple_switch_num_labels (stmt) * 2;
3334       break;
3335 
3336     case GIMPLE_CALL:
3337       {
3338 	tree decl = gimple_call_fndecl (stmt);
3339 	tree addr = gimple_call_fn (stmt);
3340 	tree funtype = TREE_TYPE (addr);
3341 
3342 	if (POINTER_TYPE_P (funtype))
3343 	  funtype = TREE_TYPE (funtype);
3344 
3345 	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3346 	  cost = weights->target_builtin_call_cost;
3347 	else
3348 	  cost = weights->call_cost;
3349 
3350 	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3351 	  switch (DECL_FUNCTION_CODE (decl))
3352 	    {
3353 	    case BUILT_IN_CONSTANT_P:
3354 	      return 0;
3355 	    case BUILT_IN_EXPECT:
3356 	      return 0;
3357 
3358 	    /* A prefetch instruction is not expensive.  */
3359 	    case BUILT_IN_PREFETCH:
3360 	      cost = weights->target_builtin_call_cost;
3361 	      break;
3362 
3363 	    /* Exception state returns or moves registers around.  */
3364 	    case BUILT_IN_EH_FILTER:
3365 	    case BUILT_IN_EH_POINTER:
3366 	    case BUILT_IN_EH_COPY_VALUES:
3367 	      return 0;
3368 
3369 	    default:
3370 	      break;
3371 	    }
3372 
3373 	if (decl)
3374 	  funtype = TREE_TYPE (decl);
3375 
3376 	if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3377 	  cost += estimate_move_cost (TREE_TYPE (funtype));
3378 	/* Our cost must be kept in sync with
3379 	   cgraph_estimate_size_after_inlining, which uses the function
3380 	   declaration to figure out the arguments.  */
3381 	if (decl && DECL_ARGUMENTS (decl))
3382 	  {
3383 	    tree arg;
3384 	    for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3385 	      if (!VOID_TYPE_P (TREE_TYPE (arg)))
3386 	        cost += estimate_move_cost (TREE_TYPE (arg));
3387 	  }
3388 	else if (funtype && prototype_p (funtype))
3389 	  {
3390 	    tree t;
3391 	    for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3392 	    	 t = TREE_CHAIN (t))
3393 	      if (!VOID_TYPE_P (TREE_VALUE (t)))
3394 	        cost += estimate_move_cost (TREE_VALUE (t));
3395 	  }
3396 	else
3397 	  {
3398 	    for (i = 0; i < gimple_call_num_args (stmt); i++)
3399 	      {
3400 		tree arg = gimple_call_arg (stmt, i);
3401 	        if (!VOID_TYPE_P (TREE_TYPE (arg)))
3402 		  cost += estimate_move_cost (TREE_TYPE (arg));
3403 	      }
3404 	  }
3405 
3406 	break;
3407       }
3408 
3409     case GIMPLE_GOTO:
3410     case GIMPLE_LABEL:
3411     case GIMPLE_NOP:
3412     case GIMPLE_PHI:
3413     case GIMPLE_RETURN:
3414     case GIMPLE_PREDICT:
3415     case GIMPLE_DEBUG:
3416       return 0;
3417 
3418     case GIMPLE_ASM:
3419       return asm_str_count (gimple_asm_string (stmt));
3420 
3421     case GIMPLE_RESX:
3422       /* This is either going to be an external function call with one
3423 	 argument, or two register copy statements plus a goto.  */
3424       return 2;
3425 
3426     case GIMPLE_EH_DISPATCH:
3427       /* ??? This is going to turn into a switch statement.  Ideally
3428 	 we'd have a look at the eh region and estimate the number of
3429 	 edges involved.  */
3430       return 10;
3431 
3432     case GIMPLE_BIND:
3433       return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3434 
3435     case GIMPLE_EH_FILTER:
3436       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3437 
3438     case GIMPLE_CATCH:
3439       return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3440 
3441     case GIMPLE_TRY:
3442       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3443               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3444 
3445     /* OpenMP directives are generally very expensive.  */
3446 
3447     case GIMPLE_OMP_RETURN:
3448     case GIMPLE_OMP_SECTIONS_SWITCH:
3449     case GIMPLE_OMP_ATOMIC_STORE:
3450     case GIMPLE_OMP_CONTINUE:
3451       /* ...except these, which are cheap.  */
3452       return 0;
3453 
3454     case GIMPLE_OMP_ATOMIC_LOAD:
3455       return weights->omp_cost;
3456 
3457     case GIMPLE_OMP_FOR:
3458       return (weights->omp_cost
3459               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3460               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3461 
3462     case GIMPLE_OMP_PARALLEL:
3463     case GIMPLE_OMP_TASK:
3464     case GIMPLE_OMP_CRITICAL:
3465     case GIMPLE_OMP_MASTER:
3466     case GIMPLE_OMP_ORDERED:
3467     case GIMPLE_OMP_SECTION:
3468     case GIMPLE_OMP_SECTIONS:
3469     case GIMPLE_OMP_SINGLE:
3470       return (weights->omp_cost
3471               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3472 
3473     default:
3474       gcc_unreachable ();
3475     }
3476 
3477   return cost;
3478 }
3479 
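/* Worked example (informal): for a register assignment "t_1 = x_2 + y_3"
   the store is free and the total is the PLUS_EXPR's cost of 1; for a
   store "*p_1 = x_2" the GIMPLE_ASSIGN is charged estimate_move_cost of
   the stored type; and for an aggregate copy "a = b" both sides can
   contribute a move cost.  */
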
3480 /* Estimate number of instructions that will be created by expanding
3481    function FNDECL.  WEIGHTS contains weights attributed to various
3482    constructs.  */
3483 
3484 int
3485 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3486 {
3487   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3488   gimple_stmt_iterator bsi;
3489   basic_block bb;
3490   int n = 0;
3491 
3492   gcc_assert (my_function && my_function->cfg);
3493   FOR_EACH_BB_FN (bb, my_function)
3494     {
3495       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3496 	n += estimate_num_insns (gsi_stmt (bsi), weights);
3497     }
3498 
3499   return n;
3500 }
3501 
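/* Typical use (a sketch): estimate a function body with the two global
   weight sets that init_inline_once below sets up:

     int size = estimate_num_insns_fn (fndecl, &eni_size_weights);
     int time = estimate_num_insns_fn (fndecl, &eni_time_weights);  */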
3502 
3503 /* Initializes weights used by estimate_num_insns.  */
3504 
3505 void
3506 init_inline_once (void)
3507 {
3508   eni_size_weights.call_cost = 1;
3509   eni_size_weights.target_builtin_call_cost = 1;
3510   eni_size_weights.div_mod_cost = 1;
3511   eni_size_weights.omp_cost = 40;
3512   eni_size_weights.time_based = false;
3513 
3514   /* Estimating the time for a call is difficult, since we have no idea
3515      what the called function does.  In the current uses of
3516      eni_time_weights, underestimating the cost does less harm than
3517      overestimating it, so we choose a rather small value here.  */
3518   eni_time_weights.call_cost = 10;
3519   eni_time_weights.target_builtin_call_cost = 10;
3520   eni_time_weights.div_mod_cost = 10;
3521   eni_time_weights.omp_cost = 40;
3522   eni_time_weights.time_based = true;
3523 }
3524 
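/* With these weights, a plain call is charged 1 (plus argument and return
   value moves) when estimating size, but 10 when estimating time - an
   illustration of the asymmetry described above, where underestimating
   call time is the cheaper mistake.  */
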
3525 /* Estimate the number of instructions in a gimple_seq. */
3526 
3527 int
3528 count_insns_seq (gimple_seq seq, eni_weights *weights)
3529 {
3530   gimple_stmt_iterator gsi;
3531   int n = 0;
3532   for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3533     n += estimate_num_insns (gsi_stmt (gsi), weights);
3534 
3535   return n;
3536 }
3537 
3538 
3539 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
3540 
3541 static void
3542 prepend_lexical_block (tree current_block, tree new_block)
3543 {
3544   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3545   BLOCK_SUBBLOCKS (current_block) = new_block;
3546   BLOCK_SUPERCONTEXT (new_block) = current_block;
3547 }
3548 
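/* For instance (informal): if CURRENT_BLOCK already has subblocks B1 -> B2,
   then after prepend_lexical_block (CURRENT_BLOCK, NEW_BLOCK) the subblock
   chain reads NEW_BLOCK -> B1 -> B2, and NEW_BLOCK's supercontext is
   CURRENT_BLOCK.  */
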
3549 /* Fetch the callee declaration from the call graph edge going from NODE
3550    and associated with call statement STMT.  Return NULL_TREE if not found.  */
3551 static tree
3552 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3553 {
3554   struct cgraph_edge *cs;
3555 
3556   cs = cgraph_edge (node, stmt);
3557   if (cs)
3558     return cs->callee->decl;
3559 
3560   return NULL_TREE;
3561 }
3562 
3563 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
3564 
3565 static bool
3566 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3567 {
3568   tree use_retvar;
3569   tree fn;
3570   struct pointer_map_t *st, *dst;
3571   tree return_slot;
3572   tree modify_dest;
3573   location_t saved_location;
3574   struct cgraph_edge *cg_edge;
3575   cgraph_inline_failed_t reason;
3576   basic_block return_block;
3577   edge e;
3578   gimple_stmt_iterator gsi, stmt_gsi;
3579   bool successfully_inlined = FALSE;
3580   bool purge_dead_abnormal_edges;
3581   tree t_step;
3582   tree var;
3583 
3584   /* Set input_location here so we get the right instantiation context
3585      if we call instantiate_decl from inlinable_function_p.  */
3586   saved_location = input_location;
3587   if (gimple_has_location (stmt))
3588     input_location = gimple_location (stmt);
3589 
3590   /* From here on, we're only interested in CALL_EXPRs.  */
3591   if (gimple_code (stmt) != GIMPLE_CALL)
3592     goto egress;
3593 
3594   /* First, see if we can figure out what function is being called.
3595      If we cannot, then there is no hope of inlining the function.  */
3596   fn = gimple_call_fndecl (stmt);
3597   if (!fn)
3598     {
3599       fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3600       if (!fn)
3601 	goto egress;
3602     }
3603 
3604   /* Turn forward declarations into real ones.  */
3605   fn = cgraph_node (fn)->decl;
3606 
3607   /* If FN is a declaration of a function in a nested scope that was
3608      globally declared inline, we don't set its DECL_INITIAL.
3609      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3610      C++ front-end uses it for cdtors to refer to their internal
3611      declarations, which are not real functions.  Fortunately those
3612      don't have trees to be saved, so we can tell by checking their
3613      gimple_body.  */
3614   if (!DECL_INITIAL (fn)
3615       && DECL_ABSTRACT_ORIGIN (fn)
3616       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3617     fn = DECL_ABSTRACT_ORIGIN (fn);
3618 
3619   /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3620      Kill this check once this is fixed.  */
3621   if (!id->dst_node->analyzed)
3622     goto egress;
3623 
3624   cg_edge = cgraph_edge (id->dst_node, stmt);
3625 
3626   /* Don't inline functions with different EH personalities.  */
3627   if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3628       && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3629       && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3630 	  != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3631     goto egress;
3632 
3633   /* Don't try to inline functions that are not well-suited to
3634      inlining.  */
3635   if (!cgraph_inline_p (cg_edge, &reason))
3636     {
3637       /* If this call was originally indirect, we do not want to emit any
3638 	 inlining related warnings or sorry messages because there are no
3639 	 guarantees regarding those.  */
3640       if (cg_edge->indirect_call)
3641 	goto egress;
3642 
3643       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3644 	  /* Avoid warnings during early inline pass. */
3645 	  && cgraph_global_info_ready)
3646 	{
3647 	  sorry ("inlining failed in call to %q+F: %s", fn,
3648 		 cgraph_inline_failed_string (reason));
3649 	  sorry ("called from here");
3650 	}
3651       else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3652 	       && !DECL_IN_SYSTEM_HEADER (fn)
3653 	       && reason != CIF_UNSPECIFIED
3654 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3655 	       /* Avoid warnings during early inline pass. */
3656 	       && cgraph_global_info_ready)
3657 	{
3658 	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3659 		   fn, cgraph_inline_failed_string (reason));
3660 	  warning (OPT_Winline, "called from here");
3661 	}
3662       goto egress;
3663     }
3664   fn = cg_edge->callee->decl;
3665 
3666 #ifdef ENABLE_CHECKING
3667   if (cg_edge->callee->decl != id->dst_node->decl)
3668     verify_cgraph_node (cg_edge->callee);
3669 #endif
3670 
3671   /* We will be inlining this callee.  */
3672   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3673 
3674   /* Update the caller's EH personality.  */
3675   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3676     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3677       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3678 
3679   /* Split the block holding the GIMPLE_CALL.  */
3680   e = split_block (bb, stmt);
3681   bb = e->src;
3682   return_block = e->dest;
3683   remove_edge (e);
3684 
3685   /* split_block splits after the statement; work around this by
3686      moving the call into the second block manually.  Not pretty,
3687      but seems easier than doing the CFG manipulation by hand
3688      when the GIMPLE_CALL is the last statement of BB.  */
3689   stmt_gsi = gsi_last_bb (bb);
3690   gsi_remove (&stmt_gsi, false);
3691 
3692   /* If the GIMPLE_CALL was the last statement of BB, it may have
3693      been the source of abnormal edges.  In this case, schedule
3694      the removal of dead abnormal edges.  */
3695   gsi = gsi_start_bb (return_block);
3696   if (gsi_end_p (gsi))
3697     {
3698       gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3699       purge_dead_abnormal_edges = true;
3700     }
3701   else
3702     {
3703       gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3704       purge_dead_abnormal_edges = false;
3705     }
3706 
3707   stmt_gsi = gsi_start_bb (return_block);
3708 
3709   /* Build a block containing code to initialize the arguments, the
3710      actual inline expansion of the body, and a label for the return
3711      statements within the function to jump to.  The type of the
3712      statement expression is the return type of the function call.  */
3713   id->block = make_node (BLOCK);
3714   BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3715   BLOCK_SOURCE_LOCATION (id->block) = input_location;
3716   prepend_lexical_block (gimple_block (stmt), id->block);
3717 
3718   /* Local declarations will be replaced by their equivalents in this
3719      map.  */
3720   st = id->decl_map;
3721   id->decl_map = pointer_map_create ();
3722   dst = id->debug_map;
3723   id->debug_map = NULL;
3724 
3725   /* Record the function we are about to inline.  */
3726   id->src_fn = fn;
3727   id->src_node = cg_edge->callee;
3728   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3729   id->gimple_call = stmt;
3730 
3731   gcc_assert (!id->src_cfun->after_inlining);
3732 
3733   id->entry_bb = bb;
3734   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3735     {
3736       gimple_stmt_iterator si = gsi_last_bb (bb);
3737       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3738       						   NOT_TAKEN),
3739 			GSI_NEW_STMT);
3740     }
3741   initialize_inlined_parameters (id, stmt, fn, bb);
3742 
3743   if (DECL_INITIAL (fn))
3744     prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3745 
3746   /* Return statements in the function body will be replaced by jumps
3747      to the RET_LABEL.  */
3748   gcc_assert (DECL_INITIAL (fn));
3749   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3750 
3751   /* Find the LHS to which the result of this call is assigned.  */
3752   return_slot = NULL;
3753   if (gimple_call_lhs (stmt))
3754     {
3755       modify_dest = gimple_call_lhs (stmt);
3756 
3757       /* The function which we are inlining might not return a value,
3758 	 in which case we should issue a warning that the function
3759 	 does not return a value.  In that case the optimizers will
3760 	 see that the variable to which the value is assigned was not
3761 	 initialized.  We do not want to issue a warning about that
3762 	 uninitialized variable.  */
3763       if (DECL_P (modify_dest))
3764 	TREE_NO_WARNING (modify_dest) = 1;
3765 
3766       if (gimple_call_return_slot_opt_p (stmt))
3767 	{
3768 	  return_slot = modify_dest;
3769 	  modify_dest = NULL;
3770 	}
3771     }
3772   else
3773     modify_dest = NULL;
3774 
3775   /* If we are inlining a call to the C++ operator new, we don't want
3776      to use type based alias analysis on the return value.  Otherwise
3777      we may get confused if the compiler sees that the inlined new
3778      function returns a pointer which was just deleted.  See bug
3779      33407.  */
3780   if (DECL_IS_OPERATOR_NEW (fn))
3781     {
3782       return_slot = NULL;
3783       modify_dest = NULL;
3784     }
3785 
3786   /* Declare the return variable for the function.  */
3787   use_retvar = declare_return_variable (id, return_slot, modify_dest);
3788 
3789   /* Add local vars in this inlined callee to caller.  */
3790   t_step = id->src_cfun->local_decls;
3791   for (; t_step; t_step = TREE_CHAIN (t_step))
3792     {
3793       var = TREE_VALUE (t_step);
3794       if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3795 	{
3796 	  if (var_ann (var) && add_referenced_var (var))
3797 	    cfun->local_decls = tree_cons (NULL_TREE, var,
3798 					   cfun->local_decls);
3799 	}
3800       else if (!can_be_nonlocal (var, id))
3801 	cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3802 				       cfun->local_decls);
3803     }
3804 
3805   if (dump_file && (dump_flags & TDF_DETAILS))
3806     {
3807       fprintf (dump_file, "Inlining ");
3808       print_generic_expr (dump_file, id->src_fn, 0);
3809       fprintf (dump_file, " to ");
3810       print_generic_expr (dump_file, id->dst_fn, 0);
3811       fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3812     }
3813 
3814   /* This is it.  Duplicate the callee body.  Assume callee is
3815      pre-gimplified.  Note that we must not alter the caller
3816      function in any way before this point, as this CALL_EXPR may be
3817      a self-referential call; if we're calling ourselves, we need to
3818      duplicate our body before altering anything.  */
3819   copy_body (id, bb->count,
3820   	     cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3821 	     bb, return_block);
3822 
3823   /* Reset the escaped and callused solutions.  */
3824   if (cfun->gimple_df)
3825     {
3826       pt_solution_reset (&cfun->gimple_df->escaped);
3827       pt_solution_reset (&cfun->gimple_df->callused);
3828     }
3829 
3830   /* Clean up.  */
3831   if (id->debug_map)
3832     {
3833       pointer_map_destroy (id->debug_map);
3834       id->debug_map = dst;
3835     }
3836   pointer_map_destroy (id->decl_map);
3837   id->decl_map = st;
3838 
3839   /* Unlink the call's virtual operands before replacing it.  */
3840   unlink_stmt_vdef (stmt);
3841 
3842   /* If the inlined function returns a result that we care about,
3843      substitute the GIMPLE_CALL with an assignment of the return
3844      variable to the LHS of the call.  That is, if STMT was
3845      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
3846   if (use_retvar && gimple_call_lhs (stmt))
3847     {
3848       gimple old_stmt = stmt;
3849       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3850       gsi_replace (&stmt_gsi, stmt, false);
3851       if (gimple_in_ssa_p (cfun))
3852 	mark_symbols_for_renaming (stmt);
3853       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3854     }
3855   else
3856     {
3857       /* Handle the case of inlining a function with no return
3858 	 statement, which causes the return value to become undefined.  */
3859       if (gimple_call_lhs (stmt)
3860 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3861 	{
3862 	  tree name = gimple_call_lhs (stmt);
3863 	  tree var = SSA_NAME_VAR (name);
3864 	  tree def = gimple_default_def (cfun, var);
3865 
3866 	  if (def)
3867 	    {
3868 	      /* If the variable is used undefined, make this name
3869 		 undefined via a move.  */
3870 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3871 	      gsi_replace (&stmt_gsi, stmt, true);
3872 	    }
3873 	  else
3874 	    {
3875 	      /* Otherwise make this variable undefined.  */
3876 	      gsi_remove (&stmt_gsi, true);
3877 	      set_default_def (var, name);
3878 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3879 	    }
3880 	}
3881       else
3882         gsi_remove (&stmt_gsi, true);
3883     }
3884 
3885   if (purge_dead_abnormal_edges)
3886     gimple_purge_dead_abnormal_call_edges (return_block);
3887 
3888   /* If the value of the new expression is ignored, that's OK.  We
3889      don't warn about this for CALL_EXPRs, so we shouldn't warn about
3890      the equivalent inlined version either.  */
3891   if (is_gimple_assign (stmt))
3892     {
3893       gcc_assert (gimple_assign_single_p (stmt)
3894 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3895       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3896     }
3897 
3898   /* Output the inlining info for this abstract function, since it has been
3899      inlined.  If we don't do this now, we can lose the information about the
3900      variables in the function when the blocks get blown away as soon as we
3901      remove the cgraph node.  */
3902   (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3903 
3904   /* Update callgraph if needed.  */
3905   cgraph_remove_node (cg_edge->callee);
3906 
3907   id->block = NULL_TREE;
3908   successfully_inlined = TRUE;
3909 
3910  egress:
3911   input_location = saved_location;
3912   return successfully_inlined;
3913 }
3914 
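/* Informal sketch of the CFG surgery performed above for a statement
   "a = foo (...)" sitting in block BB:

     before:  BB: ... a = foo (...) ...
     after:   BB: ...                       (call split off the end)
              <copied body of foo>          (copy_body)
              RETURN_BLOCK: a = USE_RETVAR; ...

   where the GIMPLE_CALL itself has been replaced by an assignment from
   the return variable, or simply removed when the result is unused.  */
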
3915 /* Expand the call statements in basic block BB by inlining.  Return
3916    true as soon as one call has been successfully inlined; statements
3917    that followed the inlined call now live in a newly created block,
3918    which the walk over the CFG in optimize_inline_calls will visit
3919    later.  */
3920 
3921 static bool
3922 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3923 {
3924   gimple_stmt_iterator gsi;
3925 
3926   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3927     {
3928       gimple stmt = gsi_stmt (gsi);
3929 
3930       if (is_gimple_call (stmt)
3931 	  && expand_call_inline (bb, stmt, id))
3932 	return true;
3933     }
3934 
3935   return false;
3936 }
3937 
3938 
3939 /* Walk all basic blocks created after FIRST and try to fold every statement
3940    in the STATEMENTS pointer set.  */
3941 
3942 static void
3943 fold_marked_statements (int first, struct pointer_set_t *statements)
3944 {
3945   for (; first < n_basic_blocks; first++)
3946     if (BASIC_BLOCK (first))
3947       {
3948         gimple_stmt_iterator gsi;
3949 
3950 	for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3951 	     !gsi_end_p (gsi);
3952 	     gsi_next (&gsi))
3953 	  if (pointer_set_contains (statements, gsi_stmt (gsi)))
3954 	    {
3955 	      gimple old_stmt = gsi_stmt (gsi);
3956 	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3957 
3958 	      if (old_decl && DECL_BUILT_IN (old_decl))
3959 		{
3960 		  /* Folding builtins can create multiple instructions;
3961 		     we need to look at all of them.  */
3962 		  gimple_stmt_iterator i2 = gsi;
3963 		  gsi_prev (&i2);
3964 		  if (fold_stmt (&gsi))
3965 		    {
3966 		      gimple new_stmt;
3967 		      if (gsi_end_p (i2))
3968 			i2 = gsi_start_bb (BASIC_BLOCK (first));
3969 		      else
3970 			gsi_next (&i2);
3971 		      while (1)
3972 			{
3973 			  new_stmt = gsi_stmt (i2);
3974 			  update_stmt (new_stmt);
3975 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3976 							     new_stmt);
3977 
3978 			  if (new_stmt == gsi_stmt (gsi))
3979 			    {
3980 			      /* It is okay to check only for the very last
3981 				 of these statements.  If it is a throwing
3982 				 statement nothing will change.  If it isn't,
3983 				 this can remove EH edges.  The only way that
3984 				 could be wrong is if some intermediate stmt
3985 				 could throw while the last one cannot; that
3986 				 would mean we'd have to split the block,
3987 				 which we can't do here and we'd lose anyway.
3988 				 And as builtins probably never throw, this
3989 				 all is moot anyway.  */
3990 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
3991 								  new_stmt))
3992 				gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3993 			      break;
3994 			    }
3995 			  gsi_next (&i2);
3996 			}
3997 		    }
3998 		}
3999 	      else if (fold_stmt (&gsi))
4000 		{
4001 		  /* Re-read the statement from GSI as fold_stmt() may
4002 		     have changed it.  */
4003 		  gimple new_stmt = gsi_stmt (gsi);
4004 		  update_stmt (new_stmt);
4005 
4006 		  if (is_gimple_call (old_stmt)
4007 		      || is_gimple_call (new_stmt))
4008 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4009 						       new_stmt);
4010 
4011 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4012 		    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4013 		}
4014 	    }
4015       }
4016 }
4017 
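/* Example of why the builtin case above is special (informal): folding a
   builtin can replace one statement by several, so the loop restarts from
   the statement preceding the fold point and walks forward, giving every
   replacement statement its update_stmt and callgraph-edge maintenance
   rather than just the last one.  */
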
4018 /* Return true if BB has at least one abnormal outgoing edge.  */
4019 
4020 static inline bool
4021 has_abnormal_outgoing_edge_p (basic_block bb)
4022 {
4023   edge e;
4024   edge_iterator ei;
4025 
4026   FOR_EACH_EDGE (e, ei, bb->succs)
4027     if (e->flags & EDGE_ABNORMAL)
4028       return true;
4029 
4030   return false;
4031 }
4032 
4033 /* Expand calls to inline functions in the body of FN.  */
4034 
4035 unsigned int
4036 optimize_inline_calls (tree fn)
4037 {
4038   copy_body_data id;
4039   basic_block bb;
4040   int last = n_basic_blocks;
4041   struct gimplify_ctx gctx;
4042 
4043   /* There is no point in performing inlining if errors have already
4044      occurred -- and we might crash if we try to inline invalid
4045      code.  */
4046   if (errorcount || sorrycount)
4047     return 0;
4048 
4049   /* Clear out ID.  */
4050   memset (&id, 0, sizeof (id));
4051 
4052   id.src_node = id.dst_node = cgraph_node (fn);
4053   id.dst_fn = fn;
4054   /* Or any functions that aren't finished yet.  */
4055   if (current_function_decl)
4056     id.dst_fn = current_function_decl;
4057 
4058   id.copy_decl = copy_decl_maybe_to_var;
4059   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4060   id.transform_new_cfg = false;
4061   id.transform_return_to_modify = true;
4062   id.transform_lang_insert_block = NULL;
4063   id.statements_to_fold = pointer_set_create ();
4064 
4065   push_gimplify_context (&gctx);
4066 
4067   /* We make no attempts to keep dominance info up-to-date.  */
4068   free_dominance_info (CDI_DOMINATORS);
4069   free_dominance_info (CDI_POST_DOMINATORS);
4070 
4071   /* Register specific gimple functions.  */
4072   gimple_register_cfg_hooks ();
4073 
4074   /* Reach the trees by walking over the CFG, and note the
4075      enclosing basic-blocks in the call edges.  */
4076   /* We walk the blocks going forward, because inlined function bodies
4077      will split id->current_basic_block, and the new blocks will
4078      follow it; we'll trudge through them, processing their CALL_EXPRs
4079      along the way.  */
4080   FOR_EACH_BB (bb)
4081     gimple_expand_calls_inline (bb, &id);
4082 
4083   pop_gimplify_context (NULL);
4084 
4085 #ifdef ENABLE_CHECKING
4086     {
4087       struct cgraph_edge *e;
4088 
4089       verify_cgraph_node (id.dst_node);
4090 
4091       /* Double check that we inlined everything we are supposed to inline.  */
4092       for (e = id.dst_node->callees; e; e = e->next_callee)
4093 	gcc_assert (e->inline_failed);
4094     }
4095 #endif
4096 
4097   /* Fold the statements before compacting/renumbering the basic blocks.  */
4098   fold_marked_statements (last, id.statements_to_fold);
4099   pointer_set_destroy (id.statements_to_fold);
4100 
4101   gcc_assert (!id.debug_stmts);
4102 
4103   /* Renumber the (code) basic_blocks consecutively.  */
4104   compact_blocks ();
4105   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
4106   number_blocks (fn);
4107 
4108   fold_cond_expr_cond ();
4109   delete_unreachable_blocks_update_callgraph (&id);
4110 #ifdef ENABLE_CHECKING
4111   verify_cgraph_node (id.dst_node);
4112 #endif
4113 
4114   /* It would be nice to check SSA/CFG/statement consistency here, but it is
4115      not possible yet - the IPA passes might make various functions not throw,
4116      and they don't care to proactively update local EH info.  This is done
4117      later in the fixup_cfg pass, which also executes the verification.  */
4118   return (TODO_update_ssa
4119 	  | TODO_cleanup_cfg
4120 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4121 	  | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4122 }
4123 
4124 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
4125 
4126 tree
4127 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4128 {
4129   enum tree_code code = TREE_CODE (*tp);
4130   enum tree_code_class cl = TREE_CODE_CLASS (code);
4131 
4132   /* We make copies of most nodes.  */
4133   if (IS_EXPR_CODE_CLASS (cl)
4134       || code == TREE_LIST
4135       || code == TREE_VEC
4136       || code == TYPE_DECL
4137       || code == OMP_CLAUSE)
4138     {
4139       /* Because the chain gets clobbered when we make a copy, we save it
4140 	 here.  */
4141       tree chain = NULL_TREE, new_tree;
4142 
4143       chain = TREE_CHAIN (*tp);
4144 
4145       /* Copy the node.  */
4146       new_tree = copy_node (*tp);
4147 
4148       /* Propagate mudflap marked-ness.  */
4149       if (flag_mudflap && mf_marked_p (*tp))
4150         mf_mark (new_tree);
4151 
4152       *tp = new_tree;
4153 
4154       /* Now, restore the chain, if appropriate.  That will cause
4155 	 walk_tree to walk into the chain as well.  */
4156       if (code == PARM_DECL
4157 	  || code == TREE_LIST
4158 	  || code == OMP_CLAUSE)
4159 	TREE_CHAIN (*tp) = chain;
4160 
4161       /* For now, we don't update BLOCKs when we make copies.  So, we
4162 	 have to nullify all BIND_EXPRs.  */
4163       if (TREE_CODE (*tp) == BIND_EXPR)
4164 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4165     }
4166   else if (code == CONSTRUCTOR)
4167     {
4168       /* CONSTRUCTOR nodes need special handling because
4169          we need to duplicate the vector of elements.  */
4170       tree new_tree;
4171 
4172       new_tree = copy_node (*tp);
4173 
4174       /* Propagate mudflap marked-ness.  */
4175       if (flag_mudflap && mf_marked_p (*tp))
4176         mf_mark (new_tree);
4177 
4178       CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4179 					 CONSTRUCTOR_ELTS (*tp));
4180       *tp = new_tree;
4181     }
4182   else if (code == STATEMENT_LIST)
4183     /* We used to just abort on STATEMENT_LIST, but we can run into them
4184        with statement-expressions (c++/40975).  */
4185     copy_statement_list (tp);
4186   else if (TREE_CODE_CLASS (code) == tcc_type)
4187     *walk_subtrees = 0;
4188   else if (TREE_CODE_CLASS (code) == tcc_declaration)
4189     *walk_subtrees = 0;
4190   else if (TREE_CODE_CLASS (code) == tcc_constant)
4191     *walk_subtrees = 0;
4192   return NULL_TREE;
4193 }
4194 
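/* Typical use of copy_tree_r (a sketch): deep-copy an expression in place
   by walking it:

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   afterwards EXPR shares no expression nodes with the original, while
   types, most declarations and constants stay shared, as the callback
   requests via *WALK_SUBTREES.  */
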
4195 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
4196    information indicating to what new SAVE_EXPR this one should be mapped,
4197    use that one.  Otherwise, create a new node and enter it in ST.  FN is
4198    the function into which the copy will be placed.  */
4199 
4200 static void
4201 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4202 {
4203   struct pointer_map_t *st = (struct pointer_map_t *) st_;
4204   tree *n;
4205   tree t;
4206 
4207   /* See if we already encountered this SAVE_EXPR.  */
4208   n = (tree *) pointer_map_contains (st, *tp);
4209 
4210   /* If we didn't already remap this SAVE_EXPR, do so now.  */
4211   if (!n)
4212     {
4213       t = copy_node (*tp);
4214 
4215       /* Remember this SAVE_EXPR.  */
4216       *pointer_map_insert (st, *tp) = t;
4217       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
4218       *pointer_map_insert (st, t) = t;
4219     }
4220   else
4221     {
4222       /* We've already walked into this SAVE_EXPR; don't do it again.  */
4223       *walk_subtrees = 0;
4224       t = *n;
4225     }
4226 
4227   /* Replace this SAVE_EXPR with the copy.  */
4228   *tp = t;
4229 }
4230 
4231 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
4232    copies the declaration and enters it in the decl map in DATA (which is
4233    really a `copy_body_data *').  */
4234 
4235 static tree
4236 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4237 			void *data)
4238 {
4239   copy_body_data *id = (copy_body_data *) data;
4240 
4241   /* Don't walk into types.  */
4242   if (TYPE_P (*tp))
4243     *walk_subtrees = 0;
4244 
4245   else if (TREE_CODE (*tp) == LABEL_EXPR)
4246     {
4247       tree decl = TREE_OPERAND (*tp, 0);
4248 
4249       /* Copy the decl and remember the copy.  */
4250       insert_decl_map (id, decl, id->copy_decl (decl, id));
4251     }
4252 
4253   return NULL_TREE;
4254 }
4255 
4256 /* Perform any modifications to EXPR required when it is unsaved.  Does
4257    not recurse into EXPR's subtrees.  */
4258 
4259 static void
4260 unsave_expr_1 (tree expr)
4261 {
4262   switch (TREE_CODE (expr))
4263     {
4264     case TARGET_EXPR:
4265       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4266          It's OK for this to happen if it was part of a subtree that
4267          isn't immediately expanded, such as operand 2 of another
4268          TARGET_EXPR.  */
4269       if (TREE_OPERAND (expr, 1))
4270 	break;
4271 
4272       TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4273       TREE_OPERAND (expr, 3) = NULL_TREE;
4274       break;
4275 
4276     default:
4277       break;
4278     }
4279 }
4280 
4281 /* Called via walk_tree when an expression is unsaved.  Using the
4282    decl map in DATA (which is really a `copy_body_data *'), remaps
4283    all local declarations to appropriate replacements.  */
4284 
4285 static tree
4286 unsave_r (tree *tp, int *walk_subtrees, void *data)
4287 {
4288   copy_body_data *id = (copy_body_data *) data;
4289   struct pointer_map_t *st = id->decl_map;
4290   tree *n;
4291 
4292   /* Only a local declaration (variable or label).  */
4293   if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4294       || TREE_CODE (*tp) == LABEL_DECL)
4295     {
4296       /* Lookup the declaration.  */
4297       n = (tree *) pointer_map_contains (st, *tp);
4298 
4299       /* If it's there, remap it.  */
4300       if (n)
4301 	*tp = *n;
4302     }
4303 
4304   else if (TREE_CODE (*tp) == STATEMENT_LIST)
4305     gcc_unreachable ();
4306   else if (TREE_CODE (*tp) == BIND_EXPR)
4307     copy_bind_expr (tp, walk_subtrees, id);
4308   else if (TREE_CODE (*tp) == SAVE_EXPR
4309 	   || TREE_CODE (*tp) == TARGET_EXPR)
4310     remap_save_expr (tp, st, walk_subtrees);
4311   else
4312     {
4313       copy_tree_r (tp, walk_subtrees, NULL);
4314 
4315       /* Do whatever unsaving is required.  */
4316       unsave_expr_1 (*tp);
4317     }
4318 
4319   /* Keep iterating.  */
4320   return NULL_TREE;
4321 }
4322 
4323 /* Copies everything in EXPR and replaces variables, labels
4324    and SAVE_EXPRs local to EXPR.  */
4325 
4326 tree
4327 unsave_expr_now (tree expr)
4328 {
4329   copy_body_data id;
4330 
4331   /* There's nothing to do for NULL_TREE.  */
4332   if (expr == 0)
4333     return expr;
4334 
4335   /* Set up ID.  */
4336   memset (&id, 0, sizeof (id));
4337   id.src_fn = current_function_decl;
4338   id.dst_fn = current_function_decl;
4339   id.decl_map = pointer_map_create ();
4340   id.debug_map = NULL;
4341 
4342   id.copy_decl = copy_decl_no_change;
4343   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4344   id.transform_new_cfg = false;
4345   id.transform_return_to_modify = false;
4346   id.transform_lang_insert_block = NULL;
4347 
4348   /* Walk the tree once to find local labels.  */
4349   walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4350 
4351   /* Walk the tree again, copying, remapping, and unsaving.  */
4352   walk_tree (&expr, unsave_r, &id, NULL);
4353 
4354   /* Clean up.  */
4355   pointer_map_destroy (id.decl_map);
4356   if (id.debug_map)
4357     pointer_map_destroy (id.debug_map);
4358 
4359   return expr;
4360 }
4361 
4362 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4363    label, copies the declaration and enters it in the decl map in DATA (which
4364    is really a `copy_body_data *').  */
4365 
4366 static tree
4367 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4368 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
4369 		        struct walk_stmt_info *wi)
4370 {
4371   copy_body_data *id = (copy_body_data *) wi->info;
4372   gimple stmt = gsi_stmt (*gsip);
4373 
4374   if (gimple_code (stmt) == GIMPLE_LABEL)
4375     {
4376       tree decl = gimple_label_label (stmt);
4377 
4378       /* Copy the decl and remember the copy.  */
4379       insert_decl_map (id, decl, id->copy_decl (decl, id));
4380     }
4381 
4382   return NULL_TREE;
4383 }
4384 
4385 
4386 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4387    Using the decl map in WI->info (which is really a `copy_body_data *'),
4388    remaps all local declarations to appropriate replacements in gimple
4389    operands.  */
4390 
4391 static tree
4392 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4393 {
4394   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4395   copy_body_data *id = (copy_body_data *) wi->info;
4396   struct pointer_map_t *st = id->decl_map;
4397   tree *n;
4398   tree expr = *tp;
4399 
4400   /* Only a local declaration (variable or label).  */
4401   if ((TREE_CODE (expr) == VAR_DECL
4402        && !TREE_STATIC (expr))
4403       || TREE_CODE (expr) == LABEL_DECL)
4404     {
4405       /* Lookup the declaration.  */
4406       n = (tree *) pointer_map_contains (st, expr);
4407 
4408       /* If it's there, remap it.  */
4409       if (n)
4410 	*tp = *n;
4411       *walk_subtrees = 0;
4412     }
4413   else if (TREE_CODE (expr) == STATEMENT_LIST
4414 	   || TREE_CODE (expr) == BIND_EXPR
4415 	   || TREE_CODE (expr) == SAVE_EXPR)
4416     gcc_unreachable ();
4417   else if (TREE_CODE (expr) == TARGET_EXPR)
4418     {
4419       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4420          It's OK for this to happen if it was part of a subtree that
4421          isn't immediately expanded, such as operand 2 of another
4422          TARGET_EXPR.  */
4423       if (!TREE_OPERAND (expr, 1))
4424 	{
4425 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4426 	  TREE_OPERAND (expr, 3) = NULL_TREE;
4427 	}
4428     }
4429 
4430   /* Keep iterating.  */
4431   return NULL_TREE;
4432 }
4433 
4434 
4435 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4436    Using the decl map in WI->info (which is really a `copy_body_data *'),
4437    remaps all local declarations to appropriate replacements in gimple
4438    statements.  */
4439 
4440 static tree
4441 replace_locals_stmt (gimple_stmt_iterator *gsip,
4442 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
4443 		     struct walk_stmt_info *wi)
4444 {
4445   copy_body_data *id = (copy_body_data *) wi->info;
4446   gimple stmt = gsi_stmt (*gsip);
4447 
4448   if (gimple_code (stmt) == GIMPLE_BIND)
4449     {
4450       tree block = gimple_bind_block (stmt);
4451 
4452       if (block)
4453 	{
4454 	  remap_block (&block, id);
4455 	  gimple_bind_set_block (stmt, block);
4456 	}
4457 
4458       /* This will remap a lot of the same decls again, but this should be
4459 	 harmless.  */
4460       if (gimple_bind_vars (stmt))
4461 	gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4462     }
4463 
4464   /* Keep iterating.  */
4465   return NULL_TREE;
4466 }
4467 
4468 
4469 /* Copies everything in SEQ and replaces variables and labels local to
4470    current_function_decl.  */
4471 
4472 gimple_seq
4473 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4474 {
4475   copy_body_data id;
4476   struct walk_stmt_info wi;
4477   struct pointer_set_t *visited;
4478   gimple_seq copy;
4479 
4480   /* There's nothing to do for a NULL sequence.  */
4481   if (seq == NULL)
4482     return seq;
4483 
4484   /* Set up ID.  */
4485   memset (&id, 0, sizeof (id));
4486   id.src_fn = current_function_decl;
4487   id.dst_fn = current_function_decl;
4488   id.decl_map = pointer_map_create ();
4489   id.debug_map = NULL;
4490 
4491   id.copy_decl = copy_decl_no_change;
4492   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4493   id.transform_new_cfg = false;
4494   id.transform_return_to_modify = false;
4495   id.transform_lang_insert_block = NULL;
4496 
4497   /* Walk the tree once to find local labels.  */
4498   memset (&wi, 0, sizeof (wi));
4499   visited = pointer_set_create ();
4500   wi.info = &id;
4501   wi.pset = visited;
4502   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4503   pointer_set_destroy (visited);
4504 
4505   copy = gimple_seq_copy (seq);
4506 
4507   /* Walk the copy, remapping decls.  */
4508   memset (&wi, 0, sizeof (wi));
4509   wi.info = &id;
4510   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4511 
4512   /* Clean up.  */
4513   pointer_map_destroy (id.decl_map);
4514   if (id.debug_map)
4515     pointer_map_destroy (id.debug_map);
4516 
4517   return copy;
4518 }
4519 
4520 
4521 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
4522 
4523 static tree
4524 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4525 {
4526   if (*tp == data)
4527     return (tree) data;
4528   else
4529     return NULL;
4530 }
4531 
4532 bool
4533 debug_find_tree (tree top, tree search)
4534 {
4535   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4536 }
4537 
4538 
4539 /* Declare the variables created by the inliner.  Add all the variables in
4540    VARS to BLOCK.  */
4541 
4542 static void
4543 declare_inline_vars (tree block, tree vars)
4544 {
4545   tree t;
4546   for (t = vars; t; t = TREE_CHAIN (t))
4547     {
4548       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4549       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4550       cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4551     }
4552 
4553   if (block)
4554     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4555 }
4556 
4557 /* Finish up the copy COPY of DECL.  The DECL originally was in ID->src_fn,
4558    but COPY will be in ID->dst_fn.  Fix up debug info, RTL and the
4559    declaration context accordingly.  */
4560 
4561 static tree
4562 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4563 {
4564   /* Don't generate debug information for the copy if we wouldn't have
4565      generated it for the original either.  */
4566   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4567   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4568 
4569   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4570      declaration inspired this copy.  */
4571   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4572 
4573   /* The new variable/label has no RTL, yet.  */
4574   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4575       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4576     SET_DECL_RTL (copy, NULL_RTX);
4577 
4578   /* These args would always appear unused, if not for this.  */
4579   TREE_USED (copy) = 1;
4580 
4581   /* Set the context for the new declaration.  */
4582   if (!DECL_CONTEXT (decl))
4583     /* Globals stay global.  */
4584     ;
4585   else if (DECL_CONTEXT (decl) != id->src_fn)
4586     /* Things that weren't in the scope of the function we're inlining
4587        from aren't in the scope we're inlining to, either.  */
4588     ;
4589   else if (TREE_STATIC (decl))
4590     /* Function-scoped static variables should stay in the original
4591        function.  */
4592     ;
4593   else
4594     /* Ordinary automatic local variables are now in the scope of the
4595        new function.  */
4596     DECL_CONTEXT (copy) = id->dst_fn;
4597 
4598   return copy;
4599 }
4600 
4601 static tree
4602 copy_decl_to_var (tree decl, copy_body_data *id)
4603 {
4604   tree copy, type;
4605 
4606   gcc_assert (TREE_CODE (decl) == PARM_DECL
4607 	      || TREE_CODE (decl) == RESULT_DECL);
4608 
4609   type = TREE_TYPE (decl);
4610 
4611   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4612 		     VAR_DECL, DECL_NAME (decl), type);
4613   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4614   TREE_READONLY (copy) = TREE_READONLY (decl);
4615   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4616   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4617 
4618   return copy_decl_for_dup_finish (id, decl, copy);
4619 }
4620 
4621 /* Like copy_decl_to_var, but create a return slot object instead of a
4622    pointer variable for return by invisible reference.  */
4623 
4624 static tree
4625 copy_result_decl_to_var (tree decl, copy_body_data *id)
4626 {
4627   tree copy, type;
4628 
4629   gcc_assert (TREE_CODE (decl) == PARM_DECL
4630 	      || TREE_CODE (decl) == RESULT_DECL);
4631 
4632   type = TREE_TYPE (decl);
4633   if (DECL_BY_REFERENCE (decl))
4634     type = TREE_TYPE (type);
4635 
4636   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4637 		     VAR_DECL, DECL_NAME (decl), type);
4638   TREE_READONLY (copy) = TREE_READONLY (decl);
4639   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4640   if (!DECL_BY_REFERENCE (decl))
4641     {
4642       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4643       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4644     }
4645 
4646   return copy_decl_for_dup_finish (id, decl, copy);
4647 }
4648 
4649 tree
4650 copy_decl_no_change (tree decl, copy_body_data *id)
4651 {
4652   tree copy;
4653 
4654   copy = copy_node (decl);
4655 
4656   /* The COPY is not abstract; it will be generated in DST_FN.  */
4657   DECL_ABSTRACT (copy) = 0;
4658   lang_hooks.dup_lang_specific_decl (copy);
4659 
4660   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4661      been taken; it's for internal bookkeeping in expand_goto_internal.  */
4662   if (TREE_CODE (copy) == LABEL_DECL)
4663     {
4664       TREE_ADDRESSABLE (copy) = 0;
4665       LABEL_DECL_UID (copy) = -1;
4666     }
4667 
4668   return copy_decl_for_dup_finish (id, decl, copy);
4669 }
4670 
4671 static tree
4672 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4673 {
4674   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4675     return copy_decl_to_var (decl, id);
4676   else
4677     return copy_decl_no_change (decl, id);
4678 }
4679 
4680 /* Return a copy of the function's argument tree.  */
4681 static tree
4682 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4683 			       bitmap args_to_skip, tree *vars)
4684 {
4685   tree arg, *parg;
4686   tree new_parm = NULL;
4687   int i = 0;
4688 
4689   parg = &new_parm;
4690 
4691   for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4692     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4693       {
4694         tree new_tree = remap_decl (arg, id);
4695         lang_hooks.dup_lang_specific_decl (new_tree);
4696         *parg = new_tree;
4697 	parg = &TREE_CHAIN (new_tree);
4698       }
4699     else if (!pointer_map_contains (id->decl_map, arg))
4700       {
4701 	/* Make an equivalent VAR_DECL.  If the argument is used
4702 	   as a temporary variable later in the function, the uses
4703 	   will be replaced by the local variable.  */
4704 	tree var = copy_decl_to_var (arg, id);
4705 	get_var_ann (var);
4706 	add_referenced_var (var);
4707 	insert_decl_map (id, arg, var);
4708         /* Declare this new variable.  */
4709         TREE_CHAIN (var) = *vars;
4710         *vars = var;
4711       }
4712   return new_parm;
4713 }
4714 
4715 /* Return a copy of the function's static chain.  */
4716 static tree
4717 copy_static_chain (tree static_chain, copy_body_data * id)
4718 {
4719   tree *chain_copy, *pvar;
4720 
4721   chain_copy = &static_chain;
4722   for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4723     {
4724       tree new_tree = remap_decl (*pvar, id);
4725       lang_hooks.dup_lang_specific_decl (new_tree);
4726       TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4727       *pvar = new_tree;
4728     }
4729   return static_chain;
4730 }
4731 
4732 /* Return true if the function is allowed to be versioned.
4733    This is a guard for the versioning functionality.  */
4734 
4735 bool
4736 tree_versionable_function_p (tree fndecl)
4737 {
4738   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4739 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4740 }
4741 
4742 /* Delete all unreachable basic blocks and update the callgraph.
4743    Doing so is somewhat nontrivial because we need to update all clones and
4744    remove inline functions that become unreachable.  */
4745 
4746 static bool
4747 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4748 {
4749   bool changed = false;
4750   basic_block b, next_bb;
4751 
4752   find_unreachable_blocks ();
4753 
4754   /* Delete all unreachable basic blocks.  */
4755 
4756   for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4757     {
4758       next_bb = b->next_bb;
4759 
4760       if (!(b->flags & BB_REACHABLE))
4761 	{
4762           gimple_stmt_iterator bsi;
4763 
4764           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4765 	    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4766 	      {
4767 	        struct cgraph_edge *e;
4768 		struct cgraph_node *node;
4769 
4770 	        if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4771 		  {
4772 		    if (!e->inline_failed)
4773 		      cgraph_remove_node_and_inline_clones (e->callee);
4774 		    else
4775 	              cgraph_remove_edge (e);
4776 		  }
4777 		if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4778 		    && id->dst_node->clones)
4779      		  for (node = id->dst_node->clones; node != id->dst_node;)
4780 		    {
4781 	              if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4782 			{
4783 		          if (!e->inline_failed)
4784 		            cgraph_remove_node_and_inline_clones (e->callee);
4785 			  else
4786 	                    cgraph_remove_edge (e);
4787 			}
4788 
4789 		      if (node->clones)
4790 			node = node->clones;
4791 		      else if (node->next_sibling_clone)
4792 			node = node->next_sibling_clone;
4793 		      else
4794 			{
4795 			  while (node != id->dst_node && !node->next_sibling_clone)
4796 			    node = node->clone_of;
4797 			  if (node != id->dst_node)
4798 			    node = node->next_sibling_clone;
4799 			}
4800 		    }
4801 	      }
4802 	  delete_basic_block (b);
4803 	  changed = true;
4804 	}
4805     }
4806 
4807   if (changed)
4808     tidy_fallthru_edges ();
4809   return changed;
4810 }
4811 
4812 /* Update clone info after duplication.  */
4813 
4814 static void
4815 update_clone_info (copy_body_data * id)
4816 {
4817   struct cgraph_node *node;
4818   if (!id->dst_node->clones)
4819     return;
4820   for (node = id->dst_node->clones; node != id->dst_node;)
4821     {
4822       /* First update replace maps to match the new body.  */
4823       if (node->clone.tree_map)
4824         {
4825 	  unsigned int i;
4826           for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4827 	    {
4828 	      struct ipa_replace_map *replace_info;
4829 	      replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4830 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4831 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4832 	    }
4833 	}
4834       if (node->clones)
4835 	node = node->clones;
4836       else if (node->next_sibling_clone)
4837 	node = node->next_sibling_clone;
4838       else
4839 	{
4840 	  while (node != id->dst_node && !node->next_sibling_clone)
4841 	    node = node->clone_of;
4842 	  if (node != id->dst_node)
4843 	    node = node->next_sibling_clone;
4844 	}
4845     }
4846 }
4847 
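/* The clone walk above is a preorder traversal of the clone tree
   (informal summary): visit NODE, descend into NODE->clones, then move
   to NODE->next_sibling_clone, climbing back through NODE->clone_of
   until a sibling exists or ID->dst_node is reached again.  The same
   idiom appears in delete_unreachable_blocks_update_callgraph above.  */
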
4848 /* Create a copy of a function's tree.
4849    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4850    of the original function and the new copied function
4851    respectively.  In case we want to replace a DECL
4852    tree with another tree while duplicating the function's
4853    body, TREE_MAP represents the mapping between these
4854    trees. If UPDATE_CLONES is set, the call_stmt fields
4855    of edges of clones of the function will be updated.  */
4856 void
4857 tree_function_versioning (tree old_decl, tree new_decl,
4858 			  VEC(ipa_replace_map_p,gc)* tree_map,
4859 			  bool update_clones, bitmap args_to_skip)
4860 {
4861   struct cgraph_node *old_version_node;
4862   struct cgraph_node *new_version_node;
4863   copy_body_data id;
4864   tree p;
4865   unsigned i;
4866   struct ipa_replace_map *replace_info;
4867   basic_block old_entry_block, bb;
4868   VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4869 
4870   tree t_step;
4871   tree old_current_function_decl = current_function_decl;
4872   tree vars = NULL_TREE;
4873 
4874   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4875 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
4876   DECL_POSSIBLY_INLINED (old_decl) = 1;
4877 
4878   old_version_node = cgraph_node (old_decl);
4879   new_version_node = cgraph_node (new_decl);
4880 
4881   /* Output the inlining info for this abstract function, since it has been
4882      inlined.  If we don't do this now, we can lose the information about the
4883      variables in the function when the blocks get blown away as soon as we
4884      remove the cgraph node.  */
4885   (*debug_hooks->outlining_inline_function) (old_decl);
4886 
4887   DECL_ARTIFICIAL (new_decl) = 1;
4888   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4889   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4890 
4891   /* Prepare the data structures for the tree copy.  */
4892   memset (&id, 0, sizeof (id));
4893 
4894   /* Create the set in which we record statements to fold after copying.  */
4895   id.statements_to_fold = pointer_set_create ();
4896 
4897   id.decl_map = pointer_map_create ();
4898   id.debug_map = NULL;
4899   id.src_fn = old_decl;
4900   id.dst_fn = new_decl;
4901   id.src_node = old_version_node;
4902   id.dst_node = new_version_node;
4903   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4904   if (id.src_node->ipa_transforms_to_apply)
4905     {
4906       VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4907       unsigned int i;
4908 
4909       id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4910 					               id.src_node->ipa_transforms_to_apply);
4911       for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4912         VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4913 		       VEC_index (ipa_opt_pass,
4914 		       		  old_transforms_to_apply,
4915 				  i));
4916     }
4917 
4918   id.copy_decl = copy_decl_no_change;
4919   id.transform_call_graph_edges
4920     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4921   id.transform_new_cfg = true;
4922   id.transform_return_to_modify = false;
4923   id.transform_lang_insert_block = NULL;
4924 
4925   current_function_decl = new_decl;
4926   old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4927     (DECL_STRUCT_FUNCTION (old_decl));
4928   initialize_cfun (new_decl, old_decl,
4929 		   old_entry_block->count);
4930   push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4931 
4932   /* Copy the function's static chain.  */
4933   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4934   if (p)
4935     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4936       copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4937 			 &id);
4938 
4939   /* If there's a tree_map, prepare for substitution.  */
4940   if (tree_map)
4941     for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4942       {
4943 	gimple init;
4944 	replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4945 	if (replace_info->replace_p)
4946 	  {
4947 	    tree op = replace_info->new_tree;
4948 
4949 	    STRIP_NOPS (op);
4950 
4951 	    if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4952 	      op = TREE_OPERAND (op, 0);
4953 
4954 	    if (TREE_CODE (op) == ADDR_EXPR)
4955 	      {
4956 		op = TREE_OPERAND (op, 0);
4957 		while (handled_component_p (op))
4958 		  op = TREE_OPERAND (op, 0);
4959 		if (TREE_CODE (op) == VAR_DECL)
4960 		  add_referenced_var (op);
4961 	      }
4962 	    gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4963 	    init = setup_one_parameter (&id, replace_info->old_tree,
4964 	    			        replace_info->new_tree, id.src_fn,
4965 				        NULL,
4966 				        &vars);
4967 	    if (init)
4968 	      VEC_safe_push (gimple, heap, init_stmts, init);
4969 	  }
4970       }
4971   /* Copy the function's arguments.  */
4972   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4973     DECL_ARGUMENTS (new_decl) =
4974       copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4975       				     args_to_skip, &vars);
4976 
4977   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4978 
4979   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
4980   number_blocks (id.dst_fn);
4981 
4982   declare_inline_vars (DECL_INITIAL (new_decl), vars);
4983 
4984   if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4985     /* Add local vars.  */
4986     for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4987 	 t_step; t_step = TREE_CHAIN (t_step))
4988       {
4989 	tree var = TREE_VALUE (t_step);
4990 	if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4991 	  cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4992 	else if (!can_be_nonlocal (var, &id))
4993 	  cfun->local_decls =
4994 	    tree_cons (NULL_TREE, remap_decl (var, &id),
4995 		       cfun->local_decls);
4996       }
4997 
4998   /* Copy the function's body.  */
4999   copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5000 	     ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
5001 
5002   if (DECL_RESULT (old_decl) != NULL_TREE)
5003     {
5004       tree *res_decl = &DECL_RESULT (old_decl);
5005       DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
5006       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5007     }
5008 
5009   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5010   number_blocks (new_decl);
5011 
5012   /* We want to create the BB unconditionally, so that the addition of
5013      debug stmts doesn't affect BB count, which may in the end cause
5014      codegen differences.  */
5015   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5016   while (VEC_length (gimple, init_stmts))
5017     insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5018   update_clone_info (&id);
5019 
5020   /* Remap the nonlocal_goto_save_area, if any.  */
5021   if (cfun->nonlocal_goto_save_area)
5022     {
5023       struct walk_stmt_info wi;
5024 
5025       memset (&wi, 0, sizeof (wi));
5026       wi.info = &id;
5027       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5028     }
5029 
5030   /* Clean up.  */
5031   pointer_map_destroy (id.decl_map);
5032   if (id.debug_map)
5033     pointer_map_destroy (id.debug_map);
5034   free_dominance_info (CDI_DOMINATORS);
5035   free_dominance_info (CDI_POST_DOMINATORS);
5036 
5037   fold_marked_statements (0, id.statements_to_fold);
5038   pointer_set_destroy (id.statements_to_fold);
5039   fold_cond_expr_cond ();
5040   delete_unreachable_blocks_update_callgraph (&id);
5041   update_ssa (TODO_update_ssa);
5042   free_dominance_info (CDI_DOMINATORS);
5043   free_dominance_info (CDI_POST_DOMINATORS);
5044 
5045   gcc_assert (!id.debug_stmts);
5046   VEC_free (gimple, heap, init_stmts);
5047   pop_cfun ();
5048   current_function_decl = old_current_function_decl;
5049   gcc_assert (!current_function_decl
5050 	      || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5051   return;
5052 }
5053 
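/* A condensed sketch of how a caller drives the function above, modeled
   loosely on cgraph_function_versioning; the parenthesized step is a
   descriptive placeholder, not a real helper:

     tree new_decl = copy_node (old_decl);
     DECL_STRUCT_FUNCTION (new_decl) = NULL;
     (give NEW_DECL its own name, assembler name and cgraph node)
     tree_function_versioning (old_decl, new_decl, tree_map,
                               false, args_to_skip);

   TREE_MAP feeds the substitution loop above, FALSE selects the
   CB_CGE_MOVE (no clone update) path, and ARGS_TO_SKIP marks the
   argument positions the new version drops.  On return NEW_DECL owns a
   fully remapped CFG, body and local decls, independent of OLD_DECL.  */
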
5054 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5055    the callee and return the inlined body on success.  */
5056 
5057 tree
5058 maybe_inline_call_in_expr (tree exp)
5059 {
5060   tree fn = get_callee_fndecl (exp);
5061 
5062   /* We can only try to inline "const" functions.  */
5063   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5064     {
5065       struct pointer_map_t *decl_map = pointer_map_create ();
5066       call_expr_arg_iterator iter;
5067       copy_body_data id;
5068       tree param, arg, t;
5069 
5070       /* Remap the parameters.  */
5071       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5072 	   param;
5073 	   param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5074 	*pointer_map_insert (decl_map, param) = arg;
5075 
5076       memset (&id, 0, sizeof (id));
5077       id.src_fn = fn;
5078       id.dst_fn = current_function_decl;
5079       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5080       id.decl_map = decl_map;
5081 
5082       id.copy_decl = copy_decl_no_change;
5083       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5084       id.transform_new_cfg = false;
5085       id.transform_return_to_modify = true;
5086       id.transform_lang_insert_block = NULL;
5087 
5088       /* Make sure not to unshare trees behind the front-end's back
5089 	 since front-end specific mechanisms may rely on sharing.  */
5090       id.regimplify = false;
5091       id.do_not_unshare = true;
5092 
5093       /* We're not inside any EH region.  */
5094       id.eh_lp_nr = 0;
5095 
5096       t = copy_tree_body (&id);
5097       pointer_map_destroy (decl_map);
5098 
5099       /* We can only return something suitable for use in a GENERIC
5100 	 expression tree.  */
5101       if (TREE_CODE (t) == MODIFY_EXPR)
5102 	return TREE_OPERAND (t, 1);
5103     }
5104 
5105   return NULL_TREE;
5106 }
5107 
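/* A worked example for the above, with hypothetical source (the callee
   must be TREE_READONLY and still carry DECL_SAVED_TREE):

     static int sq (int x) __attribute__ ((const));
     static int sq (int x) { return x * x; }

   For a GENERIC expression containing the call sq (a), the loop above
   maps the PARM_DECL X to the actual argument A in DECL_MAP;
   copy_tree_body then produces the remapped body as a MODIFY_EXPR,
   roughly <retval> = a * a, because transform_return_to_modify is set,
   and the returned TREE_OPERAND (t, 1) is the expression a * a that
   replaces the call in the caller's tree.  */
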
5108 /* Duplicate a type, fields and all.  */
5109 
5110 tree
5111 build_duplicate_type (tree type)
5112 {
5113   struct copy_body_data id;
5114 
5115   memset (&id, 0, sizeof (id));
5116   id.src_fn = current_function_decl;
5117   id.dst_fn = current_function_decl;
5118   id.src_cfun = cfun;
5119   id.decl_map = pointer_map_create ();
5120   id.debug_map = NULL;
5121   id.copy_decl = copy_decl_no_change;
5122 
5123   type = remap_type_1 (type, &id);
5124 
5125   pointer_map_destroy (id.decl_map);
5126   if (id.debug_map)
5127     pointer_map_destroy (id.debug_map);
5128 
5129   TYPE_CANONICAL (type) = type;
5130 
5131   return type;
5132 }
5133 
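/* Usage sketch (hypothetical caller):

     tree dup = build_duplicate_type (orig);

   Because the decl_map starts out empty and copy_decl_no_change is the
   copy hook, remap_type_1 duplicates the type node together with every
   contained FIELD_DECL instead of sharing them, and pointing
   TYPE_CANONICAL at the copy itself makes DUP canonically distinct from
   ORIG in later type comparisons.  */
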
5134 /* Return whether it is safe to inline a function; it is not when the callee
5135    uses target-specific options different from the caller's or the call-site
5136    argument types mismatch the parameter types.  E is the call edge to check.  */
5137 bool
5138 tree_can_inline_p (struct cgraph_edge *e)
5139 {
5140 #if 0
5141   /* This causes a regression in SPEC in that it prevents a cold function from
5142      inlining a hot function.  Perhaps this should only apply to functions
5143      that the user declares hot/cold/optimize explicitly.  */
5144 
5145   /* Don't inline a function with a higher optimization level than the
5146      caller, or with different space constraints (hot/cold functions).  */
5147   tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5148   tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5149 
5150   if (caller_tree != callee_tree)
5151     {
5152       struct cl_optimization *caller_opt
5153 	= TREE_OPTIMIZATION ((caller_tree)
5154 			     ? caller_tree
5155 			     : optimization_default_node);
5156 
5157       struct cl_optimization *callee_opt
5158 	= TREE_OPTIMIZATION ((callee_tree)
5159 			     ? callee_tree
5160 			     : optimization_default_node);
5161 
5162       if ((caller_opt->optimize > callee_opt->optimize)
5163 	  || (caller_opt->optimize_size != callee_opt->optimize_size))
5164 	return false;
5165     }
5166 #endif
5167   tree caller, callee, lhs;
5168 
5169   caller = e->caller->decl;
5170   callee = e->callee->decl;
5171 
5172   /* We cannot inline a function that uses a different EH personality
5173      than the caller.  */
5174   if (DECL_FUNCTION_PERSONALITY (caller)
5175       && DECL_FUNCTION_PERSONALITY (callee)
5176       && (DECL_FUNCTION_PERSONALITY (caller)
5177 	  != DECL_FUNCTION_PERSONALITY (callee)))
5178     {
5179       e->inline_failed = CIF_UNSPECIFIED;
5180       gimple_call_set_cannot_inline (e->call_stmt, true);
5181       return false;
5182     }
5183 
5184   /* Allow the backend to decide if inlining is ok.  */
5185   if (!targetm.target_option.can_inline_p (caller, callee))
5186     {
5187       e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5188       gimple_call_set_cannot_inline (e->call_stmt, true);
5189       e->call_stmt_cannot_inline_p = true;
5190       return false;
5191     }
5192 
5193   /* Do not inline calls where we cannot trivially work around mismatches
5194      in argument or return types.  */
5195   if (e->call_stmt
5196       && ((DECL_RESULT (callee)
5197 	   && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5198 	   && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5199 	   && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5200 					  TREE_TYPE (lhs))
5201 	   && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5202 	  || !gimple_check_call_args (e->call_stmt)))
5203     {
5204       e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5205       gimple_call_set_cannot_inline (e->call_stmt, true);
5206       e->call_stmt_cannot_inline_p = true;
5207       return false;
5208     }
5209 
5210   return true;
5211 }
5212
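/* Illustration of the argument-mismatch rejection above, in a
   hypothetical translation unit:

     struct S { int i; };
     int f ();
     int f (struct S s) { return s.i; }
     ... f (1) ...

   The call type-checks against the unprototyped declaration but passes
   an int where the definition expects a struct S.  No trivial
   conversion can bridge that, so gimple_check_call_args fails, the edge
   is marked CIF_MISMATCHED_ARGUMENTS, and the call is left out of
   line.  */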