/* Tail call optimization on trees.
   Copyright (C) 2003-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "except.h"
#include "tree-eh.h"
#include "dbgcnt.h"
#include "cfgloop.h"
#include "common/common-target.h"
#include "ipa-utils.h"
#include "tree-ssa-live.h"

/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) so that
   when we reach a return x statement, we return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function is obviously preserved.  If we are
   guaranteed that the value of the accumulator never changes, we
   omit the accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
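
/* As a concrete instance of case 3 above (with a = 0 and m = n), the
   multiplicative accumulator lets us eliminate the recursion in

   int fact (int n)
   {
     if (n < 2)
       return 1;
     return n * fact (n - 1);
   }

   which then effectively behaves like

   int fact (int n)
   {
     int m_acc = 1;

     while (n >= 2)
       m_acc *= n--;

     return m_acc;   // the final "return 1" becomes "return m_acc * 1"
   }

   (Illustrative sketch only: the pass works on the GIMPLE form rather than
   C source, and for floating-point return types the reassociation
   additionally requires flag_associative_math, e.g. via -ffast-math.)  */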

/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;

/* Bitmap with a bit for each function parameter which is set to true if we
   have to copy the parameter for conversion of tail-recursive calls.  */

static bitmap tailr_arg_needs_copy;

static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* Various targets don't handle tail calls correctly in functions
     that call __builtin_eh_return.  */
  if (cfun->calls_eh_return)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}

/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's
   value at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */
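
/* For instance, if the return block merges the path containing the call
   with another path

     # x_3 = PHI <x_1 (call path), 5 (other path)>
     return x_3;

   then asking about x_3 relative to the call path follows the PHI argument
   x_1 and continues the walk from its definition.  (Hand-written GIMPLE for
   illustration; real SSA version numbers will differ.)  */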

static tree
independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi,
                       bitmap to_move)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  if (bitmap_bit_p (to_move, SSA_NAME_VERSION (expr)))
    return expr;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}

enum par { FAIL, OK, TRY_MOVE };

/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */
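
/* For example, the GIMPLE for "return n + sum (n - 1)" might look like

     _2 = n_1 - 1;
     t1_3 = sum (_2);
     t2_4 = n_1 + t1_3;
     return t2_4;

   Processing the assignment "t2_4 = n_1 + t1_3" with *ASS_VAR == t1_3 hits
   the PLUS_EXPR case below and records *A = n_1 and *ASS_VAR = t2_4,
   leaving *M untouched.  (Illustrative GIMPLE with made-up SSA names.)  */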

static par
process_assignment (gassign *stmt,
                    gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var, bitmap to_move)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && src_var == *ass_var)
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt))
	{
	  if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	    return FAIL;

	  /* Even if the type modes are the same, if the precision of the
	     type is smaller than mode's precision,
	     reduce_to_bit_field_precision would generate additional code.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
	      && !type_has_mode_precision_p (TREE_TYPE (dest)))
	    return FAIL;
	}

      *ass_var = dest;
      return OK;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return FAIL;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return FAIL;

  if (rhs_class == GIMPLE_UNARY_RHS
      && op0 == *ass_var)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call,
						    to_move)))
    ;
  else if (*ass_var
	   && op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call,
						    to_move)))
    ;
  else
    return TRY_MOVE;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return OK;

    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
	return FAIL;
      *a = non_ass_var;
      *ass_var = dest;
      return OK;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return OK;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return OK;

    case MINUS_EXPR:
      if (*ass_var == op0)
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
	  *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
	}

      *ass_var = dest;
      return OK;

    default:
      return FAIL;
    }
}

/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }
  return var;
}

/* Argument for compute_live_vars/live_vars_at_stmt and what compute_live_vars
   returns.  Computed lazily, but just once for the function.  */
static live_vars_map *live_vars;
static vec<bitmap_head> live_vars_vec;

/* Finds tailcalls falling into basic block BB.  The list of found tailcalls
   is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple *stmt;
  gcall *call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, nops, predicts, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_code (stmt) == GIMPLE_NOP
	  || gimple_code (stmt) == GIMPLE_PREDICT
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = as_a <gcall *> (stmt);
	  ass_var = gimple_call_lhs (call);
	  break;
	}

      /* Allow simple copies between local variables, even if they're
	 aggregates.  */
      if (is_gimple_assign (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), cfun->decl)
	  && auto_var_in_fn_p (gimple_assign_rhs1 (stmt), cfun->decl))
	continue;

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register or local
     variable, we can't transform this into a tail or sibling call.
     This situation happens in, e.g., "*p = foo ()" where foo returns a
     struct.  In this case we won't have a temporary here, but we need
     to carry out the side effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call () will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var
      && !is_gimple_reg (ass_var)
      && !auto_var_in_fn_p (ass_var, cfun->decl))
    return;

  /* If the call might throw an exception that wouldn't propagate out of
     cfun, we can't transform to a tail or sibling call (PR 82081).  */
  if (stmt_could_throw_p (cfun, stmt)
      && !stmt_can_throw_external (cfun, stmt))
    return;

  /* If the function returns a value, then at present, the tail call
     must return the same type of value.  There is conceptually a copy
     between the object returned by the tail call candidate and the
     object returned by CFUN itself.

     This means that if we have:

	 lhs = f (&<retval>);    // f reads from <retval>
				 // (lhs is usually also <retval>)

     there is a copy between the temporary object returned by f and lhs,
     meaning that any use of <retval> in f occurs before the assignment
     to lhs begins.  Thus the <retval> that is live on entry to the call
     to f is really an independent local variable V that happens to be
     stored in the RESULT_DECL rather than a local VAR_DECL.

     Turning this into a tail call would remove the copy and make the
     lifetimes of the return value and V overlap.  The same applies to
     tail recursion, since if f can read from <retval>, we have to assume
     that CFUN might already have written to <retval> before the call.

     The problem doesn't apply when <retval> is passed by value, but that
     isn't a case we handle anyway.  */
  tree result_decl = DECL_RESULT (cfun->decl);
  if (result_decl
      && may_be_aliased (result_decl)
      && ref_maybe_used_by_stmt_p (call, result_decl))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !fndecl_built_in_p (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Compute live vars if not computed yet.  */
  if (live_vars == NULL)
    {
      unsigned int cnt = 0;
      FOR_EACH_LOCAL_DECL (cfun, idx, var)
	if (VAR_P (var)
	    && auto_var_in_fn_p (var, cfun->decl)
	    && may_be_aliased (var))
	  {
	    if (live_vars == NULL)
	      live_vars = new live_vars_map;
	    live_vars->put (DECL_UID (var), cnt++);
	  }
      if (live_vars)
	live_vars_vec = compute_live_vars (cfun, live_vars);
    }

  /* Determine a bitmap of variables which are still in scope after the
     call.  */
  bitmap local_live_vars = NULL;
  if (live_vars)
    local_live_vars = live_vars_at_stmt (live_vars_vec, live_vars, call);

  /* Make sure the tail invocation of this function does not indirectly
     refer to local variables.  (Passing variables directly by value
     is OK.)  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && may_be_aliased (var)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	{
	  if (!VAR_P (var))
	    {
	      if (local_live_vars)
		BITMAP_FREE (local_live_vars);
	      return;
	    }
	  else
	    {
	      unsigned int *v = live_vars->get (DECL_UID (var));
	      if (bitmap_bit_p (local_live_vars, *v))
		{
		  BITMAP_FREE (local_live_vars);
		  return;
		}
	    }
	}
    }

  if (local_live_vars)
    BITMAP_FREE (local_live_vars);

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;
  auto_bitmap to_move_defs;
  auto_vec<gimple *> to_move_stmts;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);
      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_NOP
	  || gimple_code (stmt) == GIMPLE_PREDICT
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      par ret = process_assignment (as_a <gassign *> (stmt), gsi,
				    &tmp_m, &tmp_a, &ass_var, to_move_defs);
      if (ret == FAIL)
	return;
      else if (ret == TRY_MOVE)
	{
	  if (! tail_recursion)
	    return;
	  /* Do not deal with checking dominance, the real fix is to
	     do path isolation for the transform phase anyway, removing
	     the need to compute the accumulators with new stmts.  */
	  if (abb != bb)
	    return;
	  for (unsigned opno = 1; opno < gimple_num_ops (stmt); ++opno)
	    {
	      tree op = gimple_op (stmt, opno);
	      if (independent_of_stmt_p (op, stmt, gsi, to_move_defs) != op)
		return;
	    }
	  bitmap_set_bit (to_move_defs,
			  SSA_NAME_VERSION (gimple_assign_lhs (stmt)));
	  to_move_stmts.safe_push (stmt);
	  continue;
	}

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (as_a <greturn *> (stmt));

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  /* Move queued defs.  */
  if (tail_recursion)
    {
      unsigned i;
      FOR_EACH_VEC_ELT (to_move_stmts, i, stmt)
	{
	  gimple_stmt_iterator mgsi = gsi_for_stmt (stmt);
	  gsi_move_before (&mgsi, &gsi);
	}
      if (!tailr_arg_needs_copy)
	tailr_arg_needs_copy = BITMAP_ALLOC (NULL);
      for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
	   param;
	   param = DECL_CHAIN (param), idx++)
	{
	  tree ddef, arg = gimple_call_arg (call, idx);
	  if (is_gimple_reg (param)
	      && (ddef = ssa_default_def (cfun, param))
	      && (arg != ddef))
	    bitmap_set_bit (tailr_arg_needs_copy, idx);
	}
    }

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}

/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi.phi ()) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION);
}

/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 to a new variable with name LABEL and inserts the
   statement in the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gassign *stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign (result, code, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
	tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
	tem = fold_build2 (code, TREE_TYPE (op1),
			   fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}

/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and insert the statement
   at the position specified by GSI as a new statement.  Returns new SSA name
   of updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gassign *stmt;
  tree var = copy_ssa_name (acc);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign (var, code, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}

/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}

/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb)));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
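
/* For a plain "return x_1" with both accumulators live, the adjustment
   above yields GIMPLE along the lines of

     mul_tmp_2 = m_acc_3 * x_1;
     acc_tmp_4 = a_acc_5 + mul_tmp_2;
     return acc_tmp_4;

   i.e. case 1 from the overview comment at the top of the file.
   (Sketch with invented SSA names; the actual statements may include
   conversions emitted by adjust_return_value_with_ops.)  */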

/* Subtract COUNT from the basic block BB and its outgoing edge.  */
static void
decrease_profile (basic_block bb, profile_count count)
{
  bb->count = bb->count - count;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
}

/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple *stmt, *call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gphi *phi;
  gphi_iterator gpi;
  gimple_stmt_iterator gsi;
  gimple *orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gimple_stmt_iterator gsi2 = gsi_last_bb (gimple_bb (gsi_stmt (gsi)));
  while (gsi_stmt (gsi2) != gsi_stmt (gsi))
    {
      gimple *t = gsi_stmt (gsi2);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) != GIMPLE_RETURN)
	{
	  gimple_stmt_iterator gsi3 = gsi2;
	  gsi_prev (&gsi2);
	  gsi_remove (&gsi3, true);
	  release_defs (t);
	}
      else
	gsi_prev (&gsi2);
    }

  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));

  profile_count count = e->count ();

  /* When the profile is inconsistent and the recursion edge is more frequent
     than the number of executions of the function, scale it down, so we do
     not end up with 0 executions of the entry block.  */
  if (count >= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
    count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (7, 8);
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), count);
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), count);
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, count);

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gpi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!bitmap_bit_p (tailr_arg_needs_copy, idx))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gpi.phi ();
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gpi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE && TREE_CODE (rslt) == SSA_NAME)
    {
      /* The result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}

/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi));

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}

/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with a single predecessor, with an initial value
   INIT converted to the current function return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gphi *phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
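
/* For the sum example at the top of the file, the additive accumulator is
   created with create_tailcall_accumulator ("add_acc", first,
   integer_zero_node), which places something like

     # add_acc_1 = PHI <0 (entry edge)>

   at the head of FIRST; eliminate_tail_call later adds the argument for
   the back edge via adjust_accumulator_values.  (Illustrative dump with
   made-up names.)  */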

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  gimple *stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  if (live_vars)
    {
      destroy_live_vars (live_vars_vec);
      delete live_vars;
      live_vars = NULL;
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that FIRST has a single predecessor and no existing
	     PHI nodes, splitting the entry edge otherwise.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first =
	      split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

	  /* Copy the args if needed.  */
	  unsigned idx;
	  for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
	       param;
	       param = DECL_CHAIN (param), idx++)
	    if (bitmap_bit_p (tailr_arg_needs_copy, idx))
	      {
		tree name = ssa_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gphi *phi;

		set_ssa_default_def (cfun, param, new_name);
		phi = create_phi_node (name, first);
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls to non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (tailr_arg_needs_copy)
    BITMAP_FREE (tailr_arg_needs_copy);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}

namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *)
    {
      return tree_optimize_tail_calls_1 (false);
    }

}; // class pass_tail_recursion

} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}

namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *) { return execute_tail_calls (); }

}; // class pass_tail_calls

} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}