/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save the registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
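
/* As an illustrative (hypothetical) example of the situation described
   above: in a function such as

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, s = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         s += va_arg (ap, int);
       va_end (ap);
       return s;
     }

   va_arg is only ever used with an integral type and ap never escapes, so
   on targets with separate GPR/FPR save areas only the general purpose
   argument registers have to be spilled at function entry.  */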


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  auto_vec<edge, 10> stack;
  edge e;
  edge_iterator ei;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  return ret;
}


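/* As an illustrative sketch (not actual compiler output) of what
   va_list_counter_bump below is meant to recognize, assume a struct-based
   va_list with a field named gp_offset that counts how many bytes of the
   general purpose register save area have been consumed.  A va_arg for an
   integer typically leaves a GIMPLE sequence along the lines of

     _1 = ap.gp_offset;
     _2 = _1 + 8;
     ap.gp_offset = _2;

   For the store ap.gp_offset = _2, va_list_counter_bump walks the SSA
   chain from _2 back to the load of ap.gp_offset and returns the constant
   increment, 8 in this sketch.  */
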
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (VAR_P (var))
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}


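/* For reference (an assumption about one common layout, not something this
   file depends on): on targets like x86_64 the ABI va_list is a structure
   roughly of the form

     typedef struct {
       unsigned int gp_offset;
       unsigned int fp_offset;
       void *overflow_arg_area;
       void *reg_save_area;
     } va_list[1];

   and the backend exports the gp_offset/fp_offset fields as
   va_list_gpr_counter_field and va_list_fpr_counter_field.  The helpers
   below track loads and stores of exactly those counter fields.  */
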
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (!VAR_P (base)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


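/* For a simple-pointer va_list (void * or char *), a va_arg for an int is
   gimplified into a sequence roughly like (an illustrative sketch, SSA
   names and the increment of 8 invented for a 64-bit target):

     tem1 = ap;
     x = *(int *) tem1;
     tem2 = tem1 + 8;
     ap = tem2;

   va_list_ptr_read below recognizes the read tem1 = ap, and
   va_list_ptr_write recognizes the final write ap = tem2, crediting the
   constant increment to cfun->va_list_gpr_size.  */
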
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}


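/* A hypothetical example of the escape this helper guards against: if a
   temporary derived from the va_list is stored to memory, e.g.

     tem1 = ap;
     global_ptr = (char *) tem1 + 8;

   then a pointer into the register save area becomes visible outside the
   recognized va_arg sequences, and the pass has to assume that all
   argument registers must be saved.  */
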
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function local
   temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gphi *phi = i.phi ();

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_to_shwi (TREE_OPERAND (rhs, 1))
                                 + tree_to_uhwi (access_size);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                     other_ap_temp = ap_temp;
                     other_ap_temp = ap_temp + constant;
                     other_ap_temp = (some_type *) ap_temp;
                     ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (VAR_P (lhs)
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || !fndecl_built_in_p (callee, BUILT_IN_NORMAL))
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
              || !VAR_P (ap))
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  Also, check PHI nodes for taking address of
         the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gphi *phi = i.phi ();
          lhs = PHI_RESULT (phi);

          if (virtual_operand_p (lhs))
            continue;

          if (va_list_simple_ptr)
            {
              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }

          for (unsigned j = 0; !va_list_escapes
                               && j < gimple_phi_num_args (phi); ++j)
            if ((!va_list_simple_ptr
                 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
                && walk_tree (gimple_phi_arg_def_ptr (phi, j),
                              find_va_list_reference, &wi, NULL))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fputs ("va_list escapes in ", dump_file);
                    print_gimple_stmt (dump_file, phi, 0, dump_flags);
                    fputc ('\n', dump_file);
                  }
                va_list_escapes = true;
              }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && (fndecl_built_in_p (callee, BUILT_IN_VA_START)
                      || fndecl_built_in_p (callee, BUILT_IN_VA_END)))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              lhs = gimple_assign_lhs (stmt);
              rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                         tem1 = ap;
                         tem2 = tem1 + CST;
                         ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

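/* Illustrative sketch (not literal dump output): after gimplification a
   C statement such as

     x = va_arg (ap, int);

   is represented as a call to the internal function IFN_VA_ARG that takes
   the address of the va_list plus type information, roughly

     x = VA_ARG (&ap, ...);

   The function below replaces each such call with the target's expansion
   of va_arg, as produced by targetm.gimplify_va_arg_expr.  */
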
/* Expand IFN_VA_ARGs in FUN.  */

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple *stmt = gsi_stmt (i);
        tree ap, aptype, expr, lhs, type;
        gimple_seq pre = NULL, post = NULL;

        if (!gimple_call_internal_p (stmt, IFN_VA_ARG))
          continue;

        modified = true;

        type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
        ap = gimple_call_arg (stmt, 0);
        aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
        gcc_assert (POINTER_TYPE_P (aptype));

        /* Balance out the &ap, usually added by build_va_arg.  */
        ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
                     build_int_cst (aptype, 0));

        push_gimplify_context (false);
        saved_location = input_location;
        input_location = gimple_location (stmt);

        /* Make it easier for the backends by protecting the valist argument
           from multiple evaluations.  */
        gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

        expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

        lhs = gimple_call_lhs (stmt);
        if (lhs != NULL_TREE)
          {
            unsigned int nargs = gimple_call_num_args (stmt);
            gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

            if (nargs == 4)
              {
                /* We've transported the size from the WITH_SIZE_EXPR here
                   as the last argument of the internal fn call.  Now
                   reinstate it.  */
                tree size = gimple_call_arg (stmt, nargs - 1);
                expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
              }

            /* We use gimplify_assign here, rather than gimple_build_assign,
               because gimplify_assign knows how to deal with variable-sized
               types.  */
            gimplify_assign (lhs, expr, &pre);
          }
        else
          gimplify_and_add (expr, &pre);

        input_location = saved_location;
        pop_gimplify_context (NULL);

        gimple_seq_add_seq (&pre, post);
        update_modified_stmts (pre);

        /* Add the sequence after IFN_VA_ARG.  This splits the bb right
           after IFN_VA_ARG, and adds the sequence in one or more new bbs
           in between.  */
        gimple_find_sub_bbs (pre, &i);

        /* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
           bb.  */
        unlink_stmt_vdef (stmt);
        release_ssa_name_fn (fun, gimple_vdef (stmt));
        gsi_remove (&i, true);
        gcc_assert (gsi_end_p (i));

        /* We're walking here into the bbs which contain the expansion of
           IFN_VA_ARG, and which will not contain another IFN_VA_ARG that
           needs expanding.  We could try to skip walking these bbs, perhaps
           by walking backwards over gimples and bbs.  */
        break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

  if (flag_checking)
    {
      basic_block bb;
      gimple_stmt_iterator i;
      FOR_EACH_BB_FN (bb, fun)
        for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
          gcc_assert (!gimple_call_internal_p (gsi_stmt (i), IFN_VA_ARG));
    }
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
         also need to do that if fun->stdarg == 0, because a va_arg may also
         occur in a function without varargs, e.g. when passing a va_list to
         another function (see the example below).  */
      return true;
    }
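
  /* A hypothetical example of the situation mentioned in the gate comment
     above: a function that is not itself variadic but receives a va_list
     and uses va_arg on it, e.g.

       int
       first_int (va_list ap)
       {
         return va_arg (ap, int);
       }

     still contains an IFN_VA_ARG call that this pass has to expand even
     though fun->stdarg is 0.  */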

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}