/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-stdarg.h"
#include "tree-chkp.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral
   types in the function, floating point registers don't need to be
   saved, etc.  */


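/* As an illustration (a sketch, not taken from the sources), consider a
   variadic function that only ever fetches integral arguments:

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, s = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         s += va_arg (ap, int);
       va_end (ap);
       return s;
     }

   On a target with a struct-style va_list and separate GPR/FPR save areas
   (x86_64-like), the prologue of sum_ints only needs to spill the general
   purpose argument registers; the FP save area is never read through AP,
   so this pass can leave cfun->va_list_fpr_size at 0.  */
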
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

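/* For example (an illustrative sketch): if va_start is in the entry block
   and the matching va_arg sits inside a loop body, the va_arg block can run
   many times per va_start, so this returns false and the callers fall back
   to assuming the worst.  If the va_arg instead follows the va_start in
   straight-line code, every execution of it is preceded by exactly one
   execution of the va_start block and this returns true.  */
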
static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  auto_vec<edge, 10> stack;
  edge e;
  edge_iterator ei;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  return ret;
}


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

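/* E.g. (an illustrative sketch; the field name is x86_64-like and depends
   on the target's va_list layout) a va_arg for an integer argument may be
   lowered to a counter bump such as

     gp_offset.0_1 = ap.gp_offset;
     _2 = gp_offset.0_1 + 8;
     ap.gp_offset = _2;

   For the final store COUNTER is ap.gp_offset and RHS is _2; walking the
   SSA def chain of _2 back to the load of ap.gp_offset yields the constant
   8.  Statements that don't match one of the recognized copy/cast/+CST
   forms make the walk give up and return HOST_WIDE_INT_M1U.  */
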
static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
	return var;
    }
  else if (VAR_P (var))
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
	return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */

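/* E.g. (an illustrative sketch, again with an x86_64-like field name):

     gp_offset.0_1 = ap.gp_offset;   <- VAR = AP, WRITE_P is false
     ap.gp_offset = _2;              <- AP = VAR, WRITE_P is true

   Both forms are handled here, provided AP is a COMPONENT_REF of a tracked
   va_list variable and VAR is an SSA name that is not itself one of the
   tracked va_list variables.  */
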
static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (!VAR_P (base)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}


/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending on whether LHS is a function local temporary.  */

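/* E.g. (an illustrative sketch) with ap_temp_1 already recorded in
   va_list_escape_vars:

     _2 = ap_temp_1 + 8;       ->  _2 joins va_list_escape_vars, provided
                                   the counter bump is still computable
     some_global = ap_temp_1;  ->  LHS is not a local SSA temporary, so
                                   si->va_list_escapes is set.  */
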
static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

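/* E.g. (sketch) dereferencing a tracked temporary as part of a va_arg
   sequence, x = *ap_temp, is fine and merely bumps cfun->va_list_gpr_size,
   whereas passing ap_temp to another function or storing it into memory
   matches none of the patterns recognized below and makes this return
   true.  */
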
static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (VAR_P (lhs)
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */

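/* A summary of the code below:  the first walk over the function locates
   every va_start call and records the corresponding va_list variables,
   giving up if any of them is global or otherwise unsuitable; the second
   walk tracks reads and writes of those variables (and, for pointer-style
   va_list, of temporaries derived from them) to accumulate how many GPR
   and FPR save area units are actually consumed, falling back to "save
   everything" as soon as a va_list value might escape.  */
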
static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || !VAR_P (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  Also, check PHI nodes for taking address of
	 the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gphi *phi = i.phi ();
	  lhs = PHI_RESULT (phi);

	  if (virtual_operand_p (lhs))
	    continue;

	  if (va_list_simple_ptr)
	    {
	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }

	  for (unsigned j = 0; !va_list_escapes
			       && j < gimple_phi_num_args (phi); ++j)
	    if ((!va_list_simple_ptr
		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
			      find_va_list_reference, &wi, NULL))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fputs ("va_list escapes in ", dump_file);
		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
		    fputc ('\n', dump_file);
		  }
		va_list_escapes = true;
	      }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      lhs = gimple_assign_lhs (stmt);
	      rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Expand IFN_VA_ARGs in FUN.  */

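/* The IFN_VA_ARG call, as read back below, carries &AP as argument 0, a
   (typically null) pointer whose pointed-to type is the requested argument
   type as argument 1, and a pointer whose pointed-to type is the va_list
   type to access AP with as argument 2; an optional fourth argument carries
   the size for a variable-sized (WITH_SIZE_EXPR) result.  Roughly (a sketch
   of the dump form)

     x = VA_ARG (&ap, 0B, 0B);

   is replaced in place by the target's gimplified va_arg sequence, plus an
   assignment to x when the call has an LHS.  */
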
static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	tree ap, aptype, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_internal_p (stmt, IFN_VA_ARG))
	  continue;

	modified = true;

	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);
	aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
	gcc_assert (POINTER_TYPE_P (aptype));

	/* Balance out the &ap, usually added by build_va_arg.  */
	ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
		     build_int_cst (aptype, 0));

	push_gimplify_context (false);
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    /* We replace call with a new expr.  This may require
	       corresponding bndret call fixup.  */
	    if (chkp_function_instrumented_p (fun->decl))
	      chkp_fixup_inlined_call (lhs, expr);

	    if (nargs == 4)
	      {
		/* We've transported the size from the WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimplify_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  gimplify_and_add (expr, &pre);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   in between.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	unlink_stmt_vdef (stmt);
	release_ssa_name_fn (fun, gimple_vdef (stmt));
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

  if (flag_checking)
    {
      basic_block bb;
      gimple_stmt_iterator i;
      FOR_EACH_BB_FN (bb, fun)
	for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	  gcc_assert (!gimple_call_internal_p (gsi_stmt (i), IFN_VA_ARG));
    }
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
	 also need to do that if fun->stdarg == 0, because a va_arg may also
	 occur in a function without varargs, for instance when passing a
	 va_list to another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}