xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-stdarg.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Pass computing data for optimizing stdarg functions.
2    Copyright (C) 2004-2015 Free Software Foundation, Inc.
3    Contributed by Jakub Jelinek <jakub@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "hard-reg-set.h"
37 #include "input.h"
38 #include "function.h"
39 #include "langhooks.h"
40 #include "gimple-pretty-print.h"
41 #include "target.h"
42 #include "bitmap.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-alias.h"
48 #include "internal-fn.h"
49 #include "gimple-expr.h"
50 #include "is-a.h"
51 #include "gimple.h"
52 #include "gimple-iterator.h"
53 #include "gimple-walk.h"
54 #include "gimple-ssa.h"
55 #include "tree-phinodes.h"
56 #include "ssa-iterators.h"
57 #include "stringpool.h"
58 #include "tree-ssanames.h"
59 #include "sbitmap.h"
60 #include "tree-pass.h"
61 #include "tree-stdarg.h"
62 
63 /* A simple pass that attempts to optimize stdarg functions on architectures
64    that need to save register arguments to stack on entry to stdarg functions.
65    If the function doesn't use any va_start macros, no registers need to
66    be saved.  If va_start macros are used, the va_list variables don't escape
67    the function, it is only necessary to save registers that will be used
68    in va_arg macros.  E.g. if va_arg is only used with integral types
69    in the function, floating point registers don't need to be saved, etc.  */
70 
71 
72 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
73    is executed at most as many times as VA_START_BB.  */
74 
75 static bool
76 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
77 {
78   vec<edge> stack = vNULL;
79   edge e;
80   edge_iterator ei;
81   sbitmap visited;
82   bool ret;
83 
84   if (va_arg_bb == va_start_bb)
85     return true;
86 
87   if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
88     return false;
89 
90   visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
91   bitmap_clear (visited);
92   ret = true;
93 
94   FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
95     stack.safe_push (e);
96 
97   while (! stack.is_empty ())
98     {
99       basic_block src;
100 
101       e = stack.pop ();
102       src = e->src;
103 
104       if (e->flags & EDGE_COMPLEX)
105 	{
106 	  ret = false;
107 	  break;
108 	}
109 
110       if (src == va_start_bb)
111 	continue;
112 
113       /* va_arg_bb can be executed more times than va_start_bb.  */
114       if (src == va_arg_bb)
115 	{
116 	  ret = false;
117 	  break;
118 	}
119 
120       gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));
121 
122       if (! bitmap_bit_p (visited, src->index))
123 	{
124 	  bitmap_set_bit (visited, src->index);
125 	  FOR_EACH_EDGE (e, ei, src->preds)
126 	    stack.safe_push (e);
127 	}
128     }
129 
130   stack.release ();
131   sbitmap_free (visited);
132   return ret;
133 }
134 
135 
136 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
137    return constant, otherwise return HOST_WIDE_INT_M1U.
138    GPR_P is true if this is GPR counter.  */
139 
static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache.  -1 marks SSA name
     versions whose counter value hasn't been computed yet.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the SSA def chain backwards from RHS, summing the
     constant additions into RET, until we either reach a load of
     COUNTER itself or hit an SSA name with a cached offset.  Any
     statement we can't interpret makes the bump unknown.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Cached: adjust RET by the difference between the current
	     counter value and the cached value at this point, unless
	     the counter already saturated.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copies and casts: follow through.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = rhs1 + CST: accumulate the constant bump.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM[ptr + CST]: equivalent to ptr + CST.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* The chain must terminate at a load of COUNTER itself.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* For struct va_list, compare base and field.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: walk the same chain again, recording in the cache the
     counter value after each intermediate SSA name so later queries can
     stop early.  VAL starts at the value after RHS and shrinks by each
     bump as we move backwards towards the load of COUNTER.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      /* Saturate the cached value at MAX_SIZE.  */
      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* The statements below mirror the forms accepted by the first
	 pass, so no extra validation is needed here.  */
      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
285 
286 
287 /* Called by walk_tree to look for references to va_list variables.  */
288 
289 static tree
290 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
291 			void *data)
292 {
293   bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
294   tree var = *tp;
295 
296   if (TREE_CODE (var) == SSA_NAME)
297     {
298       if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
299 	return var;
300     }
301   else if (TREE_CODE (var) == VAR_DECL)
302     {
303       if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
304 	return var;
305     }
306 
307   return NULL_TREE;
308 }
309 
310 
311 /* Helper function of va_list_counter_struct_op.  Compute
312    cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
313    if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
314    statement.  GPR_P is true if AP is a GPR counter, false if it is
315    a FPR counter.  */
316 
static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily determine (once per basic block) whether precise sizes can
     be computed: that requires a single va_start whose block dominates
     this one and executes at least as often.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* The "+ 1 > 1" trick rejects both a zero increment and the
     HOST_WIDE_INT_M1U failure value with a single comparison.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Couldn't compute a precise size for a counter write (or can't
     trust the sizes at all): conservatively assume all registers of
     that kind need saving.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
362 
363 
364 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
365    If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
366    is false, AP has been seen in VAR = AP assignment.
367    Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
368    va_arg operation that doesn't cause the va_list variable to escape
369    current function.  */
370 
371 static bool
372 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
373 			   bool write_p)
374 {
375   tree base;
376 
377   if (TREE_CODE (ap) != COMPONENT_REF
378       || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
379     return false;
380 
381   if (TREE_CODE (var) != SSA_NAME
382       || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
383     return false;
384 
385   base = get_base_address (ap);
386   if (TREE_CODE (base) != VAR_DECL
387       || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
388     return false;
389 
390   if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
391     va_list_counter_op (si, ap, var, true, write_p);
392   else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
393     va_list_counter_op (si, ap, var, false, write_p);
394 
395   return true;
396 }
397 
398 
399 /* Check for TEM = AP.  Return true if found and the caller shouldn't
400    search for va_list references in the statement.  */
401 
static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked local va_list variables.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM must be an SSA temporary that is not itself a tracked
     va_list variable.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily determine (once per basic block) whether precise sizes can
     be computed; see va_list_counter_op for the same pattern.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* Bail out if the value of AP at this point can't be expressed as
     the va_list counter plus a known constant.  */
  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
442 
443 
444 /* Check for:
445      tem1 = AP;
446      TEM2 = tem1 + CST;
447      AP = TEM2;
448    sequence and update cfun->va_list_gpr_size.  Return true if found.  */
449 
450 static bool
451 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
452 {
453   unsigned HOST_WIDE_INT increment;
454 
455   if (TREE_CODE (ap) != VAR_DECL
456       || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
457     return false;
458 
459   if (TREE_CODE (tem2) != SSA_NAME
460       || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
461     return false;
462 
463   if (si->compute_sizes <= 0)
464     return false;
465 
466   increment = va_list_counter_bump (si, ap, tem2, true);
467   if (increment + 1 <= 1)
468     return false;
469 
470   if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
471     cfun->va_list_gpr_size += increment;
472   else
473     cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
474 
475   return true;
476 }
477 
478 
479 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
480    containing value of some va_list variable plus optionally some constant,
481    either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
482    depending whether LHS is a function local temporary.  */
483 
static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  /* Only pointer-valued RHS can carry a va_list value here.  */
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* RHS must reference a tracked escape temporary, either directly or
     through &MEM[tmp + CST]; anything else is irrelevant.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Copying the value into anything but an SSA temporary (e.g. memory)
     lets it escape tracking.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily determine (once per basic block) whether precise sizes can
     be computed; see va_list_counter_op for the same pattern.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* If LHS's offset from the va_start value can't be computed, give
     up and treat the va_list as escaping.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Otherwise keep tracking LHS as another escape temporary.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
544 
545 
546 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
547    Return true if va_list might be escaping.  */
548 
static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* A PHI whose result is not itself a tracked escape temporary
	 but that reads one would copy the va_list value somewhere we
	 don't follow — treat that as an escape.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      /* Scan every non-debug statement for uses of the tracked escape
	 temporaries and whitelist only the forms a gimplified va_arg
	 sequence can produce.  */
      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  (the actual argument load — widen
		     the GPR save area to cover the accessed bytes)  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
			  	 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copies into another tracked temporary or back
			 into a tracked va_list variable are fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  /* other_ap_temp = &MEM[ap_temp + CST];  */
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any other use of an escape temporary counts as the
		 va_list escaping.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
680 
681 
682 namespace {
683 
/* Pass metadata for the stdarg optimization pass; see tree-pass.h for
   the meaning of the individual pass_data fields.  */

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
696 
/* The stdarg pass object.  Runs only when -fstdarg-opt is enabled and
   the function actually takes variable arguments.  */

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      return (flag_stdarg_opt
#ifdef ACCEL_COMPILER
	      /* Disable for GCC5 in the offloading compilers, as
		 va_list and gpr/fpr counter fields are not merged.
		 In GCC6 when stdarg is lowered late this shouldn't be
		 an issue.  */
	      && !in_lto_p
#endif
	      /* This optimization is only for stdarg functions.  */
	      && fun->stdarg != 0);
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
722 
723 unsigned int
724 pass_stdarg::execute (function *fun)
725 {
726   basic_block bb;
727   bool va_list_escapes = false;
728   bool va_list_simple_ptr;
729   struct stdarg_info si;
730   struct walk_stmt_info wi;
731   const char *funcname = NULL;
732   tree cfun_va_list;
733 
734   fun->va_list_gpr_size = 0;
735   fun->va_list_fpr_size = 0;
736   memset (&si, 0, sizeof (si));
737   si.va_list_vars = BITMAP_ALLOC (NULL);
738   si.va_list_escape_vars = BITMAP_ALLOC (NULL);
739 
740   if (dump_file)
741     funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
742 
743   cfun_va_list = targetm.fn_abi_va_list (fun->decl);
744   va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
745 		       && (TREE_TYPE (cfun_va_list) == void_type_node
746 			   || TREE_TYPE (cfun_va_list) == char_type_node);
747   gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
748 
749   FOR_EACH_BB_FN (bb, fun)
750     {
751       gimple_stmt_iterator i;
752 
753       for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
754 	{
755 	  gimple stmt = gsi_stmt (i);
756 	  tree callee, ap;
757 
758 	  if (!is_gimple_call (stmt))
759 	    continue;
760 
761 	  callee = gimple_call_fndecl (stmt);
762 	  if (!callee
763 	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
764 	    continue;
765 
766 	  switch (DECL_FUNCTION_CODE (callee))
767 	    {
768 	    case BUILT_IN_VA_START:
769 	      break;
770 	      /* If old style builtins are used, don't optimize anything.  */
771 	    case BUILT_IN_SAVEREGS:
772 	    case BUILT_IN_NEXT_ARG:
773 	      va_list_escapes = true;
774 	      continue;
775 	    default:
776 	      continue;
777 	    }
778 
779 	  si.va_start_count++;
780 	  ap = gimple_call_arg (stmt, 0);
781 
782 	  if (TREE_CODE (ap) != ADDR_EXPR)
783 	    {
784 	      va_list_escapes = true;
785 	      break;
786 	    }
787 	  ap = TREE_OPERAND (ap, 0);
788 	  if (TREE_CODE (ap) == ARRAY_REF)
789 	    {
790 	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
791 	        {
792 	          va_list_escapes = true;
793 	          break;
794 		}
795 	      ap = TREE_OPERAND (ap, 0);
796 	    }
797 	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
798 	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
799 	      || TREE_CODE (ap) != VAR_DECL)
800 	    {
801 	      va_list_escapes = true;
802 	      break;
803 	    }
804 
805 	  if (is_global_var (ap))
806 	    {
807 	      va_list_escapes = true;
808 	      break;
809 	    }
810 
811 	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
812 
813 	  /* VA_START_BB and VA_START_AP will be only used if there is just
814 	     one va_start in the function.  */
815 	  si.va_start_bb = bb;
816 	  si.va_start_ap = ap;
817 	}
818 
819       if (va_list_escapes)
820 	break;
821     }
822 
823   /* If there were no va_start uses in the function, there is no need to
824      save anything.  */
825   if (si.va_start_count == 0)
826     goto finish;
827 
828   /* If some va_list arguments weren't local, we can't optimize.  */
829   if (va_list_escapes)
830     goto finish;
831 
832   /* For void * or char * va_list, something useful can be done only
833      if there is just one va_start.  */
834   if (va_list_simple_ptr && si.va_start_count > 1)
835     {
836       va_list_escapes = true;
837       goto finish;
838     }
839 
840   /* For struct * va_list, if the backend didn't tell us what the counter fields
841      are, there is nothing more we can do.  */
842   if (!va_list_simple_ptr
843       && va_list_gpr_counter_field == NULL_TREE
844       && va_list_fpr_counter_field == NULL_TREE)
845     {
846       va_list_escapes = true;
847       goto finish;
848     }
849 
850   /* For void * or char * va_list there is just one counter
851      (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
852   if (va_list_simple_ptr)
853     fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
854 
855   calculate_dominance_info (CDI_DOMINATORS);
856   memset (&wi, 0, sizeof (wi));
857   wi.info = si.va_list_vars;
858 
859   FOR_EACH_BB_FN (bb, fun)
860     {
861       si.compute_sizes = -1;
862       si.bb = bb;
863 
864       /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
865 	 them as assignments for the purpose of escape analysis.  This is
866 	 not needed for non-simple va_list because virtual phis don't perform
867 	 any real data movement.  Also, check PHI nodes for taking address of
868 	 the va_list vars.  */
869       tree lhs, rhs;
870       use_operand_p uop;
871       ssa_op_iter soi;
872 
873       for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
874 	   gsi_next (&i))
875 	{
876 	  gphi *phi = i.phi ();
877 	  lhs = PHI_RESULT (phi);
878 
879 	  if (virtual_operand_p (lhs))
880 	    continue;
881 
882 	  if (va_list_simple_ptr)
883 	    {
884 	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
885 		{
886 		  rhs = USE_FROM_PTR (uop);
887 		  if (va_list_ptr_read (&si, rhs, lhs))
888 		    continue;
889 		  else if (va_list_ptr_write (&si, lhs, rhs))
890 		    continue;
891 		  else
892 		    check_va_list_escapes (&si, lhs, rhs);
893 
894 		  if (si.va_list_escapes)
895 		    {
896 		      if (dump_file && (dump_flags & TDF_DETAILS))
897 			{
898 			  fputs ("va_list escapes in ", dump_file);
899 			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
900 			  fputc ('\n', dump_file);
901 			}
902 		      va_list_escapes = true;
903 		    }
904 		}
905 	    }
906 
907 	  for (unsigned j = 0; !va_list_escapes
908 			       && j < gimple_phi_num_args (phi); ++j)
909 	    if ((!va_list_simple_ptr
910 		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
911 		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
912 			      find_va_list_reference, &wi, NULL))
913 	      {
914 		if (dump_file && (dump_flags & TDF_DETAILS))
915 		  {
916 		    fputs ("va_list escapes in ", dump_file);
917 		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
918 		    fputc ('\n', dump_file);
919 		  }
920 		va_list_escapes = true;
921 	      }
922 	}
923 
924       for (gimple_stmt_iterator i = gsi_start_bb (bb);
925 	   !gsi_end_p (i) && !va_list_escapes;
926 	   gsi_next (&i))
927 	{
928 	  gimple stmt = gsi_stmt (i);
929 
930 	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
931 	  if (is_gimple_call (stmt))
932 	    {
933 	      tree callee = gimple_call_fndecl (stmt);
934 
935 	      if (callee
936 		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
937 		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
938 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
939 		continue;
940 	    }
941 
942 	  if (is_gimple_assign (stmt))
943 	    {
944 	      lhs = gimple_assign_lhs (stmt);
945 	      rhs = gimple_assign_rhs1 (stmt);
946 
947 	      if (va_list_simple_ptr)
948 		{
949 		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
950 		      == GIMPLE_SINGLE_RHS)
951 		    {
952 		      /* Check for ap ={v} {}.  */
953 		      if (TREE_CLOBBER_P (rhs))
954 			continue;
955 
956 		      /* Check for tem = ap.  */
957 		      else if (va_list_ptr_read (&si, rhs, lhs))
958 			continue;
959 
960 		      /* Check for the last insn in:
961 			 tem1 = ap;
962 			 tem2 = tem1 + CST;
963 			 ap = tem2;
964 			 sequence.  */
965 		      else if (va_list_ptr_write (&si, lhs, rhs))
966 			continue;
967 		    }
968 
969 		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
970 		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
971 		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
972 		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
973 			  == GIMPLE_SINGLE_RHS))
974 		    check_va_list_escapes (&si, lhs, rhs);
975 		}
976 	      else
977 		{
978 		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
979 		      == GIMPLE_SINGLE_RHS)
980 		    {
981 		      /* Check for ap ={v} {}.  */
982 		      if (TREE_CLOBBER_P (rhs))
983 			continue;
984 
985 		      /* Check for ap[0].field = temp.  */
986 		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
987 			continue;
988 
989 		      /* Check for temp = ap[0].field.  */
990 		      else if (va_list_counter_struct_op (&si, rhs, lhs,
991 							  false))
992 			continue;
993 		    }
994 
995 		  /* Do any architecture specific checking.  */
996 		  if (targetm.stdarg_optimize_hook
997 		      && targetm.stdarg_optimize_hook (&si, stmt))
998 		    continue;
999 		}
1000 	    }
1001 	  else if (is_gimple_debug (stmt))
1002 	    continue;
1003 
1004 	  /* All other uses of va_list are either va_copy (that is not handled
1005 	     in this optimization), taking address of va_list variable or
1006 	     passing va_list to other functions (in that case va_list might
1007 	     escape the function and therefore va_start needs to set it up
1008 	     fully), or some unexpected use of va_list.  None of these should
1009 	     happen in a gimplified VA_ARG_EXPR.  */
1010 	  if (si.va_list_escapes
1011 	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
1012 	    {
1013 	      if (dump_file && (dump_flags & TDF_DETAILS))
1014 		{
1015 		  fputs ("va_list escapes in ", dump_file);
1016 		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1017 		  fputc ('\n', dump_file);
1018 		}
1019 	      va_list_escapes = true;
1020 	    }
1021 	}
1022 
1023       if (va_list_escapes)
1024 	break;
1025     }
1026 
1027   if (! va_list_escapes
1028       && va_list_simple_ptr
1029       && ! bitmap_empty_p (si.va_list_escape_vars)
1030       && check_all_va_list_escapes (&si))
1031     va_list_escapes = true;
1032 
1033 finish:
1034   if (va_list_escapes)
1035     {
1036       fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
1037       fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
1038     }
1039   BITMAP_FREE (si.va_list_vars);
1040   BITMAP_FREE (si.va_list_escape_vars);
1041   free (si.offsets);
1042   if (dump_file)
1043     {
1044       fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
1045 	       funcname, (int) va_list_escapes);
1046       if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
1047 	fputs ("all", dump_file);
1048       else
1049 	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
1050       fputs (" GPR units and ", dump_file);
1051       if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
1052 	fputs ("all", dump_file);
1053       else
1054 	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
1055       fputs (" FPR units.\n", dump_file);
1056     }
1057   return 0;
1058 }
1059 
1060 } // anon namespace
1061 
1062 gimple_opt_pass *
1063 make_pass_stdarg (gcc::context *ctxt)
1064 {
1065   return new pass_stdarg (ctxt);
1066 }
1067