xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-ssa-ccp.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Conditional constant propagation pass for the GNU compiler.
2    Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3    2010 Free Software Foundation, Inc.
4    Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5    Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 /* Conditional constant propagation (CCP) is based on the SSA
24    propagation engine (tree-ssa-propagate.c).  Constant assignments of
25    the form VAR = CST are propagated from the assignments into uses of
26    VAR, which in turn may generate new constants.  The simulation uses
27    a four-level lattice to keep track of constant values associated
28    with SSA names.  Given an SSA name V_i, it may take one of the
29    following values:
30 
31 	UNINITIALIZED   ->  the initial state of the value.  This value
32 			    is replaced with a correct initial value
33 			    the first time the value is used, so the
34 			    rest of the pass does not need to care about
35 			    it.  Using this value simplifies initialization
36 			    of the pass, and prevents us from needlessly
37 			    scanning statements that are never reached.
38 
39 	UNDEFINED	->  V_i is a local variable whose definition
40 			    has not been processed yet.  Therefore we
41 			    don't yet know if its value is a constant
42 			    or not.
43 
44 	CONSTANT	->  V_i has been found to hold a constant
45 			    value C.
46 
47 	VARYING		->  V_i cannot take a constant value, or if it
48 			    does, it is not possible to determine it
49 			    at compile time.
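
   For instance (with hypothetical SSA names), in

			x_1 = 4;
			y_2 = x_1 + 1;

   x_1 starts out UNINITIALIZED, becomes CONSTANT 4 when its
   definition is visited, and y_2 is then evaluated to CONSTANT 5.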
50 
51    The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52 
53    1- In ccp_visit_stmt, we are interested in assignments whose RHS
54       evaluates into a constant and conditional jumps whose predicate
55       evaluates into a boolean true or false.  When an assignment of
56       the form V_i = CONST is found, V_i's lattice value is set to
57       CONSTANT and CONST is associated with it.  This causes the
58 			    propagation engine to add all the SSA edges coming out of the
59       assignment into the worklists, so that statements that use V_i
60       can be visited.
61 
62       If the statement is a conditional with a constant predicate, we
63       mark the outgoing edges as executable or not executable
64       depending on the predicate's value.  This is then used when
65       visiting PHI nodes to know when a PHI argument can be ignored.
66 
67 
68    2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69       same constant C, then the LHS of the PHI is set to C.  This
70       evaluation is known as the "meet operation".  Since one of the
71       goals of this evaluation is to optimistically return constant
72       values as often as possible, it uses two main short cuts:
73 
74       - If an argument is flowing in through a non-executable edge, it
75 	is ignored.  This is useful in cases like this:
76 
77 			if (PRED)
78 			  a_9 = 3;
79 			else
80 			  a_10 = 100;
81 			a_11 = PHI (a_9, a_10)
82 
83 	If PRED is known to always evaluate to false, then we can
84 	assume that a_11 will always take its value from a_10, meaning
85 	that instead of considering it VARYING (a_9 and a_10 have
86 	different values), we can consider it CONSTANT 100.
87 
88       - If an argument has an UNDEFINED value, then it does not affect
89 	the outcome of the meet operation.  If a variable V_i has an
90 	UNDEFINED value, it means that either its defining statement
91 	hasn't been visited yet or V_i has no defining statement, in
92 	which case the original symbol 'V' is being used
93 	uninitialized.  Since 'V' is a local variable, the compiler
94 	may assume any initial value for it.
95 
96 
97    After propagation, every variable V_i that ends up with a lattice
98    value of CONSTANT will have the associated constant value in the
99    array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
100    final substitution and folding.
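
   Continuing the example above, substitute_and_fold would rewrite
   'y_2 = x_1 + 1' into 'y_2 = 5' and replace the remaining uses of
   x_1 with 4.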
101 
102 
103    Constant propagation in stores and loads (STORE-CCP)
104    ----------------------------------------------------
105 
106    While CCP has all the logic to propagate constants in GIMPLE
107    registers, it is missing the ability to associate constants with
108    stores and loads (i.e., pointer dereferences, structures and
109    global/aliased variables).  We don't keep loads and stores in
110    SSA, but we do build a factored use-def web for them (in the
111    virtual operands).
112 
113    For instance, consider the following code fragment:
114 
115 	  struct A a;
116 	  const int B = 42;
117 
118 	  void foo (int i)
119 	  {
120 	    if (i > 10)
121 	      a.a = 42;
122 	    else
123 	      {
124 		a.b = 21;
125 		a.a = a.b + 21;
126 	      }
127 
128 	    if (a.a != B)
129 	      never_executed ();
130 	  }
131 
132    We should be able to deduce that the predicate 'a.a != B' is always
133    false.  To achieve this, we associate constant values to the SSA
134    names in the VDEF operands for each store.  Additionally,
135    since we also glob partial loads/stores with the base symbol, we
136    also keep track of the memory reference where the constant value
137    was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,
138 
139         # a_5 = VDEF <a_4>
140         a.a = 2;
141 
142         # VUSE <a_5>
143         x_3 = a.b;
144 
145    In the example above, CCP will associate value '2' with 'a_5', but
146    it would be wrong to replace the load from 'a.b' with '2', because
147    '2' had been stored into a.a.
148 
149    Note that the initial value of virtual operands is VARYING, not
150    UNDEFINED.  Consider, for instance global variables:
151 
152    	int A;
153 
154    	foo (int i)
155   	{
156 	  if (i_3 > 10)
157 	    A_4 = 3;
158           # A_5 = PHI (A_4, A_2);
159 
160 	  # VUSE <A_5>
161 	  A.0_6 = A;
162 
163 	  return A.0_6;
164 	}
165 
166    The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167    been defined outside of foo.  If we were to assume it UNDEFINED, we
168    would erroneously optimize the above into 'return 3;'.
169 
170    Though STORE-CCP is not too expensive, it does have to do more work
171    than regular CCP, so it is only enabled at -O2.  Both regular CCP
172    and STORE-CCP use the exact same algorithm.  The only distinction
173    is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174    set to true.  This affects the evaluation of statements and PHI
175    nodes.
176 
177    References:
178 
179      Constant propagation with conditional branches,
180      Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
181 
182      Building an Optimizing Compiler,
183      Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
184 
185      Advanced Compiler Design and Implementation,
186      Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
187 
188 #include "config.h"
189 #include "system.h"
190 #include "coretypes.h"
191 #include "tm.h"
192 #include "tree.h"
193 #include "flags.h"
194 #include "rtl.h"
195 #include "tm_p.h"
196 #include "ggc.h"
197 #include "basic-block.h"
198 #include "output.h"
199 #include "expr.h"
200 #include "function.h"
201 #include "diagnostic.h"
202 #include "timevar.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
209 #include "target.h"
210 #include "toplev.h"
211 #include "dbgcnt.h"
212 
213 
214 /* Possible lattice values.  */
215 typedef enum
216 {
217   UNINITIALIZED,
218   UNDEFINED,
219   CONSTANT,
220   VARYING
221 } ccp_lattice_t;
222 
223 /* Array of propagated constant values.  After propagation,
224    CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  */
229 static prop_value_t *const_val;
230 
231 static void canonicalize_float_value (prop_value_t *);
232 static bool ccp_fold_stmt (gimple_stmt_iterator *);
233 
234 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */
235 
236 static void
237 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
238 {
239   switch (val.lattice_val)
240     {
241     case UNINITIALIZED:
242       fprintf (outf, "%sUNINITIALIZED", prefix);
243       break;
244     case UNDEFINED:
245       fprintf (outf, "%sUNDEFINED", prefix);
246       break;
247     case VARYING:
248       fprintf (outf, "%sVARYING", prefix);
249       break;
250     case CONSTANT:
251       fprintf (outf, "%sCONSTANT ", prefix);
252       print_generic_expr (outf, val.value, dump_flags);
253       break;
254     default:
255       gcc_unreachable ();
256     }
257 }
258 
259 
260 /* Print lattice value VAL to stderr.  */
261 
262 void debug_lattice_value (prop_value_t val);
263 
264 void
265 debug_lattice_value (prop_value_t val)
266 {
267   dump_lattice_value (stderr, "", val);
268   fprintf (stderr, "\n");
269 }
270 
271 
272 
273 /* If SYM is a constant variable with known value, return the value.
274    NULL_TREE is returned otherwise.  */
275 
276 tree
277 get_symbol_constant_value (tree sym)
278 {
279   if (TREE_STATIC (sym)
280       && ((TREE_READONLY (sym) && !TREE_THIS_VOLATILE (sym))
281 	  || TREE_CODE (sym) == CONST_DECL))
282     {
283       tree val = DECL_INITIAL (sym);
284       if (val)
285 	{
286 	  STRIP_NOPS (val);
287 	  if (is_gimple_min_invariant (val))
288 	    {
289 	      if (TREE_CODE (val) == ADDR_EXPR)
290 		{
291 		  tree base = get_base_address (TREE_OPERAND (val, 0));
292 		  if (base && TREE_CODE (base) == VAR_DECL)
293 		    {
294 		      TREE_ADDRESSABLE (base) = 1;
295 		      if (gimple_referenced_vars (cfun))
296 			add_referenced_var (base);
297 		    }
298 		}
299 	      return val;
300 	    }
301 	}
302       /* Variables declared 'const' without an initializer
303 	 have zero as the initializer if they may not be
304 	 overridden at link or run time.  */
305       if (!val
306 	  && !DECL_EXTERNAL (sym)
307 	  && targetm.binds_local_p (sym)
308           && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
309 	       || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
310 	return fold_convert (TREE_TYPE (sym), integer_zero_node);
311     }
312 
313   return NULL_TREE;
314 }
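
/* For example, given a (hypothetical) declaration

     static const int forty_two = 42;

   get_symbol_constant_value returns the INTEGER_CST 42 for
   'forty_two'.  For a locally-bound, non-external 'const' integral
   or float variable without an initializer, it returns a zero of
   the variable's type, per the rule above.  */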
315 
316 /* Compute a default value for variable VAR and store it in the
317    CONST_VAL array.  The following rules are used to get default
318    values:
319 
320    1- Global and static variables that are declared constant are
321       considered CONSTANT.
322 
323    2- Any other value is considered UNDEFINED.  This is useful when
324       considering PHI nodes.  PHI arguments that are undefined do not
325       change the constant value of the PHI node, which allows for more
326       constants to be propagated.
327 
328    3- Variables defined by statements other than assignments and PHI
329       nodes are considered VARYING.
330 
331    4- Initial values of variables that are not GIMPLE registers are
332       considered VARYING.  */
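
/* As an illustration (hypothetical code), given

     static const int n = 42;
     int foo (int i) { int j;  ...  }

   the result of a load from 'n' defaults to CONSTANT 42 (rule 1),
   an SSA name of the never-initialized local 'j' defaults to
   UNDEFINED (rule 2), and the default definition of parameter 'i'
   is VARYING, since its value is unknown on entry.  */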
333 
334 static prop_value_t
335 get_default_value (tree var)
336 {
337   tree sym = SSA_NAME_VAR (var);
338   prop_value_t val = { UNINITIALIZED, NULL_TREE };
339   gimple stmt;
340 
341   stmt = SSA_NAME_DEF_STMT (var);
342 
343   if (gimple_nop_p (stmt))
344     {
345       /* Variables defined by an empty statement are those used
346 	 before being initialized.  If VAR is a local variable, we
347 	 can assume initially that it is UNDEFINED, otherwise we must
348 	 consider it VARYING.  */
349       if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
350 	val.lattice_val = UNDEFINED;
351       else
352 	val.lattice_val = VARYING;
353     }
354   else if (is_gimple_assign (stmt)
355 	   /* Value-returning GIMPLE_CALL statements assign to
356 	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
357 	   || (is_gimple_call (stmt)
358 	       && gimple_call_lhs (stmt) != NULL_TREE)
359 	   || gimple_code (stmt) == GIMPLE_PHI)
360     {
361       tree cst;
362       if (gimple_assign_single_p (stmt)
363 	  && DECL_P (gimple_assign_rhs1 (stmt))
364 	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
365 	{
366 	  val.lattice_val = CONSTANT;
367 	  val.value = cst;
368 	}
369       else
370 	/* Any other variable defined by an assignment or a PHI node
371 	   is considered UNDEFINED.  */
372 	val.lattice_val = UNDEFINED;
373     }
374   else
375     {
376       /* Otherwise, VAR will never take on a constant value.  */
377       val.lattice_val = VARYING;
378     }
379 
380   return val;
381 }
382 
383 
384 /* Get the constant value associated with variable VAR.  */
385 
386 static inline prop_value_t *
387 get_value (tree var)
388 {
389   prop_value_t *val;
390 
391   if (const_val == NULL)
392     return NULL;
393 
394   val = &const_val[SSA_NAME_VERSION (var)];
395   if (val->lattice_val == UNINITIALIZED)
396     *val = get_default_value (var);
397 
398   canonicalize_float_value (val);
399 
400   return val;
401 }
402 
403 /* Sets the value associated with VAR to VARYING.  */
404 
405 static inline void
406 set_value_varying (tree var)
407 {
408   prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
409 
410   val->lattice_val = VARYING;
411   val->value = NULL_TREE;
412 }
413 
414 /* For float types, modify the value of VAL to make ccp work correctly
415    for non-standard values (-0, NaN):
416 
417    If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
418    If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
419      This is to fix the following problem (see PR 29921): Suppose we have
420 
421      x = 0.0 * y
422 
423      and we set value of y to NaN.  This causes value of x to be set to NaN.
424      When we later determine that y is in fact VARYING, fold uses the fact
425      that HONOR_NANS is false, and we try to change the value of x to 0,
426      causing an ICE.  With HONOR_NANS being false, the real appearance of
427      NaN would cause undefined behavior, though, so claiming that y (and x)
428      are UNDEFINED initially is correct.  */
429 
430 static void
431 canonicalize_float_value (prop_value_t *val)
432 {
433   enum machine_mode mode;
434   tree type;
435   REAL_VALUE_TYPE d;
436 
437   if (val->lattice_val != CONSTANT
438       || TREE_CODE (val->value) != REAL_CST)
439     return;
440 
441   d = TREE_REAL_CST (val->value);
442   type = TREE_TYPE (val->value);
443   mode = TYPE_MODE (type);
444 
445   if (!HONOR_SIGNED_ZEROS (mode)
446       && REAL_VALUE_MINUS_ZERO (d))
447     {
448       val->value = build_real (type, dconst0);
449       return;
450     }
451 
452   if (!HONOR_NANS (mode)
453       && REAL_VALUE_ISNAN (d))
454     {
455       val->lattice_val = UNDEFINED;
456       val->value = NULL;
457       return;
458     }
459 }
460 
461 /* Set the value for variable VAR to NEW_VAL.  Return true if the new
462    value is different from VAR's previous value.  */
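
/* Note that valid transitions only move up the lattice:
   UNINITIALIZED -> UNDEFINED -> CONSTANT -> VARYING.  For instance,
   raising a (hypothetical) x_1 from UNDEFINED to CONSTANT 3 and
   later to VARYING is fine; lowering it from VARYING back to
   CONSTANT would trip the assertion below.  */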
463 
464 static bool
465 set_lattice_value (tree var, prop_value_t new_val)
466 {
467   prop_value_t *old_val = get_value (var);
468 
469   canonicalize_float_value (&new_val);
470 
471   /* Lattice transitions must always be monotonically increasing in
472      value.  If *OLD_VAL and NEW_VAL are the same, return false to
473      inform the caller that this was a non-transition.  */
474 
475   gcc_assert (old_val->lattice_val < new_val.lattice_val
476               || (old_val->lattice_val == new_val.lattice_val
477 		  && ((!old_val->value && !new_val.value)
478 		      || operand_equal_p (old_val->value, new_val.value, 0))));
479 
480   if (old_val->lattice_val != new_val.lattice_val)
481     {
482       if (dump_file && (dump_flags & TDF_DETAILS))
483 	{
484 	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
485 	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
486 	}
487 
488       *old_val = new_val;
489 
490       gcc_assert (new_val.lattice_val != UNDEFINED);
491       return true;
492     }
493 
494   return false;
495 }
496 
497 
498 /* Return the likely CCP lattice value for STMT.
499 
500    If STMT has no operands, then return CONSTANT.
501 
502    Else if undefinedness of operands of STMT causes its value to be
503    undefined, then return UNDEFINED.
504 
505    Else if any operands of STMT are constants, then return CONSTANT.
506 
507    Else return VARYING.  */
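
/* For instance (hypothetical names), for 'z_3 = x_1 + y_2' with x_1
   CONSTANT and y_2 VARYING this returns CONSTANT, so evaluate_stmt
   still attempts a fold; with both operands UNDEFINED it returns
   UNDEFINED.  */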
508 
509 static ccp_lattice_t
510 likely_value (gimple stmt)
511 {
512   bool has_constant_operand, has_undefined_operand, all_undefined_operands;
513   tree use;
514   ssa_op_iter iter;
515   unsigned i;
516 
517   enum gimple_code code = gimple_code (stmt);
518 
519   /* This function appears to be called only for assignments, calls,
520      conditionals, and switches, due to the logic in visit_stmt.  */
521   gcc_assert (code == GIMPLE_ASSIGN
522               || code == GIMPLE_CALL
523               || code == GIMPLE_COND
524               || code == GIMPLE_SWITCH);
525 
526   /* If the statement has volatile operands, it won't fold to a
527      constant value.  */
528   if (gimple_has_volatile_ops (stmt))
529     return VARYING;
530 
531   /* Arrive here for more complex cases.  */
532   has_constant_operand = false;
533   has_undefined_operand = false;
534   all_undefined_operands = true;
535   FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
536     {
537       prop_value_t *val = get_value (use);
538 
539       if (val->lattice_val == UNDEFINED)
540 	has_undefined_operand = true;
541       else
542 	all_undefined_operands = false;
543 
544       if (val->lattice_val == CONSTANT)
545 	has_constant_operand = true;
546     }
547 
548   /* There may be constants in regular rhs operands.  For calls we
549      have to skip the lhs, fndecl and static chain; for other
550      statements, only the lhs.  */
551   for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
552        i < gimple_num_ops (stmt); ++i)
553     {
554       tree op = gimple_op (stmt, i);
555       if (!op || TREE_CODE (op) == SSA_NAME)
556 	continue;
557       if (is_gimple_min_invariant (op))
558 	has_constant_operand = true;
559     }
560 
561   if (has_constant_operand)
562     all_undefined_operands = false;
563 
564   /* If the operation combines operands like COMPLEX_EXPR does, make
565      sure not to mark the result UNDEFINED if only one part of the
566      result is undefined.  */
567   if (has_undefined_operand && all_undefined_operands)
568     return UNDEFINED;
569   else if (code == GIMPLE_ASSIGN && has_undefined_operand)
570     {
571       switch (gimple_assign_rhs_code (stmt))
572 	{
573 	/* Unary operators are handled with all_undefined_operands.  */
574 	case PLUS_EXPR:
575 	case MINUS_EXPR:
576 	case POINTER_PLUS_EXPR:
577 	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
578 	     Not bitwise operators, one VARYING operand may specify the
579 	     result completely.  Not logical operators for the same reason.
580 	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
581 	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
582 	     the undefined operand may be promoted.  */
583 	  return UNDEFINED;
584 
585 	default:
586 	  ;
587 	}
588     }
589   /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
590      fall back to VARYING even if there were CONSTANT operands.  */
591   if (has_undefined_operand)
592     return VARYING;
593 
594   /* We do not consider virtual operands here -- load from read-only
595      memory may have only VARYING virtual operands, but still be
596      constant.  */
597   if (has_constant_operand
598       || gimple_references_memory_p (stmt))
599     return CONSTANT;
600 
601   return VARYING;
602 }
603 
604 /* Returns true if STMT cannot be constant.  */
605 
606 static bool
607 surely_varying_stmt_p (gimple stmt)
608 {
609   /* If the statement has operands that we cannot handle, it cannot be
610      constant.  */
611   if (gimple_has_volatile_ops (stmt))
612     return true;
613   /* If it is a call that does not return a value, or a direct call
614      to a function that is not a builtin, it is varying.  */
615      builtin and not an indirect call, it is varying.  */
616   if (is_gimple_call (stmt))
617     {
618       tree fndecl;
619       if (!gimple_call_lhs (stmt)
620 	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
621 	      && !DECL_BUILT_IN (fndecl)))
622 	return true;
623     }
624 
625   /* Any other store operation is not interesting.  */
626   else if (gimple_vdef (stmt))
627     return true;
628 
629   /* Anything other than assignments and conditional jumps is not
630      interesting for CCP.  */
631   if (gimple_code (stmt) != GIMPLE_ASSIGN
632       && gimple_code (stmt) != GIMPLE_COND
633       && gimple_code (stmt) != GIMPLE_SWITCH
634       && gimple_code (stmt) != GIMPLE_CALL)
635     return true;
636 
637   return false;
638 }
639 
640 /* Initialize local data structures for CCP.  */
641 
642 static void
643 ccp_initialize (void)
644 {
645   basic_block bb;
646 
647   const_val = XCNEWVEC (prop_value_t, num_ssa_names);
648 
649   /* Initialize simulation flags for PHI nodes and statements.  */
650   FOR_EACH_BB (bb)
651     {
652       gimple_stmt_iterator i;
653 
654       for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
655         {
656 	  gimple stmt = gsi_stmt (i);
657 	  bool is_varying;
658 
659 	  /* If the statement is a control insn, we must simulate it
660 	     at least once; failing to do so means its outgoing edges
661 	     would never get added to the worklist.  */
662 	  if (stmt_ends_bb_p (stmt))
663 	    is_varying = false;
664 	  else
665 	    is_varying = surely_varying_stmt_p (stmt);
666 
667 	  if (is_varying)
668 	    {
669 	      tree def;
670 	      ssa_op_iter iter;
671 
672 	      /* If the statement will not produce a constant, mark
673 		 all its outputs VARYING.  */
674 	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
675 		set_value_varying (def);
676 	    }
677           prop_set_simulate_again (stmt, !is_varying);
678 	}
679     }
680 
681   /* Now process PHI nodes.  We never clear the simulate_again flag on
682      phi nodes, since we do not know which edges are executable yet,
683      except for phi nodes for virtual operands.  */
684   FOR_EACH_BB (bb)
685     {
686       gimple_stmt_iterator i;
687 
688       for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
689         {
690           gimple phi = gsi_stmt (i);
691 
692 	  if (!is_gimple_reg (gimple_phi_result (phi)))
693             prop_set_simulate_again (phi, false);
694 	  else
695             prop_set_simulate_again (phi, true);
696 	}
697     }
698 }
699 
700 /* Debug count support.  Reset the value of an SSA name to
701    VARYING when the total number of SSA names analyzed goes
702    beyond the specified debug count.  */
703 
704 static void
705 do_dbg_cnt (void)
706 {
707   unsigned i;
708   for (i = 0; i < num_ssa_names; i++)
709     {
710       if (!dbg_cnt (ccp))
711         {
712           const_val[i].lattice_val = VARYING;
713           const_val[i].value = NULL_TREE;
714         }
715     }
716 }
717 
718 
719 /* Do final substitution of propagated values, cleanup the flowgraph and
720    free allocated storage.
721 
722    Return TRUE when something was optimized.  */
723 
724 static bool
725 ccp_finalize (void)
726 {
727   bool something_changed;
728 
729   do_dbg_cnt ();
730   /* Perform substitutions based on the known constant values.  */
731   something_changed = substitute_and_fold (const_val, ccp_fold_stmt, true);
732 
733   free (const_val);
734   const_val = NULL;
735   return something_changed;
736 }
737 
738 
739 /* Compute the meet operator between *VAL1 and *VAL2.  Store the result
740    in VAL1.
741 
742    		any  M UNDEFINED   = any
743 		any  M VARYING     = VARYING
744 		Ci   M Cj	   = Ci		if (i == j)
745 		Ci   M Cj	   = VARYING	if (i != j)
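
		For instance, UNDEFINED M CONSTANT 3 = CONSTANT 3:
		an UNDEFINED value is optimistically assumed to be
		able to take any value, so it never pessimizes the
		meet.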
746    */
747 
748 static void
749 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
750 {
751   if (val1->lattice_val == UNDEFINED)
752     {
753       /* UNDEFINED M any = any   */
754       *val1 = *val2;
755     }
756   else if (val2->lattice_val == UNDEFINED)
757     {
758       /* any M UNDEFINED = any
759          Nothing to do.  VAL1 already contains the value we want.  */
760       ;
761     }
762   else if (val1->lattice_val == VARYING
763            || val2->lattice_val == VARYING)
764     {
765       /* any M VARYING = VARYING.  */
766       val1->lattice_val = VARYING;
767       val1->value = NULL_TREE;
768     }
769   else if (val1->lattice_val == CONSTANT
770 	   && val2->lattice_val == CONSTANT
771 	   && simple_cst_equal (val1->value, val2->value) == 1)
772     {
773       /* Ci M Cj = Ci		if (i == j)
774 	 Ci M Cj = VARYING	if (i != j)  */
778       val1->lattice_val = CONSTANT;
779       /* VAL1 already holds the common constant value.  */
780     }
781   else
782     {
783       /* Any other combination is VARYING.  */
784       val1->lattice_val = VARYING;
785       val1->value = NULL_TREE;
786     }
787 }
788 
789 
790 /* Loop through the arguments of PHI and compare their lattice
791    values to determine PHI's lattice value.  The value of a PHI
792    node is determined by calling ccp_lattice_meet on all the
793    arguments that are incoming via executable edges.  */
794 
795 static enum ssa_prop_result
796 ccp_visit_phi_node (gimple phi)
797 {
798   unsigned i;
799   prop_value_t *old_val, new_val;
800 
801   if (dump_file && (dump_flags & TDF_DETAILS))
802     {
803       fprintf (dump_file, "\nVisiting PHI node: ");
804       print_gimple_stmt (dump_file, phi, 0, dump_flags);
805     }
806 
807   old_val = get_value (gimple_phi_result (phi));
808   switch (old_val->lattice_val)
809     {
810     case VARYING:
811       return SSA_PROP_VARYING;
812 
813     case CONSTANT:
814       new_val = *old_val;
815       break;
816 
817     case UNDEFINED:
818       new_val.lattice_val = UNDEFINED;
819       new_val.value = NULL_TREE;
820       break;
821 
822     default:
823       gcc_unreachable ();
824     }
825 
826   for (i = 0; i < gimple_phi_num_args (phi); i++)
827     {
828       /* Compute the meet operator over all the PHI arguments flowing
829 	 through executable edges.  */
830       edge e = gimple_phi_arg_edge (phi, i);
831 
832       if (dump_file && (dump_flags & TDF_DETAILS))
833 	{
834 	  fprintf (dump_file,
835 	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
836 	      i, e->src->index, e->dest->index,
837 	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
838 	}
839 
840       /* If the incoming edge is executable, compute the meet operator for
841 	 the existing value of the PHI node and the current PHI argument.  */
842       if (e->flags & EDGE_EXECUTABLE)
843 	{
844 	  tree arg = gimple_phi_arg (phi, i)->def;
845 	  prop_value_t arg_val;
846 
847 	  if (is_gimple_min_invariant (arg))
848 	    {
849 	      arg_val.lattice_val = CONSTANT;
850 	      arg_val.value = arg;
851 	    }
852 	  else
853 	    arg_val = *(get_value (arg));
854 
855 	  ccp_lattice_meet (&new_val, &arg_val);
856 
857 	  if (dump_file && (dump_flags & TDF_DETAILS))
858 	    {
859 	      fprintf (dump_file, "\t");
860 	      print_generic_expr (dump_file, arg, dump_flags);
861 	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
862 	      fprintf (dump_file, "\n");
863 	    }
864 
865 	  if (new_val.lattice_val == VARYING)
866 	    break;
867 	}
868     }
869 
870   if (dump_file && (dump_flags & TDF_DETAILS))
871     {
872       dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
873       fprintf (dump_file, "\n\n");
874     }
875 
876   /* Make the transition to the new value.  */
877   if (set_lattice_value (gimple_phi_result (phi), new_val))
878     {
879       if (new_val.lattice_val == VARYING)
880 	return SSA_PROP_VARYING;
881       else
882 	return SSA_PROP_INTERESTING;
883     }
884   else
885     return SSA_PROP_NOT_INTERESTING;
886 }
887 
888 /* Return true if we may propagate the address expression ADDR into the
889    dereference DEREF and cancel them.  */
890 
891 bool
892 may_propagate_address_into_dereference (tree addr, tree deref)
893 {
894   gcc_assert (INDIRECT_REF_P (deref)
895 	      && TREE_CODE (addr) == ADDR_EXPR);
896 
897   /* Don't propagate if ADDR's operand has incomplete type.  */
898   if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
899     return false;
900 
901   /* If the address is invariant then we do not need to preserve restrict
902      qualifications.  But we do need to preserve volatile qualifiers until
903      we can annotate the folded dereference itself properly.  */
904   if (is_gimple_min_invariant (addr)
905       && (!TREE_THIS_VOLATILE (deref)
906 	  || TYPE_VOLATILE (TREE_TYPE (addr))))
907     return useless_type_conversion_p (TREE_TYPE (deref),
908 				      TREE_TYPE (TREE_OPERAND (addr, 0)));
909 
910   /* Else both the address substitution and the folding must result in
911      a valid useless type conversion sequence.  */
912   return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
913 				     TREE_TYPE (addr))
914 	  && useless_type_conversion_p (TREE_TYPE (deref),
915 					TREE_TYPE (TREE_OPERAND (addr, 0))));
916 }
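
/* For example (hypothetical GIMPLE), if p_1 is known to be '&a' for
   a complete, type-compatible variable 'a', the dereference in
   'x_2 = *p_1' may be cancelled against the address and folded to
   'x_2 = a'.  */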
917 
918 /* CCP specific front-end to the non-destructive constant folding
919    routines.
920 
921    Attempt to simplify the RHS of STMT knowing that one or more
922    operands are constants.
923 
924    If simplification is possible, return the simplified RHS,
925    otherwise return the original RHS or NULL_TREE.  */
926 
927 static tree
928 ccp_fold (gimple stmt)
929 {
930   location_t loc = gimple_location (stmt);
931   switch (gimple_code (stmt))
932     {
933     case GIMPLE_ASSIGN:
934       {
935         enum tree_code subcode = gimple_assign_rhs_code (stmt);
936 
937         switch (get_gimple_rhs_class (subcode))
938           {
939           case GIMPLE_SINGLE_RHS:
940             {
941               tree rhs = gimple_assign_rhs1 (stmt);
942               enum tree_code_class kind = TREE_CODE_CLASS (subcode);
943 
944               if (TREE_CODE (rhs) == SSA_NAME)
945                 {
946                   /* If the RHS is an SSA_NAME, return its known constant value,
947                      if any.  */
948                   return get_value (rhs)->value;
949                 }
950 	      /* Handle propagating invariant addresses into address operations.
951 		 The folding we do here matches that in tree-ssa-forwprop.c.  */
952 	      else if (TREE_CODE (rhs) == ADDR_EXPR)
953 		{
954 		  tree *base;
955 		  base = &TREE_OPERAND (rhs, 0);
956 		  while (handled_component_p (*base))
957 		    base = &TREE_OPERAND (*base, 0);
958 		  if (TREE_CODE (*base) == INDIRECT_REF
959 		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
960 		    {
961 		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
962 		      if (val->lattice_val == CONSTANT
963 			  && TREE_CODE (val->value) == ADDR_EXPR
964 			  && may_propagate_address_into_dereference
965 			       (val->value, *base))
966 			{
967 			  /* We need to return a new tree, not modify the IL
968 			     or share parts of it.  So play some tricks to
969 			     avoid manually building it.  */
970 			  tree ret, save = *base;
971 			  *base = TREE_OPERAND (val->value, 0);
972 			  ret = unshare_expr (rhs);
973 			  recompute_tree_invariant_for_addr_expr (ret);
974 			  *base = save;
975 			  return ret;
976 			}
977 		    }
978 		}
979 	      else if (TREE_CODE (rhs) == CONSTRUCTOR
980 		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
981 		       && (CONSTRUCTOR_NELTS (rhs)
982 			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
983 		{
984 		  unsigned i;
985 		  tree val, list;
986 
987 		  list = NULL_TREE;
988 		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
989 		    {
990 		      if (TREE_CODE (val) == SSA_NAME
991 			  && get_value (val)->lattice_val == CONSTANT)
992 			val = get_value (val)->value;
993 		      if (TREE_CODE (val) == INTEGER_CST
994 			  || TREE_CODE (val) == REAL_CST
995 			  || TREE_CODE (val) == FIXED_CST)
996 			list = tree_cons (NULL_TREE, val, list);
997 		      else
998 			return NULL_TREE;
999 		    }
1000 
1001 		  return build_vector (TREE_TYPE (rhs), nreverse (list));
1002 		}
1003 
1004               if (kind == tcc_reference)
1005 		{
1006 		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
1007 		       || TREE_CODE (rhs) == REALPART_EXPR
1008 		       || TREE_CODE (rhs) == IMAGPART_EXPR)
1009 		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1010 		    {
1011 		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
1012 		      if (val->lattice_val == CONSTANT)
1013 			return fold_unary_loc (EXPR_LOCATION (rhs),
1014 					   TREE_CODE (rhs),
1015 					   TREE_TYPE (rhs), val->value);
1016 		    }
1017 		  else if (TREE_CODE (rhs) == INDIRECT_REF
1018 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1019 		    {
1020 		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
1021 		      if (val->lattice_val == CONSTANT
1022 			  && TREE_CODE (val->value) == ADDR_EXPR
1023 			  && useless_type_conversion_p (TREE_TYPE (rhs),
1024 							TREE_TYPE (TREE_TYPE (val->value))))
1025 			rhs = TREE_OPERAND (val->value, 0);
1026 		    }
1027 		  return fold_const_aggregate_ref (rhs);
1028 		}
1029               else if (kind == tcc_declaration)
1030                 return get_symbol_constant_value (rhs);
1031               return rhs;
1032             }
1033 
1034           case GIMPLE_UNARY_RHS:
1035             {
1036               /* Handle unary operators that can appear in GIMPLE form.
1037                  Note that we know the single operand must be a constant,
1038                  so this should almost always return a simplified RHS.  */
1039               tree lhs = gimple_assign_lhs (stmt);
1040               tree op0 = gimple_assign_rhs1 (stmt);
1041 
1042               /* Simplify the operand down to a constant.  */
1043               if (TREE_CODE (op0) == SSA_NAME)
1044                 {
1045                   prop_value_t *val = get_value (op0);
1046                   if (val->lattice_val == CONSTANT)
1047                     op0 = val->value;
1048                 }
1049 
1050 	      /* Conversions are useless for CCP purposes if they are
1051 		 value-preserving.  Thus the restrictions that
1052 		 useless_type_conversion_p places for pointer type conversions
1053 		 do not apply here.  Substitution later will only substitute to
1054 		 allowed places.  */
1055 	      if (CONVERT_EXPR_CODE_P (subcode)
1056 		  && POINTER_TYPE_P (TREE_TYPE (lhs))
1057 		  && POINTER_TYPE_P (TREE_TYPE (op0))
1058 		  /* Do not allow differences in volatile qualification
1059 		     as this might get us confused as to whether a
1060 		     propagation destination statement is volatile
1061 		     or not.  See PR36988.  */
1062 		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
1063 		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
1064 		{
1065 		  tree tem;
1066 		  /* Still try to generate a constant of correct type.  */
1067 		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
1068 						  TREE_TYPE (op0))
1069 		      && ((tem = maybe_fold_offset_to_address
1070 			   (loc,
1071 			    op0, integer_zero_node, TREE_TYPE (lhs)))
1072 			  != NULL_TREE))
1073 		    return tem;
1074 		  return op0;
1075 		}
1076 
1077               return
1078 		fold_unary_ignore_overflow_loc (loc, subcode,
1079 						gimple_expr_type (stmt), op0);
1080             }
1081 
1082           case GIMPLE_BINARY_RHS:
1083             {
1084               /* Handle binary operators that can appear in GIMPLE form.  */
1085               tree op0 = gimple_assign_rhs1 (stmt);
1086               tree op1 = gimple_assign_rhs2 (stmt);
1087 
1088               /* Simplify the operands down to constants when appropriate.  */
1089               if (TREE_CODE (op0) == SSA_NAME)
1090                 {
1091                   prop_value_t *val = get_value (op0);
1092                   if (val->lattice_val == CONSTANT)
1093                     op0 = val->value;
1094                 }
1095 
1096               if (TREE_CODE (op1) == SSA_NAME)
1097                 {
1098                   prop_value_t *val = get_value (op1);
1099                   if (val->lattice_val == CONSTANT)
1100                     op1 = val->value;
1101                 }
1102 
1103 	      /* Fold &foo + CST into an invariant reference if possible.  */
1104 	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1105 		  && TREE_CODE (op0) == ADDR_EXPR
1106 		  && TREE_CODE (op1) == INTEGER_CST)
1107 		{
1108 		  tree tem = maybe_fold_offset_to_address
1109 		    (loc, op0, op1, TREE_TYPE (op0));
1110 		  if (tem != NULL_TREE)
1111 		    return tem;
1112 		}
1113 
1114               return fold_binary_loc (loc, subcode,
1115 				  gimple_expr_type (stmt), op0, op1);
1116             }
1117 
1118           default:
1119             gcc_unreachable ();
1120           }
1121       }
1122       break;
1123 
1124     case GIMPLE_CALL:
1125       {
1126 	tree fn = gimple_call_fn (stmt);
1127 	prop_value_t *val;
1128 
1129 	if (TREE_CODE (fn) == SSA_NAME)
1130 	  {
1131 	    val = get_value (fn);
1132 	    if (val->lattice_val == CONSTANT)
1133 	      fn = val->value;
1134 	  }
1135 	if (TREE_CODE (fn) == ADDR_EXPR
1136 	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1137 	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1138 	  {
1139 	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1140 	    tree call, retval;
1141 	    unsigned i;
1142 	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
1143 	      {
1144 		args[i] = gimple_call_arg (stmt, i);
1145 		if (TREE_CODE (args[i]) == SSA_NAME)
1146 		  {
1147 		    val = get_value (args[i]);
1148 		    if (val->lattice_val == CONSTANT)
1149 		      args[i] = val->value;
1150 		  }
1151 	      }
1152 	    call = build_call_array_loc (loc,
1153 					 gimple_call_return_type (stmt),
1154 					 fn, gimple_call_num_args (stmt), args);
1155 	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
1156 	    if (retval)
1157 	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
1158 	      STRIP_NOPS (retval);
1159 	    return retval;
1160 	  }
1161 	return NULL_TREE;
1162       }
1163 
1164     case GIMPLE_COND:
1165       {
1166         /* Handle comparison operators that can appear in GIMPLE form.  */
1167         tree op0 = gimple_cond_lhs (stmt);
1168         tree op1 = gimple_cond_rhs (stmt);
1169         enum tree_code code = gimple_cond_code (stmt);
1170 
1171         /* Simplify the operands down to constants when appropriate.  */
1172         if (TREE_CODE (op0) == SSA_NAME)
1173           {
1174             prop_value_t *val = get_value (op0);
1175             if (val->lattice_val == CONSTANT)
1176               op0 = val->value;
1177           }
1178 
1179         if (TREE_CODE (op1) == SSA_NAME)
1180           {
1181             prop_value_t *val = get_value (op1);
1182             if (val->lattice_val == CONSTANT)
1183               op1 = val->value;
1184           }
1185 
1186         return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1187       }
1188 
1189     case GIMPLE_SWITCH:
1190       {
1191         tree rhs = gimple_switch_index (stmt);
1192 
1193         if (TREE_CODE (rhs) == SSA_NAME)
1194           {
1195             /* If the RHS is an SSA_NAME, return its known constant value,
1196                if any.  */
1197             return get_value (rhs)->value;
1198           }
1199 
1200         return rhs;
1201       }
1202 
1203     default:
1204       gcc_unreachable ();
1205     }
1206 }
1207 
1208 
1209 /* Return the tree representing the element referenced by T if T is an
1210    ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
1211    NULL_TREE otherwise.  */
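
/* For instance, given the (hypothetical) constant aggregate

     static const int a[3] = { 1, 2, 3 };

   a read of 'a[1]' folds to 2, and 'a[i_4]' folds the same way when
   the lattice says i_4 is CONSTANT 1.  */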
1212 
1213 tree
1214 fold_const_aggregate_ref (tree t)
1215 {
1216   prop_value_t *value;
1217   tree base, ctor, idx, field;
1218   unsigned HOST_WIDE_INT cnt;
1219   tree cfield, cval;
1220 
1221   if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1222     return get_symbol_constant_value (t);
1223 
1224   switch (TREE_CODE (t))
1225     {
1226     case ARRAY_REF:
1227       /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
1228 	 DECL_INITIAL.  If BASE is a nested reference into another
1229 	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1230 	 the inner reference.  */
1231       base = TREE_OPERAND (t, 0);
1232       switch (TREE_CODE (base))
1233 	{
1234 	case VAR_DECL:
1235 	  if (!TREE_READONLY (base)
1236 	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1237 	      || !targetm.binds_local_p (base))
1238 	    return NULL_TREE;
1239 
1240 	  ctor = DECL_INITIAL (base);
1241 	  break;
1242 
1243 	case ARRAY_REF:
1244 	case COMPONENT_REF:
1245 	  ctor = fold_const_aggregate_ref (base);
1246 	  break;
1247 
1248 	case STRING_CST:
1249 	case CONSTRUCTOR:
1250 	  ctor = base;
1251 	  break;
1252 
1253 	default:
1254 	  return NULL_TREE;
1255 	}
1256 
1257       if (ctor == NULL_TREE
1258 	  || (TREE_CODE (ctor) != CONSTRUCTOR
1259 	      && TREE_CODE (ctor) != STRING_CST)
1260 	  || !TREE_STATIC (ctor))
1261 	return NULL_TREE;
1262 
1263       /* Get the index.  If we have an SSA_NAME, try to resolve it
1264 	 with the current lattice value for the SSA_NAME.  */
1265       idx = TREE_OPERAND (t, 1);
1266       switch (TREE_CODE (idx))
1267 	{
1268 	case SSA_NAME:
1269 	  if ((value = get_value (idx))
1270 	      && value->lattice_val == CONSTANT
1271 	      && TREE_CODE (value->value) == INTEGER_CST)
1272 	    idx = value->value;
1273 	  else
1274 	    return NULL_TREE;
1275 	  break;
1276 
1277 	case INTEGER_CST:
1278 	  break;
1279 
1280 	default:
1281 	  return NULL_TREE;
1282 	}
1283 
1284       /* Fold read from constant string.  */
1285       if (TREE_CODE (ctor) == STRING_CST)
1286 	{
1287 	  if ((TYPE_MODE (TREE_TYPE (t))
1288 	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1289 	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1290 	          == MODE_INT)
1291 	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1292 	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1293 	    return build_int_cst_type (TREE_TYPE (t),
1294 				       (TREE_STRING_POINTER (ctor)
1295 					[TREE_INT_CST_LOW (idx)]));
1296 	  return NULL_TREE;
1297 	}
1298 
1299       /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
1300       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1301 	if (tree_int_cst_equal (cfield, idx))
1302 	  {
1303 	    STRIP_NOPS (cval);
1304 	    if (TREE_CODE (cval) == ADDR_EXPR)
1305 	      {
1306 		tree base = get_base_address (TREE_OPERAND (cval, 0));
1307 		if (base && TREE_CODE (base) == VAR_DECL)
1308 		  add_referenced_var (base);
1309 	      }
1310 	    return cval;
1311 	  }
1312       break;
1313 
1314     case COMPONENT_REF:
1315       /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
1316 	 DECL_INITIAL.  If BASE is a nested reference into another
1317 	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1318 	 the inner reference.  */
1319       base = TREE_OPERAND (t, 0);
1320       switch (TREE_CODE (base))
1321 	{
1322 	case VAR_DECL:
1323 	  if (!TREE_READONLY (base)
1324 	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1325 	      || !targetm.binds_local_p (base))
1326 	    return NULL_TREE;
1327 
1328 	  ctor = DECL_INITIAL (base);
1329 	  break;
1330 
1331 	case ARRAY_REF:
1332 	case COMPONENT_REF:
1333 	  ctor = fold_const_aggregate_ref (base);
1334 	  break;
1335 
1336 	default:
1337 	  return NULL_TREE;
1338 	}
1339 
1340       if (ctor == NULL_TREE
1341 	  || TREE_CODE (ctor) != CONSTRUCTOR
1342 	  || !TREE_STATIC (ctor))
1343 	return NULL_TREE;
1344 
1345       field = TREE_OPERAND (t, 1);
1346 
1347       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1348 	if (cfield == field
1349 	    /* FIXME: Handle bit-fields.  */
1350 	    && ! DECL_BIT_FIELD (cfield))
1351 	  {
1352 	    STRIP_NOPS (cval);
1353 	    if (TREE_CODE (cval) == ADDR_EXPR)
1354 	      {
1355 		tree base = get_base_address (TREE_OPERAND (cval, 0));
1356 		if (base && TREE_CODE (base) == VAR_DECL)
1357 		  add_referenced_var (base);
1358 	      }
1359 	    return cval;
1360 	  }
1361       break;
1362 
1363     case REALPART_EXPR:
1364     case IMAGPART_EXPR:
1365       {
1366 	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1367 	if (c && TREE_CODE (c) == COMPLEX_CST)
1368 	  return fold_build1_loc (EXPR_LOCATION (t),
1369 			      TREE_CODE (t), TREE_TYPE (t), c);
1370 	break;
1371       }
1372 
1373     case INDIRECT_REF:
1374       {
1375 	tree base = TREE_OPERAND (t, 0);
1376 	if (TREE_CODE (base) == SSA_NAME
1377 	    && (value = get_value (base))
1378 	    && value->lattice_val == CONSTANT
1379 	    && TREE_CODE (value->value) == ADDR_EXPR
1380 	    && useless_type_conversion_p (TREE_TYPE (t),
1381 					  TREE_TYPE (TREE_TYPE (value->value))))
1382 	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1383 	break;
1384       }
1385 
1386     default:
1387       break;
1388     }
1389 
1390   return NULL_TREE;
1391 }
1392 
1393 /* Evaluate statement STMT.
1394    Valid only for assignments, calls, conditionals, and switches. */
1395 
1396 static prop_value_t
1397 evaluate_stmt (gimple stmt)
1398 {
1399   prop_value_t val;
1400   tree simplified = NULL_TREE;
1401   ccp_lattice_t likelyvalue = likely_value (stmt);
1402   bool is_constant;
1403 
1404   fold_defer_overflow_warnings ();
1405 
1406   /* If the statement is likely to have a CONSTANT result, then try
1407      to fold the statement to determine the constant value.  */
1408   /* FIXME.  This is the only place that we call ccp_fold.
1409      Since likely_value never returns CONSTANT for calls, we will
1410      not attempt to fold them, including builtins that may profit.  */
1411   if (likelyvalue == CONSTANT)
1412     simplified = ccp_fold (stmt);
1413   /* If the statement is likely to have a VARYING result, then do not
1414      bother folding the statement.  */
1415   else if (likelyvalue == VARYING)
1416     {
1417       enum gimple_code code = gimple_code (stmt);
1418       if (code == GIMPLE_ASSIGN)
1419         {
1420           enum tree_code subcode = gimple_assign_rhs_code (stmt);
1421 
1422           /* Other cases cannot satisfy is_gimple_min_invariant
1423              without folding.  */
1424           if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1425             simplified = gimple_assign_rhs1 (stmt);
1426         }
1427       else if (code == GIMPLE_SWITCH)
1428         simplified = gimple_switch_index (stmt);
1429       else
1430 	/* These cannot satisfy is_gimple_min_invariant without folding.  */
1431 	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1432     }
1433 
1434   is_constant = simplified && is_gimple_min_invariant (simplified);
1435 
1436   fold_undefer_overflow_warnings (is_constant, stmt, 0);
1437 
1438   if (dump_file && (dump_flags & TDF_DETAILS))
1439     {
1440       fprintf (dump_file, "which is likely ");
1441       switch (likelyvalue)
1442 	{
1443 	case CONSTANT:
1444 	  fprintf (dump_file, "CONSTANT");
1445 	  break;
1446 	case UNDEFINED:
1447 	  fprintf (dump_file, "UNDEFINED");
1448 	  break;
1449 	case VARYING:
1450 	  fprintf (dump_file, "VARYING");
1451 	  break;
1452 	default:;
1453 	}
1454       fprintf (dump_file, "\n");
1455     }
1456 
1457   if (is_constant)
1458     {
1459       /* The statement produced a constant value.  */
1460       val.lattice_val = CONSTANT;
1461       val.value = simplified;
1462     }
1463   else
1464     {
1465       /* The statement produced a nonconstant value.  If the statement
1466 	 had UNDEFINED operands, then the result of the statement
1467 	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
1468       if (likelyvalue == UNDEFINED)
1469 	val.lattice_val = likelyvalue;
1470       else
1471 	val.lattice_val = VARYING;
1472 
1473       val.value = NULL_TREE;
1474     }
1475 
1476   return val;
1477 }
1478 
1479 /* Fold the stmt at *GSI with CCP specific information that propagation
1480    and regular folding do not catch.  */
1481 
1482 static bool
1483 ccp_fold_stmt (gimple_stmt_iterator *gsi)
1484 {
1485   gimple stmt = gsi_stmt (*gsi);
1486 
1487   switch (gimple_code (stmt))
1488     {
1489     case GIMPLE_COND:
1490       {
1491 	prop_value_t val;
1492 	/* Statement evaluation will handle type mismatches in constants
1493 	   more gracefully than the final propagation.  This allows us to
1494 	   fold more conditionals here.  */
1495 	val = evaluate_stmt (stmt);
1496 	if (val.lattice_val != CONSTANT
1497 	    || TREE_CODE (val.value) != INTEGER_CST)
1498 	  return false;
1499 
1500 	if (integer_zerop (val.value))
1501 	  gimple_cond_make_false (stmt);
1502 	else
1503 	  gimple_cond_make_true (stmt);
1504 
1505 	return true;
1506       }
1507 
1508     case GIMPLE_CALL:
1509       {
1510 	tree lhs = gimple_call_lhs (stmt);
1511 	prop_value_t *val;
1512 	tree argt;
1513 	bool changed = false;
1514 	unsigned i;
1515 
1516 	/* If the call was folded into a constant make sure it goes
1517 	   away even if we cannot propagate into all uses because of
1518 	   type issues.  */
1519 	if (lhs
1520 	    && TREE_CODE (lhs) == SSA_NAME
1521 	    && (val = get_value (lhs))
1522 	    && val->lattice_val == CONSTANT)
1523 	  {
1524 	    tree new_rhs = unshare_expr (val->value);
1525 	    bool res;
1526 	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
1527 					    TREE_TYPE (new_rhs)))
1528 	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1529 	    res = update_call_from_tree (gsi, new_rhs);
1530 	    gcc_assert (res);
1531 	    return true;
1532 	  }
1533 
1534 	/* Propagate into the call arguments.  Compared to replace_uses_in
1535 	   this can use the argument slot types for type verification
1536 	   instead of the current argument type.  We also can safely
1537 	   drop qualifiers here as we are dealing with constants anyway.  */
1538 	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
1539 	for (i = 0; i < gimple_call_num_args (stmt) && argt;
1540 	     ++i, argt = TREE_CHAIN (argt))
1541 	  {
1542 	    tree arg = gimple_call_arg (stmt, i);
1543 	    if (TREE_CODE (arg) == SSA_NAME
1544 		&& (val = get_value (arg))
1545 		&& val->lattice_val == CONSTANT
1546 		&& useless_type_conversion_p
1547 		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
1548 		      TYPE_MAIN_VARIANT (TREE_TYPE (val->value))))
1549 	      {
1550 		gimple_call_set_arg (stmt, i, unshare_expr (val->value));
1551 		changed = true;
1552 	      }
1553 	  }
1554 
1555 	return changed;
1556       }
1557 
1558     case GIMPLE_ASSIGN:
1559       {
1560 	tree lhs = gimple_assign_lhs (stmt);
1561 	prop_value_t *val;
1562 
1563 	/* If we have a load that turned out to be constant replace it
1564 	   as we cannot propagate into all uses in all cases.  */
1565 	if (gimple_assign_single_p (stmt)
1566 	    && TREE_CODE (lhs) == SSA_NAME
1567 	    && (val = get_value (lhs))
1568 	    && val->lattice_val == CONSTANT)
1569 	  {
1570 	    tree rhs = unshare_expr (val->value);
1571 	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1572 	      rhs = fold_convert (TREE_TYPE (lhs), rhs);
1573 	    gimple_assign_set_rhs_from_tree (gsi, rhs);
1574 	    return true;
1575 	  }
1576 
1577 	return false;
1578       }
1579 
1580     default:
1581       return false;
1582     }
1583 }
1584 
1585 /* Visit the assignment statement STMT.  Set the value of its LHS to the
1586    value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
1587    creates virtual definitions, set the value of each new name to that
1588    of the RHS (if we can derive a constant out of the RHS).
1589    Value-returning call statements also perform an assignment, and
1590    are handled here.  */
1591 
1592 static enum ssa_prop_result
1593 visit_assignment (gimple stmt, tree *output_p)
1594 {
1595   prop_value_t val;
1596   enum ssa_prop_result retval;
1597 
1598   tree lhs = gimple_get_lhs (stmt);
1599 
1600   gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1601               || gimple_call_lhs (stmt) != NULL_TREE);
1602 
1603   if (gimple_assign_copy_p (stmt))
1604     {
1605       tree rhs = gimple_assign_rhs1 (stmt);
1606 
1607       if  (TREE_CODE (rhs) == SSA_NAME)
1608         {
1609           /* For a simple copy operation, we copy the lattice values.  */
1610           prop_value_t *nval = get_value (rhs);
1611           val = *nval;
1612         }
1613       else
1614         val = evaluate_stmt (stmt);
1615     }
1616   else
1617     /* Evaluate the statement, which could be
1618        either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
1619     val = evaluate_stmt (stmt);
1620 
1621   retval = SSA_PROP_NOT_INTERESTING;
1622 
1623   /* Set the lattice value of the statement's output.  */
1624   if (TREE_CODE (lhs) == SSA_NAME)
1625     {
1626       /* If STMT is an assignment to an SSA_NAME, we only have one
1627 	 value to set.  */
1628       if (set_lattice_value (lhs, val))
1629 	{
1630 	  *output_p = lhs;
1631 	  if (val.lattice_val == VARYING)
1632 	    retval = SSA_PROP_VARYING;
1633 	  else
1634 	    retval = SSA_PROP_INTERESTING;
1635 	}
1636     }
1637 
1638   return retval;
1639 }
1640 
1641 
1642 /* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
1643    if it can determine which edge will be taken.  Otherwise, return
1644    SSA_PROP_VARYING.  */
1645 
1646 static enum ssa_prop_result
1647 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1648 {
1649   prop_value_t val;
1650   basic_block block;
1651 
1652   block = gimple_bb (stmt);
1653   val = evaluate_stmt (stmt);
1654 
1655   /* Find which edge out of the conditional block will be taken and add it
1656      to the worklist.  If no single edge can be determined statically,
1657      return SSA_PROP_VARYING to feed all the outgoing edges to the
1658      propagation engine.  */
1659   *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1660   if (*taken_edge_p)
1661     return SSA_PROP_INTERESTING;
1662   else
1663     return SSA_PROP_VARYING;
1664 }
1665 
1666 
1667 /* Evaluate statement STMT.  If the statement produces an output value and
1668    its evaluation changes the lattice value of its output, return
1669    SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1670    output value.
1671 
1672    If STMT is a conditional branch and we can determine its truth
1673    value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
1674    value, return SSA_PROP_VARYING.  */
1675 
1676 static enum ssa_prop_result
1677 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1678 {
1679   tree def;
1680   ssa_op_iter iter;
1681 
1682   if (dump_file && (dump_flags & TDF_DETAILS))
1683     {
1684       fprintf (dump_file, "\nVisiting statement:\n");
1685       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1686     }
1687 
1688   switch (gimple_code (stmt))
1689     {
1690       case GIMPLE_ASSIGN:
1691         /* If the statement is an assignment that produces a single
1692            output value, evaluate its RHS to see if the lattice value of
1693            its output has changed.  */
1694         return visit_assignment (stmt, output_p);
1695 
1696       case GIMPLE_CALL:
1697         /* A value-returning call also performs an assignment.  */
1698         if (gimple_call_lhs (stmt) != NULL_TREE)
1699           return visit_assignment (stmt, output_p);
1700         break;
1701 
1702       case GIMPLE_COND:
1703       case GIMPLE_SWITCH:
1704         /* If STMT is a conditional branch, see if we can determine
1705            which branch will be taken.  */
1706         /* FIXME.  It appears that we should be able to optimize
1707            computed GOTOs here as well.  */
1708         return visit_cond_stmt (stmt, taken_edge_p);
1709 
1710       default:
1711         break;
1712     }
1713 
1714   /* Any other kind of statement is not interesting for constant
1715      propagation and, therefore, not worth simulating.  */
1716   if (dump_file && (dump_flags & TDF_DETAILS))
1717     fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");
1718 
1719   /* Definitions made by statements other than assignments to
1720      SSA_NAMEs represent unknown modifications to their outputs.
1721      Mark them VARYING.  */
1722   FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1723     {
1724       prop_value_t v = { VARYING, NULL_TREE };
1725       set_lattice_value (def, v);
1726     }
1727 
1728   return SSA_PROP_VARYING;
1729 }
1730 
1731 
1732 /* Main entry point for SSA Conditional Constant Propagation.  */
1733 
1734 static unsigned int
1735 do_ssa_ccp (void)
1736 {
1737   ccp_initialize ();
1738   ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1739   if (ccp_finalize ())
1740     return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
1741   else
1742     return 0;
1743 }
1744 
1745 
1746 static bool
1747 gate_ccp (void)
1748 {
1749   return flag_tree_ccp != 0;
1750 }
1751 
1752 
1753 struct gimple_opt_pass pass_ccp =
1754 {
1755  {
1756   GIMPLE_PASS,
1757   "ccp",				/* name */
1758   gate_ccp,				/* gate */
1759   do_ssa_ccp,				/* execute */
1760   NULL,					/* sub */
1761   NULL,					/* next */
1762   0,					/* static_pass_number */
1763   TV_TREE_CCP,				/* tv_id */
1764   PROP_cfg | PROP_ssa,			/* properties_required */
1765   0,					/* properties_provided */
1766   0,					/* properties_destroyed */
1767   0,					/* todo_flags_start */
1768   TODO_dump_func | TODO_verify_ssa
1769   | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
1770  }
1771 };
1772 
1773 
1774 /* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
1775    BASE is an expression of array type.  OFFSET is a byte displacement.
1776    ORIG_TYPE is the desired result type.  If ALLOW_NEGATIVE_IDX is false,
1777    do not return an ARRAY_REF whose index is below the array's lower bound.
1778    LOC is the location of the original expression.  */
1779 
1780 static tree
1781 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
1782 				tree orig_type,
1783 				bool allow_negative_idx)
1784 {
1785   tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1786   tree array_type, elt_type, elt_size;
1787   tree domain_type;
1788 
1789   /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1790      measured in units of the size of the element type) from that ARRAY_REF.
1791      We can't do anything if either the low bound or that offset is variable.
1792 
1793      The case we handle here is *(&A[N]+O).  */
1794   if (TREE_CODE (base) == ARRAY_REF)
1795     {
1796       tree low_bound = array_ref_low_bound (base);
1797 
1798       elt_offset = TREE_OPERAND (base, 1);
1799       if (TREE_CODE (low_bound) != INTEGER_CST
1800 	  || TREE_CODE (elt_offset) != INTEGER_CST)
1801 	return NULL_TREE;
1802 
1803       elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1804       base = TREE_OPERAND (base, 0);
1805     }
1806 
1807   /* Ignore stupid user tricks of indexing non-array variables.  */
1808   array_type = TREE_TYPE (base);
1809   if (TREE_CODE (array_type) != ARRAY_TYPE)
1810     return NULL_TREE;
1811   elt_type = TREE_TYPE (array_type);
1812   if (!useless_type_conversion_p (orig_type, elt_type))
1813     return NULL_TREE;
1814 
1815   /* Use signed size type for intermediate computation on the index.  */
1816   idx_type = signed_type_for (size_type_node);
1817 
1818   /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1819      element type (so we can use the alignment if it's not constant).
1820      Otherwise, compute the offset as an index by using a division.  If the
1821      division isn't exact, then don't do anything.  */
1822   elt_size = TYPE_SIZE_UNIT (elt_type);
1823   if (!elt_size)
1824     return NULL_TREE;
1825   if (integer_zerop (offset))
1826     {
1827       if (TREE_CODE (elt_size) != INTEGER_CST)
1828 	elt_size = size_int (TYPE_ALIGN (elt_type));
1829 
1830       idx = build_int_cst (idx_type, 0);
1831     }
1832   else
1833     {
1834       unsigned HOST_WIDE_INT lquo, lrem;
1835       HOST_WIDE_INT hquo, hrem;
1836       double_int soffset;
1837 
1838       /* The final array offset should be signed, so we need
1839 	 to sign-extend the (possibly pointer) offset here
1840 	 and use signed division.  */
1841       soffset = double_int_sext (tree_to_double_int (offset),
1842 				 TYPE_PRECISION (TREE_TYPE (offset)));
1843       if (TREE_CODE (elt_size) != INTEGER_CST
1844 	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
1845 				   soffset.low, soffset.high,
1846 				   TREE_INT_CST_LOW (elt_size),
1847 				   TREE_INT_CST_HIGH (elt_size),
1848 				   &lquo, &hquo, &lrem, &hrem)
1849 	  || lrem || hrem)
1850 	return NULL_TREE;
1851 
1852       idx = build_int_cst_wide (idx_type, lquo, hquo);
1853     }
1854 
1855   /* Assume the low bound is zero.  If there is a domain type, get the
1856      low bound, if any, convert the index into that type, and add the
1857      low bound.  */
1858   min_idx = build_int_cst (idx_type, 0);
1859   domain_type = TYPE_DOMAIN (array_type);
1860   if (domain_type)
1861     {
1862       idx_type = domain_type;
1863       if (TYPE_MIN_VALUE (idx_type))
1864 	min_idx = TYPE_MIN_VALUE (idx_type);
1865       else
1866 	min_idx = fold_convert (idx_type, min_idx);
1867 
1868       if (TREE_CODE (min_idx) != INTEGER_CST)
1869 	return NULL_TREE;
1870 
1871       elt_offset = fold_convert (idx_type, elt_offset);
1872     }
1873 
1874   if (!integer_zerop (min_idx))
1875     idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1876   if (!integer_zerop (elt_offset))
1877     idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1878 
1879   /* Make sure to possibly truncate late after offsetting.  */
1880   idx = fold_convert (idx_type, idx);
1881 
1882   /* We don't want to construct accesses past array bounds.  For example
1883        char *(c[4]);
1884        c[3][2];
1885      should not be simplified into (*c)[14] or tree-vrp will
1886      give false warnings.  The same is true for
1887        struct A { long x; char d[0]; } *a;
1888        (char *)a - 4;
1889      which should not be folded to &a->d[-8].  */
1890   if (domain_type
1891       && TYPE_MAX_VALUE (domain_type)
1892       && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1893     {
1894       tree up_bound = TYPE_MAX_VALUE (domain_type);
1895 
1896       if (tree_int_cst_lt (up_bound, idx)
1897 	  /* Accesses after the end of arrays of size 0 (gcc
1898 	     extension) and 1 are likely intentional ("struct
1899 	     hack").  */
1900 	  && compare_tree_int (up_bound, 1) > 0)
1901 	return NULL_TREE;
1902     }
1903   if (domain_type
1904       && TYPE_MIN_VALUE (domain_type))
1905     {
1906       if (!allow_negative_idx
1907 	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1908 	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1909 	return NULL_TREE;
1910     }
1911   else if (!allow_negative_idx
1912 	   && compare_tree_int (idx, 0) < 0)
1913     return NULL_TREE;
1914 
1915   {
1916     tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1917     SET_EXPR_LOCATION (t, loc);
1918     return t;
1919   }
1920 }
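/* For example, assuming a target where int is 4 bytes wide, the
   hypothetical access

     int a[10];
     ... *(int *)((char *)&a + 8) ...

   reaches this function with OFFSET == 8 and ORIG_TYPE == int; the
   offset divides the element size exactly, so the ARRAY_REF a[2] is
   returned.  An offset of, say, 6 would leave a nonzero remainder
   and NULL_TREE would be returned instead.  */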
1921 
1922 
1923 /* Attempt to fold *(S+O) to S.X.
1924    BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
1925    is the desired result type.
1926 
1927    LOC is the location of the original expression.  */
1928 
1929 static tree
1930 maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
1931 				    tree base, tree offset, tree orig_type)
1932 {
1933   tree f, t, field_type, tail_array_field, field_offset;
1934   tree ret;
1935   tree new_base;
1936 
1937   if (TREE_CODE (record_type) != RECORD_TYPE
1938       && TREE_CODE (record_type) != UNION_TYPE
1939       && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1940     return NULL_TREE;
1941 
1942   /* Short-circuit silly cases.  */
1943   if (useless_type_conversion_p (record_type, orig_type))
1944     return NULL_TREE;
1945 
1946   tail_array_field = NULL_TREE;
1947   for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1948     {
1949       int cmp;
1950 
1951       if (TREE_CODE (f) != FIELD_DECL)
1952 	continue;
1953       if (DECL_BIT_FIELD (f))
1954 	continue;
1955 
1956       if (!DECL_FIELD_OFFSET (f))
1957 	continue;
1958       field_offset = byte_position (f);
1959       if (TREE_CODE (field_offset) != INTEGER_CST)
1960 	continue;
1961 
1962       /* ??? Java creates "interesting" fields for representing base classes.
1963 	 They have no name, and have no context.  With no context, we get into
1964 	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
1965       if (!DECL_FIELD_CONTEXT (f))
1966 	continue;
1967 
1968       /* The previous array field isn't at the end.  */
1969       tail_array_field = NULL_TREE;
1970 
1971       /* Check to see if this offset overlaps with the field.  */
1972       cmp = tree_int_cst_compare (field_offset, offset);
1973       if (cmp > 0)
1974 	continue;
1975 
1976       field_type = TREE_TYPE (f);
1977 
1978       /* Here we exactly match the offset being checked.  If the types match,
1979 	 then we can return that field.  */
1980       if (cmp == 0
1981 	  && useless_type_conversion_p (orig_type, field_type))
1982 	{
1983 	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1984 	  return t;
1985 	}
1986 
1987       /* Don't care about offsets into the middle of scalars.  */
1988       if (!AGGREGATE_TYPE_P (field_type))
1989 	continue;
1990 
1991       /* Check for array at the end of the struct.  This is often
1992 	 used for flexible array members.  We should be able to
1993 	 turn this into an array access anyway.  */
1994       if (TREE_CODE (field_type) == ARRAY_TYPE)
1995 	tail_array_field = f;
1996 
1997       /* Check the end of the field against the offset.  */
1998       if (!DECL_SIZE_UNIT (f)
1999 	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
2000 	continue;
2001       t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
2002       if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
2003 	continue;
2004 
2005       /* If we matched, then set offset to the displacement into
2006 	 this field.  */
2007       new_base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
2008       SET_EXPR_LOCATION (new_base, loc);
2009 
2010       /* Recurse to possibly find the match.  */
2011       ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
2012 					    f == TYPE_FIELDS (record_type));
2013       if (ret)
2014 	return ret;
2015       ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
2016 						orig_type);
2017       if (ret)
2018 	return ret;
2019     }
2020 
2021   if (!tail_array_field)
2022     return NULL_TREE;
2023 
2024   f = tail_array_field;
2025   field_type = TREE_TYPE (f);
2026   offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
2027 
2028   /* If we get here, we've got an aggregate field, and a possibly
2029      nonzero offset into it.  Recurse and hope for a valid match.  */
2030   base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
2031   SET_EXPR_LOCATION (base, loc);
2032 
2033   t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
2034 				      f == TYPE_FIELDS (record_type));
2035   if (t)
2036     return t;
2037   return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
2038 					     orig_type);
2039 }
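/* For example, given the hypothetical structure

     struct S { int x; int y; } s;

   with a 4-byte int, folding *(int *)((char *)&s + 4) calls this
   function with OFFSET == 4; field Y starts exactly at byte position
   4 and has the matching type, so the COMPONENT_REF s.y is returned.
   An offset landing in the middle of a scalar field is not handled
   and yields NULL_TREE.  */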
2040 
2041 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
2042    or BASE[index] or by combination of those.
2043 
2044    LOC is the location of original expression.
2045 
2046    Before attempting the conversion strip off existing ADDR_EXPRs and
2047    handled component refs.  */
2048 
2049 tree
2050 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
2051 				tree orig_type)
2052 {
2053   tree ret;
2054   tree type;
2055 
2056   STRIP_NOPS (base);
2057   if (TREE_CODE (base) != ADDR_EXPR)
2058     return NULL_TREE;
2059 
2060   base = TREE_OPERAND (base, 0);
2061 
2062   /* Handle the case where an existing COMPONENT_REF picks, e.g., the wrong
2063      field of a union, so that it needs to be removed and a new COMPONENT_REF
2064      constructed.  Such wrong COMPONENT_REFs are often constructed by folding
2065      the (type *)&object within the expression (type *)&object+offset.  */
2066   if (handled_component_p (base))
2067     {
2068       HOST_WIDE_INT sub_offset, size, maxsize;
2069       tree newbase;
2070       newbase = get_ref_base_and_extent (base, &sub_offset,
2071 					 &size, &maxsize);
2072       gcc_assert (newbase);
2073       if (size == maxsize
2074 	  && size != -1
2075 	  && !(sub_offset & (BITS_PER_UNIT - 1)))
2076 	{
2077 	  base = newbase;
2078 	  if (sub_offset)
2079 	    offset = int_const_binop (PLUS_EXPR, offset,
2080 				      build_int_cst (TREE_TYPE (offset),
2081 						     sub_offset / BITS_PER_UNIT), 1);
2082 	}
2083     }
2084   if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
2085       && integer_zerop (offset))
2086     return base;
2087   type = TREE_TYPE (base);
2088 
2089   ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
2090   if (!ret)
2091     ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);
2092 
2093   return ret;
2094 }
2095 
2096 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
2097    or &BASE[index], or by a combination of those.
2098 
2099    LOC is the location of the original expression.
2100 
2101    Before attempting the conversion strip off existing component refs.  */
2102 
2103 tree
2104 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
2105 			      tree orig_type)
2106 {
2107   tree t;
2108 
2109   gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
2110 	      && POINTER_TYPE_P (orig_type));
2111 
2112   t = maybe_fold_offset_to_reference (loc, addr, offset,
2113 				      TREE_TYPE (orig_type));
2114   if (t != NULL_TREE)
2115     {
2116       tree orig = addr;
2117       tree ptr_type;
2118 
2119       /* For __builtin_object_size to function correctly we need to
2120          make sure not to fold address arithmetic in a way that changes
2121 	 the reference from one array to another.  This would happen
2122 	 for example for
2123 
2124 	   struct X { char s1[10]; char s2[10]; } s;
2125 	   char *foo (void) { return &s.s2[-4]; }
2126 
2127 	 where we need to avoid generating &s.s1[6].  As the C and
2128 	 C++ frontends create different initial trees
2129 	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
2130 	 sophisticated comparisons here.  Note that checking for the
2131 	 condition after the fact is easier than trying to avoid doing
2132 	 the folding.  */
2133       STRIP_NOPS (orig);
2134       if (TREE_CODE (orig) == ADDR_EXPR)
2135 	orig = TREE_OPERAND (orig, 0);
2136       if ((TREE_CODE (orig) == ARRAY_REF
2137 	   || (TREE_CODE (orig) == COMPONENT_REF
2138 	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
2139 	  && (TREE_CODE (t) == ARRAY_REF
2140 	      || TREE_CODE (t) == COMPONENT_REF)
2141 	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
2142 			       ? TREE_OPERAND (orig, 0) : orig,
2143 			       TREE_CODE (t) == ARRAY_REF
2144 			       ? TREE_OPERAND (t, 0) : t, 0))
2145 	return NULL_TREE;
2146 
2147       ptr_type = build_pointer_type (TREE_TYPE (t));
2148       if (!useless_type_conversion_p (orig_type, ptr_type))
2149 	return NULL_TREE;
2150       return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
2151     }
2152 
2153   return NULL_TREE;
2154 }
2155 
2156 /* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
2157    Return the simplified expression, or NULL if nothing could be done.  */
2158 
2159 static tree
2160 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
2161 {
2162   tree t;
2163   bool volatile_p = TREE_THIS_VOLATILE (expr);
2164   location_t loc = EXPR_LOCATION (expr);
2165 
2166   /* We may well have constructed a double-nested PLUS_EXPR via multiple
2167      substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
2168      are sometimes added.  */
2169   base = fold (base);
2170   STRIP_TYPE_NOPS (base);
2171   TREE_OPERAND (expr, 0) = base;
2172 
2173   /* One possibility is that the address reduces to a string constant.  */
2174   t = fold_read_from_constant_string (expr);
2175   if (t)
2176     return t;
2177 
2178   /* Add in any offset from a POINTER_PLUS_EXPR.  */
2179   if (TREE_CODE (base) == POINTER_PLUS_EXPR)
2180     {
2181       tree offset2;
2182 
2183       offset2 = TREE_OPERAND (base, 1);
2184       if (TREE_CODE (offset2) != INTEGER_CST)
2185 	return NULL_TREE;
2186       base = TREE_OPERAND (base, 0);
2187 
2188       offset = fold_convert (sizetype,
2189 			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
2190     }
2191 
2192   if (TREE_CODE (base) == ADDR_EXPR)
2193     {
2194       tree base_addr = base;
2195 
2196       /* Strip the ADDR_EXPR.  */
2197       base = TREE_OPERAND (base, 0);
2198 
2199       /* Fold away CONST_DECL to its value, if the type is scalar.  */
2200       if (TREE_CODE (base) == CONST_DECL
2201 	  && is_gimple_min_invariant (DECL_INITIAL (base)))
2202 	return DECL_INITIAL (base);
2203 
2204       /* If there is no offset involved, simply return the folded base.  */
2205       if (integer_zerop (offset))
2206 	return base;
2207 
2208       /* Try folding *(&B+O) to B.X.  */
2209       t = maybe_fold_offset_to_reference (loc, base_addr, offset,
2210 					  TREE_TYPE (expr));
2211       if (t)
2212 	{
2213 	  /* Preserve volatileness of the original expression.
2214 	     We can end up with a plain decl here which is shared
2215 	     and we shouldn't mess with its flags.  */
2216 	  if (!SSA_VAR_P (t))
2217 	    TREE_THIS_VOLATILE (t) = volatile_p;
2218 	  return t;
2219 	}
2220     }
2221   else
2222     {
2223       /* We can get here for out-of-range string constant accesses,
2224 	 such as "_"[3].  Bail out of the entire substitution search
2225 	 and arrange for the entire statement to be replaced by a
2226 	 call to __builtin_trap.  In all likelihood this will all be
2227 	 constant-folded away, but in the meantime we can't leave with
2228 	 something that get_expr_operands can't understand.  */
2229 
2230       t = base;
2231       STRIP_NOPS (t);
2232       if (TREE_CODE (t) == ADDR_EXPR
2233 	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2234 	{
2235 	  /* FIXME: Except that this causes problems elsewhere with dead
2236 	     code not being deleted, and we die in the rtl expanders
2237 	     because we failed to remove some ssa_name.  In the meantime,
2238 	     just return zero.  */
2239 	  /* FIXME2: This condition should be signaled by
2240 	     fold_read_from_constant_string directly, rather than
2241 	     re-checking for it here.  */
2242 	  return integer_zero_node;
2243 	}
2244 
2245       /* Try folding *(B+O) to B->X.  Still an improvement.  */
2246       if (POINTER_TYPE_P (TREE_TYPE (base)))
2247 	{
2248           t = maybe_fold_offset_to_reference (loc, base, offset,
2249 				              TREE_TYPE (expr));
2250 	  if (t)
2251 	    return t;
2252 	}
2253     }
2254 
2255   /* Otherwise we had an offset that we could not simplify.  */
2256   return NULL_TREE;
2257 }
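/* For example, a load from a constant string such as "abc"[1] can be
   reduced by fold_read_from_constant_string above to the character
   constant 'b', while a dereference of an ADDR_EXPR with a zero
   offset, such as *&b, simply returns the folded base b.  */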
2258 
2259 
2260 /* A quaint feature extant in our address arithmetic is that there
2261    can be hidden type changes here.  The type of the result need
2262    not be the same as the type of the input pointer.
2263 
2264    What we're after here is an expression of the form
2265 	(T *)(&array + const)
2266    where array is OP0, const is OP1, RES_TYPE is T and
2267    the cast doesn't actually exist, but is implicit in the
2268    type of the POINTER_PLUS_EXPR.  We'd like to turn this into
2269 	&array[x]
2270    which may be able to propagate further.  */
2271 
2272 tree
2273 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
2274 {
2275   tree ptd_type;
2276   tree t;
2277 
2278   /* The first operand should be an ADDR_EXPR.  */
2279   if (TREE_CODE (op0) != ADDR_EXPR)
2280     return NULL_TREE;
2281   op0 = TREE_OPERAND (op0, 0);
2282 
2283   /* It had better be a constant.  */
2284   if (TREE_CODE (op1) != INTEGER_CST)
2285     {
2286       /* Or op0 should now be A[0] and the non-constant offset defined
2287 	 via a multiplication by the array element size.  */
2288       if (TREE_CODE (op0) == ARRAY_REF
2289 	  && integer_zerop (TREE_OPERAND (op0, 1))
2290 	  && TREE_CODE (op1) == SSA_NAME
2291 	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
2292 	{
2293 	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
2294 	  if (!is_gimple_assign (offset_def))
2295 	    return NULL_TREE;
2296 
2297 	  /* As we will end up creating a variable-index array access
2298 	     in the outermost array dimension, make sure there isn't
2299 	     an inner array that the index could overflow into.  */
2300 	  if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)
2301 	    return NULL_TREE;
2302 
2303 	  /* Do not build array references to something for which we
2304 	     can't see the true number of array dimensions.  */
2305 	  if (!DECL_P (TREE_OPERAND (op0, 0))
2306 	      && !handled_component_p (TREE_OPERAND (op0, 0)))
2307 	    return NULL_TREE;
2308 
2309 	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
2310 	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
2311 	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
2312 				     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
2313 	    return build_fold_addr_expr
2314 			  (build4 (ARRAY_REF, TREE_TYPE (op0),
2315 				   TREE_OPERAND (op0, 0),
2316 				   gimple_assign_rhs1 (offset_def),
2317 				   TREE_OPERAND (op0, 2),
2318 				   TREE_OPERAND (op0, 3)));
2319 	  else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
2320 		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
2321 	    return build_fold_addr_expr
2322 			  (build4 (ARRAY_REF, TREE_TYPE (op0),
2323 				   TREE_OPERAND (op0, 0),
2324 				   op1,
2325 				   TREE_OPERAND (op0, 2),
2326 				   TREE_OPERAND (op0, 3)));
2327 	}
2328       return NULL_TREE;
2329     }
2330 
2331   /* If the first operand is an ARRAY_REF, expand it so that we can fold
2332      the offset into it.  */
2333   while (TREE_CODE (op0) == ARRAY_REF)
2334     {
2335       tree array_obj = TREE_OPERAND (op0, 0);
2336       tree array_idx = TREE_OPERAND (op0, 1);
2337       tree elt_type = TREE_TYPE (op0);
2338       tree elt_size = TYPE_SIZE_UNIT (elt_type);
2339       tree min_idx;
2340 
2341       if (TREE_CODE (array_idx) != INTEGER_CST)
2342 	break;
2343       if (TREE_CODE (elt_size) != INTEGER_CST)
2344 	break;
2345 
2346       /* Un-bias the index by the min index of the array type.  */
2347       min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2348       if (min_idx)
2349 	{
2350 	  min_idx = TYPE_MIN_VALUE (min_idx);
2351 	  if (min_idx)
2352 	    {
2353 	      if (TREE_CODE (min_idx) != INTEGER_CST)
2354 		break;
2355 
2356 	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2357 	      if (!integer_zerop (min_idx))
2358 		array_idx = int_const_binop (MINUS_EXPR, array_idx,
2359 					     min_idx, 0);
2360 	    }
2361 	}
2362 
2363       /* Convert the index to a byte offset.  */
2364       array_idx = fold_convert (sizetype, array_idx);
2365       array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2366 
2367       /* Update the operands for the next round, or for folding.  */
2368       op1 = int_const_binop (PLUS_EXPR,
2369 			     array_idx, op1, 0);
2370       op0 = array_obj;
2371     }
2372 
2373   ptd_type = TREE_TYPE (res_type);
2374   /* If we want a pointer to void, reconstruct the reference from the
2375      array element type.  A pointer to that can be trivially converted
2376      to void *.  This happens as we fold (void *)(ptr p+ off).  */
2377   if (VOID_TYPE_P (ptd_type)
2378       && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2379     ptd_type = TREE_TYPE (TREE_TYPE (op0));
2380 
2381   /* At which point we can try some of the same things as for indirects.  */
2382   t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
2383   if (!t)
2384     t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
2385 					    ptd_type);
2386   if (t)
2387     {
2388       t = build1 (ADDR_EXPR, res_type, t);
2389       SET_EXPR_LOCATION (t, loc);
2390     }
2391 
2392   return t;
2393 }
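/* For example, with the hypothetical declaration

     int a[10];

   and a 4-byte int, the POINTER_PLUS_EXPR (int *)&a p+ 8 matches the
   (T *)(&array + const) pattern described above and is rewritten as
   &a[2], which later propagation may simplify further.  */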
2394 
2395 /* Subroutine of fold_stmt.  We perform several simplifications of the
2396    memory reference tree EXPR and make sure to re-gimplify them properly
2397    after propagation of constant addresses.  IS_LHS is true if the
2398    reference is supposed to be an lvalue.  */
2399 
2400 static tree
2401 maybe_fold_reference (tree expr, bool is_lhs)
2402 {
2403   tree *t = &expr;
2404 
2405   if (TREE_CODE (expr) == ARRAY_REF
2406       && !is_lhs)
2407     {
2408       tree tem = fold_read_from_constant_string (expr);
2409       if (tem)
2410 	return tem;
2411     }
2412 
2413   /* ???  We might want to open-code the relevant remaining cases
2414      to avoid using the generic fold.  */
2415   if (handled_component_p (*t)
2416       && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
2417     {
2418       tree tem = fold (*t);
2419       if (tem != *t)
2420 	return tem;
2421     }
2422 
2423   while (handled_component_p (*t))
2424     t = &TREE_OPERAND (*t, 0);
2425 
2426   if (TREE_CODE (*t) == INDIRECT_REF)
2427     {
2428       tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
2429 					   integer_zero_node);
2430       /* Avoid folding *"abc" = 5 into 'a' = 5.  */
2431       if (is_lhs && tem && CONSTANT_CLASS_P (tem))
2432 	tem = NULL_TREE;
2433       if (!tem
2434 	  && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
2435 	/* If we had a good reason for propagating the address here,
2436 	   make sure we end up with valid gimple.  See PR34989.  */
2437 	tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2438 
2439       if (tem)
2440 	{
2441 	  *t = tem;
2442 	  tem = maybe_fold_reference (expr, is_lhs);
2443 	  if (tem)
2444 	    return tem;
2445 	  return expr;
2446 	}
2447     }
2448   else if (!is_lhs
2449 	   && DECL_P (*t))
2450     {
2451       tree tem = get_symbol_constant_value (*t);
2452       if (tem
2453 	  && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
2454 	{
2455 	  *t = unshare_expr (tem);
2456 	  tem = maybe_fold_reference (expr, is_lhs);
2457 	  if (tem)
2458 	    return tem;
2459 	  return expr;
2460 	}
2461     }
2462 
2463   return NULL_TREE;
2464 }
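/* For example, a reference such as *&x, as produced by propagating
   the address &x into a dereference, folds back to plain x through
   the INDIRECT_REF handling above; likewise, a load from the
   hypothetical constant

     static const int c = 42;

   can be replaced by 42 via get_symbol_constant_value when the
   reference is not used as a left-hand side.  */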
2465 
2466 
2467 /* Return the string length, maximum string length, or maximum value of
2468    ARG in *LENGTH.
2469    If ARG is an SSA name variable, follow its use-def chains.  Return
2470    false if *LENGTH is already set and, for TYPE == 0, is not equal to
2471    the length we determine, or if we are unable to determine the length
2472    or value at all.  VISITED is a bitmap of visited variables.
2473    TYPE is 0 if the string length should be returned, 1 for the maximum
2474    string length and 2 for the maximum value ARG can have.  */
2475 
2476 static bool
2477 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2478 {
2479   tree var, val;
2480   gimple def_stmt;
2481 
2482   if (TREE_CODE (arg) != SSA_NAME)
2483     {
2484       if (TREE_CODE (arg) == COND_EXPR)
2485         return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2486                && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2487       /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
2488       else if (TREE_CODE (arg) == ADDR_EXPR
2489 	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2490 	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2491 	{
2492 	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2493 	  if (TREE_CODE (aop0) == INDIRECT_REF
2494 	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2495 	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2496 				      length, visited, type);
2497 	}
2498 
2499       if (type == 2)
2500 	{
2501 	  val = arg;
2502 	  if (TREE_CODE (val) != INTEGER_CST
2503 	      || tree_int_cst_sgn (val) < 0)
2504 	    return false;
2505 	}
2506       else
2507 	val = c_strlen (arg, 1);
2508       if (!val)
2509 	return false;
2510 
2511       if (*length)
2512 	{
2513 	  if (type > 0)
2514 	    {
2515 	      if (TREE_CODE (*length) != INTEGER_CST
2516 		  || TREE_CODE (val) != INTEGER_CST)
2517 		return false;
2518 
2519 	      if (tree_int_cst_lt (*length, val))
2520 		*length = val;
2521 	      return true;
2522 	    }
2523 	  else if (simple_cst_equal (val, *length) != 1)
2524 	    return false;
2525 	}
2526 
2527       *length = val;
2528       return true;
2529     }
2530 
2531   /* If we were already here, break the infinite cycle.  */
2532   if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2533     return true;
2534   bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2535 
2536   var = arg;
2537   def_stmt = SSA_NAME_DEF_STMT (var);
2538 
2539   switch (gimple_code (def_stmt))
2540     {
2541       case GIMPLE_ASSIGN:
2542         /* The RHS of the statement defining VAR must either have a
2543            constant length or come from another SSA_NAME with a constant
2544            length.  */
2545         if (gimple_assign_single_p (def_stmt)
2546             || gimple_assign_unary_nop_p (def_stmt))
2547           {
2548             tree rhs = gimple_assign_rhs1 (def_stmt);
2549             return get_maxval_strlen (rhs, length, visited, type);
2550           }
2551         return false;
2552 
2553       case GIMPLE_PHI:
2554 	{
2555 	  /* All the arguments of the PHI node must have the same constant
2556 	     length.  */
2557 	  unsigned i;
2558 
2559 	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2560           {
2561             tree arg = gimple_phi_arg (def_stmt, i)->def;
2562 
2563             /* If this PHI has itself as an argument, we cannot
2564                determine the string length of this argument.  However,
2565                if we can find a constant string length for the other
2566                PHI args then we can still be sure that this is a
2567                constant string length.  So be optimistic and just
2568                continue with the next argument.  */
2569             if (arg == gimple_phi_result (def_stmt))
2570               continue;
2571 
2572             if (!get_maxval_strlen (arg, length, visited, type))
2573               return false;
2574           }
2575         }
2576         return true;
2577 
2578       default:
2579         return false;
2580     }
2581 }
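/* For example, for the hypothetical PHI node

     s_3 = PHI <"foo"(2), "barbaz"(3)>

   a query with TYPE == 1 visits both arguments and stores the maximum
   string length 6 in *LENGTH, whereas with TYPE == 0 the two lengths
   3 and 6 disagree and false is returned.  */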
2582 
2583 
2584 /* Fold a builtin call in statement STMT.  Returns a simplified tree.
2585    We may return a non-constant expression, including another call
2586    to a different function and with different arguments, e.g.,
2587    substituting memcpy for strcpy when the string length is known.
2588    Note that some builtins expand into inline code that may not
2589    be valid in GIMPLE.  Callers must take care.  */
2590 
2591 static tree
2592 ccp_fold_builtin (gimple stmt)
2593 {
2594   tree result, val[3];
2595   tree callee, a;
2596   int arg_idx, type;
2597   bitmap visited;
2598   bool ignore;
2599   int nargs;
2600   location_t loc = gimple_location (stmt);
2601 
2602   gcc_assert (is_gimple_call (stmt));
2603 
2604   ignore = (gimple_call_lhs (stmt) == NULL);
2605 
2606   /* First try the generic builtin folder.  If that succeeds, return the
2607      result directly.  */
2608   result = fold_call_stmt (stmt, ignore);
2609   if (result)
2610     {
2611       if (ignore)
2612 	STRIP_NOPS (result);
2613       return result;
2614     }
2615 
2616   /* Ignore MD builtins.  */
2617   callee = gimple_call_fndecl (stmt);
2618   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2619     return NULL_TREE;
2620 
2621   /* If the builtin could not be folded, and it has no argument list,
2622      we're done.  */
2623   nargs = gimple_call_num_args (stmt);
2624   if (nargs == 0)
2625     return NULL_TREE;
2626 
2627   /* Limit the work to builtins we know how to simplify.  */
2628   switch (DECL_FUNCTION_CODE (callee))
2629     {
2630     case BUILT_IN_STRLEN:
2631     case BUILT_IN_FPUTS:
2632     case BUILT_IN_FPUTS_UNLOCKED:
2633       arg_idx = 0;
2634       type = 0;
2635       break;
2636     case BUILT_IN_STRCPY:
2637     case BUILT_IN_STRNCPY:
2638       arg_idx = 1;
2639       type = 0;
2640       break;
2641     case BUILT_IN_MEMCPY_CHK:
2642     case BUILT_IN_MEMPCPY_CHK:
2643     case BUILT_IN_MEMMOVE_CHK:
2644     case BUILT_IN_MEMSET_CHK:
2645     case BUILT_IN_STRNCPY_CHK:
2646       arg_idx = 2;
2647       type = 2;
2648       break;
2649     case BUILT_IN_STRCPY_CHK:
2650     case BUILT_IN_STPCPY_CHK:
2651       arg_idx = 1;
2652       type = 1;
2653       break;
2654     case BUILT_IN_SNPRINTF_CHK:
2655     case BUILT_IN_VSNPRINTF_CHK:
2656       arg_idx = 1;
2657       type = 2;
2658       break;
2659     default:
2660       return NULL_TREE;
2661     }
2662 
2663   if (arg_idx >= nargs)
2664     return NULL_TREE;
2665 
2666   /* Try to use the dataflow information gathered by the CCP process.  */
2667   visited = BITMAP_ALLOC (NULL);
2668   bitmap_clear (visited);
2669 
2670   memset (val, 0, sizeof (val));
2671   a = gimple_call_arg (stmt, arg_idx);
2672   if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2673     val[arg_idx] = NULL_TREE;
2674 
2675   BITMAP_FREE (visited);
2676 
2677   result = NULL_TREE;
2678   switch (DECL_FUNCTION_CODE (callee))
2679     {
2680     case BUILT_IN_STRLEN:
2681       if (val[0] && nargs == 1)
2682 	{
2683 	  tree new_val =
2684               fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2685 
2686 	  /* If the result is not a valid gimple value, or not a cast
2687 	     of a valid gimple value, then we cannot use the result.  */
2688 	  if (is_gimple_val (new_val)
2689 	      || (is_gimple_cast (new_val)
2690 		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
2691 	    return new_val;
2692 	}
2693       break;
2694 
2695     case BUILT_IN_STRCPY:
2696       if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2697 	result = fold_builtin_strcpy (loc, callee,
2698                                       gimple_call_arg (stmt, 0),
2699                                       gimple_call_arg (stmt, 1),
2700 				      val[1]);
2701       break;
2702 
2703     case BUILT_IN_STRNCPY:
2704       if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2705 	result = fold_builtin_strncpy (loc, callee,
2706                                        gimple_call_arg (stmt, 0),
2707                                        gimple_call_arg (stmt, 1),
2708                                        gimple_call_arg (stmt, 2),
2709 				       val[1]);
2710       break;
2711 
2712     case BUILT_IN_FPUTS:
2713       if (nargs == 2)
2714 	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
2715 				     gimple_call_arg (stmt, 1),
2716 				     ignore, false, val[0]);
2717       break;
2718 
2719     case BUILT_IN_FPUTS_UNLOCKED:
2720       if (nargs == 2)
2721 	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
2722 				     gimple_call_arg (stmt, 1),
2723 				     ignore, true, val[0]);
2724       break;
2725 
2726     case BUILT_IN_MEMCPY_CHK:
2727     case BUILT_IN_MEMPCPY_CHK:
2728     case BUILT_IN_MEMMOVE_CHK:
2729     case BUILT_IN_MEMSET_CHK:
2730       if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2731 	result = fold_builtin_memory_chk (loc, callee,
2732                                           gimple_call_arg (stmt, 0),
2733                                           gimple_call_arg (stmt, 1),
2734                                           gimple_call_arg (stmt, 2),
2735                                           gimple_call_arg (stmt, 3),
2736 					  val[2], ignore,
2737 					  DECL_FUNCTION_CODE (callee));
2738       break;
2739 
2740     case BUILT_IN_STRCPY_CHK:
2741     case BUILT_IN_STPCPY_CHK:
2742       if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2743 	result = fold_builtin_stxcpy_chk (loc, callee,
2744                                           gimple_call_arg (stmt, 0),
2745                                           gimple_call_arg (stmt, 1),
2746                                           gimple_call_arg (stmt, 2),
2747 					  val[1], ignore,
2748 					  DECL_FUNCTION_CODE (callee));
2749       break;
2750 
2751     case BUILT_IN_STRNCPY_CHK:
2752       if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2753 	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
2754                                            gimple_call_arg (stmt, 1),
2755                                            gimple_call_arg (stmt, 2),
2756                                            gimple_call_arg (stmt, 3),
2757 					   val[2]);
2758       break;
2759 
2760     case BUILT_IN_SNPRINTF_CHK:
2761     case BUILT_IN_VSNPRINTF_CHK:
2762       if (val[1] && is_gimple_val (val[1]))
2763 	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2764                                                    DECL_FUNCTION_CODE (callee));
2765       break;
2766 
2767     default:
2768       gcc_unreachable ();
2769     }
2770 
2771   if (result && ignore)
2772     result = fold_ignored_result (result);
2773   return result;
2774 }
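/* For example, if get_maxval_strlen above determines that the source
   of the hypothetical call

     strcpy (dst, src_1);

   is a string of known constant length, fold_builtin_strcpy can
   replace the call with an equivalent memcpy of length + 1 bytes,
   which is the memcpy-for-strcpy substitution mentioned in the
   comment before this function.  */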
2775 
2776 /* Attempt to fold an assignment statement pointed to by SI.  Returns a
2777    replacement rhs for the statement or NULL_TREE if no simplification
2778    could be made.  It is assumed that the operands have been previously
2779    folded.  */
2780 
2781 static tree
2782 fold_gimple_assign (gimple_stmt_iterator *si)
2783 {
2784   gimple stmt = gsi_stmt (*si);
2785   enum tree_code subcode = gimple_assign_rhs_code (stmt);
2786   location_t loc = gimple_location (stmt);
2787 
2788   tree result = NULL_TREE;
2789 
2790   switch (get_gimple_rhs_class (subcode))
2791     {
2792     case GIMPLE_SINGLE_RHS:
2793       {
2794         tree rhs = gimple_assign_rhs1 (stmt);
2795 
2796         /* Try to fold a conditional expression.  */
2797         if (TREE_CODE (rhs) == COND_EXPR)
2798           {
2799 	    tree op0 = COND_EXPR_COND (rhs);
2800 	    tree tem;
2801 	    bool set = false;
2802 	    location_t cond_loc = EXPR_LOCATION (rhs);
2803 
2804 	    if (COMPARISON_CLASS_P (op0))
2805 	      {
2806 		fold_defer_overflow_warnings ();
2807 		tem = fold_binary_loc (cond_loc,
2808 				   TREE_CODE (op0), TREE_TYPE (op0),
2809 				   TREE_OPERAND (op0, 0),
2810 				   TREE_OPERAND (op0, 1));
2811 		/* This is actually a conditional expression, not a GIMPLE
2812 		   conditional statement; however, the valid_gimple_rhs_p
2813 		   test still applies.  */
2814 		set = (tem && is_gimple_condexpr (tem)
2815 		       && valid_gimple_rhs_p (tem));
2816 		fold_undefer_overflow_warnings (set, stmt, 0);
2817 	      }
2818 	    else if (is_gimple_min_invariant (op0))
2819 	      {
2820 		tem = op0;
2821 		set = true;
2822 	      }
2823 	    else
2824 	      return NULL_TREE;
2825 
2826 	    if (set)
2827 	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
2828 				    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2829           }
2830 
2831 	else if (TREE_CODE (rhs) == TARGET_MEM_REF)
2832 	  return maybe_fold_tmr (rhs);
2833 
2834 	else if (REFERENCE_CLASS_P (rhs))
2835 	  return maybe_fold_reference (rhs, false);
2836 
2837 	else if (TREE_CODE (rhs) == ADDR_EXPR)
2838 	  {
2839 	    tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
2840 	    if (tem)
2841 	      result = fold_convert (TREE_TYPE (rhs),
2842 				     build_fold_addr_expr_loc (loc, tem));
2843 	  }
2844 
2845 	else if (TREE_CODE (rhs) == CONSTRUCTOR
2846 		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2847 		 && (CONSTRUCTOR_NELTS (rhs)
2848 		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2849 	  {
2850 	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
2851 	    unsigned i;
2852 	    tree val;
2853 
2854 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2855 	      if (TREE_CODE (val) != INTEGER_CST
2856 		  && TREE_CODE (val) != REAL_CST
2857 		  && TREE_CODE (val) != FIXED_CST)
2858 		return NULL_TREE;
2859 
2860 	    return build_vector_from_ctor (TREE_TYPE (rhs),
2861 					   CONSTRUCTOR_ELTS (rhs));
2862 	  }
2863 
2864 	else if (DECL_P (rhs))
2865 	  return unshare_expr (get_symbol_constant_value (rhs));
2866 
2867         /* If we couldn't fold the RHS, hand over to the generic
2868            fold routines.  */
2869         if (result == NULL_TREE)
2870           result = fold (rhs);
2871 
2872         /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
2873            that may have been added by fold, and "useless" type
2874            conversions that might now be apparent due to propagation.  */
2875         STRIP_USELESS_TYPE_CONVERSION (result);
2876 
2877         if (result != rhs && valid_gimple_rhs_p (result))
2878 	  return result;
2879 
2880 	return NULL_TREE;
2881       }
2882       break;
2883 
2884     case GIMPLE_UNARY_RHS:
2885       {
2886 	tree rhs = gimple_assign_rhs1 (stmt);
2887 
2888 	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
2889 	if (result)
2890 	  {
2891 	    /* If the operation was a conversion do _not_ mark a
2892 	       resulting constant with TREE_OVERFLOW if the original
2893 	       constant was not.  These conversions have implementation
2894 	       defined behavior and retaining the TREE_OVERFLOW flag
2895 	       here would confuse later passes such as VRP.  */
2896 	    if (CONVERT_EXPR_CODE_P (subcode)
2897 		&& TREE_CODE (result) == INTEGER_CST
2898 		&& TREE_CODE (rhs) == INTEGER_CST)
2899 	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2900 
2901 	    STRIP_USELESS_TYPE_CONVERSION (result);
2902 	    if (valid_gimple_rhs_p (result))
2903 	      return result;
2904 	  }
2905 	else if (CONVERT_EXPR_CODE_P (subcode)
2906 		 && POINTER_TYPE_P (gimple_expr_type (stmt))
2907 		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2908 	  {
2909 	    tree type = gimple_expr_type (stmt);
2910 	    tree t = maybe_fold_offset_to_address (loc,
2911 						   gimple_assign_rhs1 (stmt),
2912 						   integer_zero_node, type);
2913 	    if (t)
2914 	      return t;
2915 	  }
2916       }
2917       break;
2918 
2919     case GIMPLE_BINARY_RHS:
2920       /* Try to fold pointer addition.  */
2921       if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2922 	{
2923 	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2924 	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2925 	    {
2926 	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2927 	      if (!useless_type_conversion_p
2928 		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2929 		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2930 	    }
2931 	  result = maybe_fold_stmt_addition (gimple_location (stmt),
2932 					     type,
2933 					     gimple_assign_rhs1 (stmt),
2934 					     gimple_assign_rhs2 (stmt));
2935 	}
2936 
2937       if (!result)
2938         result = fold_binary_loc (loc, subcode,
2939                               TREE_TYPE (gimple_assign_lhs (stmt)),
2940                               gimple_assign_rhs1 (stmt),
2941                               gimple_assign_rhs2 (stmt));
2942 
2943       if (result)
2944         {
2945           STRIP_USELESS_TYPE_CONVERSION (result);
2946           if (valid_gimple_rhs_p (result))
2947 	    return result;
2948 
2949 	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
2950 	     we lose canonicalization opportunities.  Do not go through
2951 	     fold again here, though, or the same non-GIMPLE will be
2952 	     produced.  */
2953           if (commutative_tree_code (subcode)
2954               && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2955                                        gimple_assign_rhs2 (stmt), false))
2956             return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2957                            gimple_assign_rhs2 (stmt),
2958                            gimple_assign_rhs1 (stmt));
2959         }
2960       break;
2961 
2962     case GIMPLE_INVALID_RHS:
2963       gcc_unreachable ();
2964     }
2965 
2966   return NULL_TREE;
2967 }
2968 
2969 /* Attempt to fold a conditional statement. Return true if any changes were
2970    made. We only attempt to fold the condition expression, and do not perform
2971    any transformation that would require alteration of the cfg.  It is
2972    assumed that the operands have been previously folded.  */
2973 
2974 static bool
2975 fold_gimple_cond (gimple stmt)
2976 {
2977   tree result = fold_binary_loc (gimple_location (stmt),
2978 			     gimple_cond_code (stmt),
2979                              boolean_type_node,
2980                              gimple_cond_lhs (stmt),
2981                              gimple_cond_rhs (stmt));
2982 
2983   if (result)
2984     {
2985       STRIP_USELESS_TYPE_CONVERSION (result);
2986       if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2987         {
2988           gimple_cond_set_condition_from_tree (stmt, result);
2989           return true;
2990         }
2991     }
2992 
2993   return false;
2994 }
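/* For example, after constant propagation a hypothetical condition

     if (4 > 3)

   folds via fold_binary_loc to a constant true value, and
   gimple_cond_set_condition_from_tree rewrites the statement's
   condition accordingly, so a later CFG cleanup can remove the
   dead arm.  */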
2995 
2996 static void gimplify_and_update_call_from_tree (gimple_stmt_iterator *, tree);
2997 
2998 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2999    The statement may be replaced by another statement, e.g., if the call
3000    simplifies to a constant value. Return true if any changes were made.
3001    It is assumed that the operands have been previously folded.  */
3002 
3003 static bool
3004 fold_gimple_call (gimple_stmt_iterator *gsi)
3005 {
3006   gimple stmt = gsi_stmt (*gsi);
3007 
3008   tree callee = gimple_call_fndecl (stmt);
3009 
3010   /* Check for builtins that CCP can handle using information not
3011      available in the generic fold routines.  */
3012   if (callee && DECL_BUILT_IN (callee))
3013     {
3014       tree result = ccp_fold_builtin (stmt);
3015 
3016       if (result)
3017 	{
3018           if (!update_call_from_tree (gsi, result))
3019 	    gimplify_and_update_call_from_tree (gsi, result);
3020 	  return true;
3021 	}
3022     }
3023   else
3024     {
3025       /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
3026          here are when we've propagated the address of a decl into the
3027          object slot.  */
3028       /* ??? Should perhaps do this in fold proper.  However, doing it
3029          there requires that we create a new CALL_EXPR, and that requires
3030          copying EH region info to the new node.  Easier to just do it
3031          here where we can just smash the call operand.  */
3032       /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
3033       callee = gimple_call_fn (stmt);
3034       if (TREE_CODE (callee) == OBJ_TYPE_REF
3035           && lang_hooks.fold_obj_type_ref
3036           && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
3037           && DECL_P (TREE_OPERAND
3038                      (OBJ_TYPE_REF_OBJECT (callee), 0)))
3039         {
3040           tree t;
3041 
3042           /* ??? Caution: Broken ADDR_EXPR semantics means that
3043              looking at the type of the operand of the addr_expr
3044              can yield an array type.  See silly exception in
3045              check_pointer_types_r.  */
3046           t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
3047           t = lang_hooks.fold_obj_type_ref (callee, t);
3048           if (t)
3049             {
3050               gimple_call_set_fn (stmt, t);
3051               return true;
3052             }
3053         }
3054     }
3055 
3056   return false;
3057 }
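/* For example, if earlier propagation has put the address of a known
   declaration into the object slot of an OBJ_TYPE_REF, as can happen
   for a hypothetical C++ virtual call on an object whose dynamic type
   is known exactly, lang_hooks.fold_obj_type_ref may resolve the
   reference to the concrete method and the call becomes direct.  */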
3058 
3059 /* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
3060    distinguishes both cases.  */
3061 
3062 static bool
3063 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
3064 {
3065   bool changed = false;
3066   gimple stmt = gsi_stmt (*gsi);
3067   unsigned i;
3068 
3069   /* Fold the main computation performed by the statement.  */
3070   switch (gimple_code (stmt))
3071     {
3072     case GIMPLE_ASSIGN:
3073       {
3074 	unsigned old_num_ops = gimple_num_ops (stmt);
3075 	tree new_rhs = fold_gimple_assign (gsi);
3076 	tree lhs = gimple_assign_lhs (stmt);
3077 	if (new_rhs
3078 	    && !useless_type_conversion_p (TREE_TYPE (lhs),
3079 					   TREE_TYPE (new_rhs)))
3080 	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3081 	if (new_rhs
3082 	    && (!inplace
3083 		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3084 	  {
3085 	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3086 	    changed = true;
3087 	  }
3088 	break;
3089       }
3090 
3091     case GIMPLE_COND:
3092       changed |= fold_gimple_cond (stmt);
3093       break;
3094 
3095     case GIMPLE_CALL:
3096       /* Fold *& in call arguments.  */
3097       for (i = 0; i < gimple_call_num_args (stmt); ++i)
3098 	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3099 	  {
3100 	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3101 	    if (tmp)
3102 	      {
3103 		gimple_call_set_arg (stmt, i, tmp);
3104 		changed = true;
3105 	      }
3106 	  }
3107       /* The entire statement may be replaced in this case.  */
3108       if (!inplace)
3109 	changed |= fold_gimple_call (gsi);
3110       break;
3111 
3112     case GIMPLE_ASM:
3113       /* Fold *& in asm operands.  */
3114       for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3115 	{
3116 	  tree link = gimple_asm_output_op (stmt, i);
3117 	  tree op = TREE_VALUE (link);
3118 	  if (REFERENCE_CLASS_P (op)
3119 	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3120 	    {
3121 	      TREE_VALUE (link) = op;
3122 	      changed = true;
3123 	    }
3124 	}
3125       for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3126 	{
3127 	  tree link = gimple_asm_input_op (stmt, i);
3128 	  tree op = TREE_VALUE (link);
3129 	  if (REFERENCE_CLASS_P (op)
3130 	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
3131 	    {
3132 	      TREE_VALUE (link) = op;
3133 	      changed = true;
3134 	    }
3135 	}
3136       break;
3137 
3138     default:;
3139     }
3140 
3141   stmt = gsi_stmt (*gsi);
3142 
3143   /* Fold *& on the lhs.  */
3144   if (gimple_has_lhs (stmt))
3145     {
3146       tree lhs = gimple_get_lhs (stmt);
3147       if (lhs && REFERENCE_CLASS_P (lhs))
3148 	{
3149 	  tree new_lhs = maybe_fold_reference (lhs, true);
3150 	  if (new_lhs)
3151 	    {
3152 	      gimple_set_lhs (stmt, new_lhs);
3153 	      changed = true;
3154 	    }
3155 	}
3156     }
3157 
3158   return changed;
3159 }
3160 
3161 /* Fold the statement pointed to by GSI.  In some cases, this function may
3162    replace the whole statement with a new one.  Returns true iff folding
3163    makes any changes.
3164    The statement pointed to by GSI should be in valid gimple form but may
3165    be in an unfolded state resulting from, for example, constant
3166    propagation, which can produce *&x = 0.  */
3167 
3168 bool
3169 fold_stmt (gimple_stmt_iterator *gsi)
3170 {
3171   return fold_stmt_1 (gsi, false);
3172 }
3173 
3174 /* Perform the minimal folding on statement STMT.  Only operations like
3175    *&x created by constant propagation are handled.  The statement cannot
3176    be replaced with a new one.  Return true if the statement was
3177    changed, false otherwise.
3178    The statement STMT should be in valid gimple form but may
3179    be in an unfolded state resulting from, for example, constant
3180    propagation, which can produce *&x = 0.  */
3181 
3182 bool
3183 fold_stmt_inplace (gimple stmt)
3184 {
3185   gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3186   bool changed = fold_stmt_1 (&gsi, true);
3187   gcc_assert (gsi_stmt (gsi) == stmt);
3188   return changed;
3189 }
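/* For example, constant propagation can turn a store through a
   propagated address into

     *&x = 0;

   which both fold_stmt and fold_stmt_inplace reduce to the plain
   assignment x = 0; since this only rewrites the LHS in place, even
   the in-place variant can perform it.  */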
3190 
3191 /* Try to optimize out __builtin_stack_restore.  Optimize it out
3192    if there is another __builtin_stack_restore in the same basic
3193    block and no calls or ASM_EXPRs are in between, or if this block's
3194    only outgoing edge is to EXIT_BLOCK and there are no calls or
3195    ASM_EXPRs after this __builtin_stack_restore.  */
3196 
3197 static tree
3198 optimize_stack_restore (gimple_stmt_iterator i)
3199 {
3200   tree callee;
3201   gimple stmt;
3202 
3203   basic_block bb = gsi_bb (i);
3204   gimple call = gsi_stmt (i);
3205 
3206   if (gimple_code (call) != GIMPLE_CALL
3207       || gimple_call_num_args (call) != 1
3208       || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
3209       || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
3210     return NULL_TREE;
3211 
3212   for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
3213     {
3214       stmt = gsi_stmt (i);
3215       if (gimple_code (stmt) == GIMPLE_ASM)
3216 	return NULL_TREE;
3217       if (gimple_code (stmt) != GIMPLE_CALL)
3218 	continue;
3219 
3220       callee = gimple_call_fndecl (stmt);
3221       if (!callee
3222 	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3223 	  /* All regular builtins are ok, just obviously not alloca.  */
3224 	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
3225 	return NULL_TREE;
3226 
3227       if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3228 	goto second_stack_restore;
3229     }
3230 
3231   if (!gsi_end_p (i))
3232     return NULL_TREE;
3233 
3234   /* Allow zero successors, or a single successor that is the exit block.  */
3235   switch (EDGE_COUNT (bb->succs))
3236     {
3237     case 0:
3238       break;
3239     case 1:
3240       if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
3241 	return NULL_TREE;
3242       break;
3243     default:
3244       return NULL_TREE;
3245     }
3246  second_stack_restore:
3247 
3248   /* If there's exactly one use, then zap the call to __builtin_stack_save.
3249      If there are multiple uses, then the last one should remove the call.
3250      In any case, whether the call to __builtin_stack_save can be removed
3251      or not is irrelevant to removing the call to __builtin_stack_restore.  */
3252   if (has_single_use (gimple_call_arg (call, 0)))
3253     {
3254       gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3255       if (is_gimple_call (stack_save))
3256 	{
3257 	  callee = gimple_call_fndecl (stack_save);
3258 	  if (callee
3259 	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
3260 	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
3261 	    {
3262 	      gimple_stmt_iterator stack_save_gsi;
3263 	      tree rhs;
3264 
3265 	      stack_save_gsi = gsi_for_stmt (stack_save);
3266 	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3267 	      update_call_from_tree (&stack_save_gsi, rhs);
3268 	    }
3269 	}
3270     }
3271 
3272   /* No effect, so the statement will be deleted.  */
3273   return integer_zero_node;
3274 }
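/* For example, in a hypothetical block of the form

     p_1 = __builtin_stack_save ();
     ...                               (no calls or asm statements)
     __builtin_stack_restore (p_1);
     __builtin_stack_restore (p_1);

   the first restore is redundant and is replaced by a no-op; and if
   p_1 then has a single remaining use, the matching call to
   __builtin_stack_save is zapped as well.  */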
3275 
3276 /* If va_list type is a simple pointer and nothing special is needed,
3277    optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3278    optimize __builtin_va_end (&ap) away as a no-op, and turn
3279    __builtin_va_copy into a simple pointer assignment.  */
3280 
3281 static tree
3282 optimize_stdarg_builtin (gimple call)
3283 {
3284   tree callee, lhs, rhs, cfun_va_list;
3285   bool va_list_simple_ptr;
3286   location_t loc = gimple_location (call);
3287 
3288   if (gimple_code (call) != GIMPLE_CALL)
3289     return NULL_TREE;
3290 
3291   callee = gimple_call_fndecl (call);
3292 
3293   cfun_va_list = targetm.fn_abi_va_list (callee);
3294   va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3295 		       && (TREE_TYPE (cfun_va_list) == void_type_node
3296 			   || TREE_TYPE (cfun_va_list) == char_type_node);
3297 
3298   switch (DECL_FUNCTION_CODE (callee))
3299     {
3300     case BUILT_IN_VA_START:
3301       if (!va_list_simple_ptr
3302 	  || targetm.expand_builtin_va_start != NULL
3303           || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3304 	return NULL_TREE;
3305 
3306       if (gimple_call_num_args (call) != 2)
3307 	return NULL_TREE;
3308 
3309       lhs = gimple_call_arg (call, 0);
3310       if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3311 	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3312 	     != TYPE_MAIN_VARIANT (cfun_va_list))
3313 	return NULL_TREE;
3314 
3315       lhs = build_fold_indirect_ref_loc (loc, lhs);
3316       rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
3317 				 1, integer_zero_node);
3318       rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
3319       return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3320 
3321     case BUILT_IN_VA_COPY:
3322       if (!va_list_simple_ptr)
3323 	return NULL_TREE;
3324 
3325       if (gimple_call_num_args (call) != 2)
3326 	return NULL_TREE;
3327 
3328       lhs = gimple_call_arg (call, 0);
3329       if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3330 	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3331 	     != TYPE_MAIN_VARIANT (cfun_va_list))
3332 	return NULL_TREE;
3333 
3334       lhs = build_fold_indirect_ref_loc (loc, lhs);
3335       rhs = gimple_call_arg (call, 1);
3336       if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3337 	  != TYPE_MAIN_VARIANT (cfun_va_list))
3338 	return NULL_TREE;
3339 
3340       rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
3341       return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3342 
3343     case BUILT_IN_VA_END:
3344       /* No effect, so the statement will be deleted.  */
3345       return integer_zero_node;
3346 
3347     default:
3348       gcc_unreachable ();
3349     }
3350 }
3351 
3352 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3353    RHS of an assignment.  Insert the necessary statements before
3354    iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
3355    is replaced.  If the call is expected to produce a result, then it
3356    is replaced by an assignment of the new RHS to the result variable.
3357    If the result is to be ignored, then the call is replaced by a
3358    GIMPLE_NOP.  A proper VDEF chain is retained by making the first VUSE
3359    and the last VDEF of the whole sequence the same as those of the
3360    replaced statement, and by using new SSA names for stores in between.  */
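/* For instance, a memcpy call folded into an aggregate assignment can
   land here (hypothetical sketch, assuming A and B have matching types):

     __builtin_memcpy (&a, &b, sizeof (a));   ->   a = b;

   The original call's VUSE reaches the first statement of the new
   sequence and its VDEF is transferred to the last store.  */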
3361 
3362 static void
3363 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3364 {
3365   tree lhs;
3366   tree tmp = NULL_TREE;  /* Silence warning.  */
3367   gimple stmt, new_stmt;
3368   gimple_stmt_iterator i;
3369   gimple_seq stmts = gimple_seq_alloc ();
3370   struct gimplify_ctx gctx;
3371   gimple last = NULL;
3372   gimple laststore = NULL;
3373   tree reaching_vuse;
3374 
3375   stmt = gsi_stmt (*si_p);
3376 
3377   gcc_assert (is_gimple_call (stmt));
3378 
3379   lhs = gimple_call_lhs (stmt);
3380   reaching_vuse = gimple_vuse (stmt);
3381 
3382   push_gimplify_context (&gctx);
3383 
3384   if (lhs == NULL_TREE)
3385     {
3386       gimplify_and_add (expr, &stmts);
3387       /* Folding a memcpy of an empty class can yield an assignment that
3388 	 C++ gimplification optimizes away, leaving an empty sequence.  */
3389       if (gimple_seq_empty_p (stmts))
3390 	{
3391 	  pop_gimplify_context (NULL);
3392 	  if (gimple_in_ssa_p (cfun))
3393 	    {
3394 	      unlink_stmt_vdef (stmt);
3395 	      release_defs (stmt);
3396 	    }
3397 	  gsi_remove (si_p, true);
3398 	  return;
3399 	}
3400     }
3401   else
3402     tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3403 
3404   pop_gimplify_context (NULL);
3405 
3406   if (gimple_has_location (stmt))
3407     annotate_all_with_location (stmts, gimple_location (stmt));
3408 
3409   /* The replacement can expose previously unreferenced variables.  */
3410   for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3411     {
3412       if (last)
3413 	{
3414 	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
3415 	  gsi_next (si_p);
3416 	}
3417       new_stmt = gsi_stmt (i);
3418       if (gimple_in_ssa_p (cfun))
3419 	{
3420 	  find_new_referenced_vars (new_stmt);
3421 	  mark_symbols_for_renaming (new_stmt);
3422 	}
3423       /* If the new statement has a VUSE, update it with the exact SSA
3424          name we know will reach this statement.  */
3425       if (gimple_vuse (new_stmt))
3426 	{
3427 	  /* If we've also seen a previous store, create a new VDEF for
3428 	     it and make that the new reaching VUSE.  */
3429 	  if (laststore)
3430 	    {
3431 	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
3432 	      gimple_set_vdef (laststore, reaching_vuse);
3433 	      update_stmt (laststore);
3434 	      laststore = NULL;
3435 	    }
3436 	  gimple_set_vuse (new_stmt, reaching_vuse);
3437 	  gimple_set_modified (new_stmt, true);
3438 	}
3439       if (gimple_assign_single_p (new_stmt)
3440 	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
3441 	{
3442 	  laststore = new_stmt;
3443 	}
3444       last = new_stmt;
3445     }
3446 
3447   if (lhs == NULL_TREE)
3448     {
3449       /* If we replace a call without an LHS that has a VDEF, and our new
3450          sequence ends with a store, we must make that store have the same
3451 	 VDEF in order not to break the sequencing.  This can happen
3452 	 for instance when folding memcpy calls into assignments.  */
3453       if (gimple_vdef (stmt) && laststore)
3454 	{
3455 	  gimple_set_vdef (laststore, gimple_vdef (stmt));
3456 	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
3457 	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
3458 	  update_stmt (laststore);
3459 	}
3460       else if (gimple_in_ssa_p (cfun))
3461 	{
3462 	  unlink_stmt_vdef (stmt);
3463 	  release_defs (stmt);
3464 	}
3465       new_stmt = last;
3466     }
3467   else
3468     {
3469       if (last)
3470 	{
3471 	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
3472 	  gsi_next (si_p);
3473 	}
3474       if (laststore && is_gimple_reg (lhs))
3475 	{
3476 	  gimple_set_vdef (laststore, gimple_vdef (stmt));
3477 	  update_stmt (laststore);
3478 	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
3479 	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
3480 	  laststore = NULL;
3481 	}
3482       else if (laststore)
3483 	{
3484 	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
3485 	  gimple_set_vdef (laststore, reaching_vuse);
3486 	  update_stmt (laststore);
3487 	  laststore = NULL;
3488 	}
3489       new_stmt = gimple_build_assign (lhs, tmp);
3490       if (!is_gimple_reg (tmp))
3491 	gimple_set_vuse (new_stmt, reaching_vuse);
3492       if (!is_gimple_reg (lhs))
3493 	{
3494 	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3495 	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
3496 	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
3497 	}
3498       else if (reaching_vuse == gimple_vuse (stmt))
3499 	unlink_stmt_vdef (stmt);
3500     }
3501 
3502   gimple_set_location (new_stmt, gimple_location (stmt));
3503   gsi_replace (si_p, new_stmt, false);
3504 }
3505 
3506 /* A simple pass that attempts to fold all builtin functions.  This pass
3507    is run after we've propagated as many constants as we can.  */
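/* A hypothetical folding handled here, assuming it survived earlier
   passes:

     n_2 = __builtin_strlen ("abc");   ->   n_2 = 3;

   Any builtin that ccp_fold_builtin can simplify is treated the same way.  */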
3508 
3509 static unsigned int
3510 execute_fold_all_builtins (void)
3511 {
3512   bool cfg_changed = false;
3513   basic_block bb;
3514   unsigned int todoflags = 0;
3515 
3516   FOR_EACH_BB (bb)
3517     {
3518       gimple_stmt_iterator i;
3519       for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3520 	{
3521           gimple stmt, old_stmt;
3522 	  tree callee, result;
3523 	  enum built_in_function fcode;
3524 
3525 	  stmt = gsi_stmt (i);
3526 
3527           if (gimple_code (stmt) != GIMPLE_CALL)
3528 	    {
3529 	      gsi_next (&i);
3530 	      continue;
3531 	    }
3532 	  callee = gimple_call_fndecl (stmt);
3533 	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3534 	    {
3535 	      gsi_next (&i);
3536 	      continue;
3537 	    }
3538 	  fcode = DECL_FUNCTION_CODE (callee);
3539 
3540 	  result = ccp_fold_builtin (stmt);
3541 
3542 	  if (result)
3543 	    gimple_remove_stmt_histograms (cfun, stmt);
3544 
3545 	  if (!result)
3546 	    switch (DECL_FUNCTION_CODE (callee))
3547 	      {
3548 	      case BUILT_IN_CONSTANT_P:
3549 		/* Resolve __builtin_constant_p.  If it hasn't been
3550 		   folded to integer_one_node by now, it's fairly
3551 		   certain that the value simply isn't constant.  */
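		/* E.g. __builtin_constant_p on a function argument
		   resolves to 0 at this point.  */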
3552                 result = integer_zero_node;
3553 		break;
3554 
3555 	      case BUILT_IN_STACK_RESTORE:
3556 		result = optimize_stack_restore (i);
3557 		if (result)
3558 		  break;
3559 		gsi_next (&i);
3560 		continue;
3561 
3562 	      case BUILT_IN_VA_START:
3563 	      case BUILT_IN_VA_END:
3564 	      case BUILT_IN_VA_COPY:
3565 		/* These shouldn't be folded before pass_stdarg.  */
3566 		result = optimize_stdarg_builtin (stmt);
3567 		if (result)
3568 		  break;
3569 		/* FALLTHRU */
3570 
3571 	      default:
3572 		gsi_next (&i);
3573 		continue;
3574 	      }
3575 
3576 	  if (dump_file && (dump_flags & TDF_DETAILS))
3577 	    {
3578 	      fprintf (dump_file, "Simplified\n  ");
3579 	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3580 	    }
3581 
3582           old_stmt = stmt;
3583           if (!update_call_from_tree (&i, result))
3584 	    {
3585 	      gimplify_and_update_call_from_tree (&i, result);
3586 	      todoflags |= TODO_update_address_taken;
3587 	    }
3588 
3589 	  stmt = gsi_stmt (i);
3590 	  update_stmt (stmt);
3591 
3592 	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3593 	      && gimple_purge_dead_eh_edges (bb))
3594 	    cfg_changed = true;
3595 
3596 	  if (dump_file && (dump_flags & TDF_DETAILS))
3597 	    {
3598 	      fprintf (dump_file, "to\n  ");
3599 	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3600 	      fprintf (dump_file, "\n");
3601 	    }
3602 
3603 	  /* Retry the same statement if it changed into another
3604 	     builtin; there might be new opportunities now.  */
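	  /* E.g. a stpcpy whose result is unused can fold into a strcpy,
	     which may then fold further on the retry.  */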
3605           if (gimple_code (stmt) != GIMPLE_CALL)
3606 	    {
3607 	      gsi_next (&i);
3608 	      continue;
3609 	    }
3610 	  callee = gimple_call_fndecl (stmt);
3611 	  if (!callee
3612               || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3613 	      || DECL_FUNCTION_CODE (callee) == fcode)
3614 	    gsi_next (&i);
3615 	}
3616     }
3617 
3618   /* Request CFG cleanup to delete unreachable blocks.  */
3619   if (cfg_changed)
3620     todoflags |= TODO_cleanup_cfg;
3621 
3622   return todoflags;
3623 }
3624 
3625 
3626 struct gimple_opt_pass pass_fold_builtins =
3627 {
3628  {
3629   GIMPLE_PASS,
3630   "fab",				/* name */
3631   NULL,					/* gate */
3632   execute_fold_all_builtins,		/* execute */
3633   NULL,					/* sub */
3634   NULL,					/* next */
3635   0,					/* static_pass_number */
3636   TV_NONE,				/* tv_id */
3637   PROP_cfg | PROP_ssa,			/* properties_required */
3638   0,					/* properties_provided */
3639   0,					/* properties_destroyed */
3640   0,					/* todo_flags_start */
3641   TODO_dump_func
3642     | TODO_verify_ssa
3643     | TODO_update_ssa			/* todo_flags_finish */
3644  }
3645 };
3646