/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/

/* For ease of terminology, "expression node" in what follows refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */

/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the up-safety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */

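/* A minimal illustration of the steps above, using a hypothetical
   source fragment (for exposition only; the pass itself works on
   GIMPLE):

     if (c)
       x = a + b;
     else
       x = 0;
     return x + (a + b);

   The a + b in the return is partially redundant: it is anticipatable
   at the join but available only from the then-path (as x).  Insertion
   adds t1 = a + b on the else-path and merges the occurrences with a
   PHI, after which elimination rewrites the redundant computation:

     if (c)
       x = a + b;               // leader for a + b here is x
     else
       { t1 = a + b; x = 0; }   // inserted computation
     t2 = PHI (x, t1);
     return x + t2;  */
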
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */

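/* For example, if SCCVN determines that a_1 and b_2 + 1 compute the
   same value, both map to a single small value_id, and the dataflow
   bitmaps track that id instead of full SSA_NAMEs.  (The names here
   are hypothetical, for exposition.)  */
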
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */

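/* For instance, a set holding two expressions with the same value id
   has two bits set in its expressions bitmap but only one bit set in
   its values bitmap, so value membership tests stay single bitmap
   lookups.  */
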
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

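/* Compare pre_exprs P1 and P2 for equality.  Hash table callback.  */
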
static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

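/* Hash pre_expr P1.  Hash table callback.  */
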
static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}


/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using VEC_reserve upfront.  There is no
	 VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

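/* Return the expression id of EXPR, or 0 if we have not assigned
   one yet.  */
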
static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
	return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}

static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

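/* True when this machinery is run for FRE (redundancy elimination
   only), false for full PRE.  */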
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap expressions;
  bitmap values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block. IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call


/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
					 gimple);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH information
   cleaned up.  */
static bitmap need_eh_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;

/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}

/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
				   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}


/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
						 pred->index);

  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
				   new_pair->hashcode, INSERT);
  if (*slot)
    free (*slot);
  *slot = (void *) new_pair;
}


/* Add expression E to the expression set of value id V.  */

void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			     v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
  ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
	unsigned int id;
	id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
	if (id == 0)
	  {
	    id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
	    add_to_value (id, expr);
	  }
	return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (set->values, val);
      bitmap_clear_bit (set->expressions, get_expression_id (expr));
    }
}

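/* Insert EXPR with value id VAL into SET.  Constant values are only
   inserted when ALLOW_CONSTANTS is true.  */
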
static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (set->values, val);
      bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (dest->expressions, orig->expressions);
  bitmap_copy (dest->values, orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  BITMAP_FREE (set->expressions);
  BITMAP_FREE (set->values);
}


/* Generate a topologically-ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
	{
	  if (bitmap_bit_p (set->expressions, j))
	    VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
	}
    }

  return result;
}

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

      bitmap_and_into (dest->values, orig->values);
      bitmap_copy (temp, dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (dest->values, value_id))
	    bitmap_clear_bit (dest->expressions, i);
	}
      BITMAP_FREE (temp);
    }
}

/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (result->expressions, dest->expressions,
		    orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (result->values, value_id);
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

  bitmap_copy (temp, a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  BITMAP_FREE (temp);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (set->expressions))
    return false;

  return bitmap_bit_p (set->values, value_id);
}

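/* Return true if bitmapped set SET contains the expression EXPR.  */
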
static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (set->expressions, get_expression_id (expr));
}

/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_bit_p (set->expressions, i))
	{
	  bitmap_clear_bit (set->expressions, i);
	  bitmap_set_bit (set->expressions, get_expression_id (expr));
	  return;
	}
    }
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (a->values, b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

#ifdef ENABLE_CHECKING
  gcc_assert (expr->id == get_or_alloc_expression_id (expr));
#endif

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (set->values, val))
    bitmap_set_bit (set->expressions, expr->id);
}

/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", tree_code_name [vro->opcode]);
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

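/* Like print_bitmap_set but always prints to stderr.  */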
void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

/* Print out the expressions that have VAL to OUTFILE.  */

void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}


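/* Like print_value_expressions but always prints to stderr.  */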
void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}

/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return PRE_EXPR_CONSTANT (expr);
	}
    }
  return NULL;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As none of them
	 have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pool_free (pre_expr_pool, e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}

/* Return the folded version of E if E, when folded, is a gimple
   min_invariant.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_expression:
	    if (nary->opcode == TRUTH_NOT_EXPR)
	      goto do_unary;
	    if (nary->opcode != TRUTH_AND_EXPR
		&& nary->opcode != TRUTH_OR_EXPR
		&& nary->opcode != TRUTH_XOR_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to a
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
do_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	VEC (vn_reference_op_s, heap) *operands = ref->operands;
	vn_reference_op_t op;

	/* Try to simplify the translated expression if it is
	   a call to a builtin function with at most two arguments.  */
	op = VEC_index (vn_reference_op_s, operands, 0);
	if (op->opcode == CALL_EXPR
	    && TREE_CODE (op->op0) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
	    && VEC_length (vn_reference_op_s, operands) >= 2
	    && VEC_length (vn_reference_op_s, operands) <= 3)
	  {
	    vn_reference_op_t arg0, arg1 = NULL;
	    bool anyconst = false;
	    arg0 = VEC_index (vn_reference_op_s, operands, 1);
	    if (VEC_length (vn_reference_op_s, operands) > 2)
	      arg1 = VEC_index (vn_reference_op_s, operands, 2);
	    if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
		|| (arg0->opcode == ADDR_EXPR
		    && is_gimple_min_invariant (arg0->op0)))
	      anyconst = true;
	    if (arg1
		&& (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
		    || (arg1->opcode == ADDR_EXPR
			&& is_gimple_min_invariant (arg1->op0))))
	      anyconst = true;
	    if (anyconst)
	      {
		tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					       arg1 ? 2 : 1,
					       arg0->op0,
					       arg1 ? arg1->op0 : NULL);
		if (folded
		    && TREE_CODE (folded) == NOP_EXPR)
		  folded = TREE_OPERAND (folded, 0);
		if (folded
		    && is_gimple_min_invariant (folded))
		  return get_or_alloc_expr_for_constant (folded);
	      }
	  }
	return e;
      }
    default:
      return e;
    }
  return e;
}

/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &visited);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}

/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}

/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
					value_id);
	FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return PRE_EXPR_NAME (rep);
	  }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
	       "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_var (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}



static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	struct vn_nary_op_s newnary;
	/* The NARY structure is only guaranteed to have been
	   allocated to the nary->length operands.  */
	memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
				 - sizeof (tree) * (4 - nary->length)));

	for (i = 0; i < newnary.length; i++)
	  {
	    if (TREE_CODE (newnary.op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  {
		    tree name = get_representative_for (result);
		    if (!name)
		      return NULL;
		    newnary.op[i] = name;
		  }
		else if (!result)
		  return NULL;

		changed |= newnary.op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    tree result = vn_nary_op_lookup_pieces (newnary.length,
						    newnary.opcode,
						    newnary.type,
						    newnary.op[0],
						    newnary.op[1],
						    newnary.op[2],
						    newnary.op[3],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		VEC_safe_grow_cleared (bitmap_set_t, heap,
				       value_expressions,
				       get_max_value_id() + 1);
		nary = vn_nary_op_insert_pieces (newnary.length,
						 newnary.opcode,
						 newnary.type,
						 newnary.op[0],
						 newnary.op[1],
						 newnary.op[2],
						 newnary.op[3],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	VEC (vn_reference_op_s, heap) *operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	VEC (vn_reference_op_s, heap) *newoperands = NULL;
	bool changed = false, same_valid = true;
	unsigned int i, j;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0, j = 0;
	     VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree oldop0 = operand->op0;
	    tree oldop1 = operand->op1;
	    tree oldop2 = operand->op2;
	    tree op0 = oldop0;
	    tree op1 = oldop1;
	    tree op2 = oldop2;
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;

	    if (op0 && TREE_CODE (op0) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op0)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op0 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    changed |= op0 != oldop0;

	    if (op1 && TREE_CODE (op1) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op1)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op1 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    /* We can't possibly insert these.  */
	    else if (op1 && !is_gimple_min_invariant (op1))
	      break;
	    changed |= op1 != oldop1;
	    if (op2 && TREE_CODE (op2) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op2)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op2 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    /* We can't possibly insert these.  */
	    else if (op2 && !is_gimple_min_invariant (op2))
	      break;
	    changed |= op2 != oldop2;

	    if (!newoperands)
	      newoperands = VEC_copy (vn_reference_op_s, heap, operands);
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
	      newop.opcode = TREE_CODE (op0);
	    newop.type = type;
	    newop.op0 = op0;
	    newop.op1 = op1;
	    newop.op2 = op2;
	    VEC_replace (vn_reference_op_s, newoperands, j, &newop);
	    /* If it transforms from an SSA_NAME to an address, fold with
	       a preceding indirect reference.  */
	    if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
		&& VEC_index (vn_reference_op_s,
			      newoperands, j - 1)->opcode == INDIRECT_REF)
	      vn_reference_fold_indirect (&newoperands, &j);
	  }
	if (i != VEC_length (vn_reference_op_s, operands))
	  {
	    if (newoperands)
	      VEC_free (vn_reference_op_s, heap, newoperands);
	    return NULL;
	  }

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		VEC_free (vn_reference_op_s, heap, newoperands);
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands,
						      &newref, VN_WALK);
	    if (newref)
	      VEC_free (vn_reference_op_s, heap, newoperands);

	    if (result && is_gimple_min_invariant (result))
	      {
		gcc_assert (!newoperands);
		return get_or_alloc_expr_for_constant (result);
	      }

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    VEC_safe_grow_cleared (bitmap_set_t, heap,
					   value_expressions,
					   get_max_value_id() + 1);
		  }
		else
		  new_val_id = ref->value_id;
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = NULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	VEC_free (vn_reference_op_s, heap, newoperands);
	return expr;
      }
      break;

    case NAME:
      {
	gimple phi = NULL;
	edge e;
	gimple def_stmt;
	tree name = PRE_EXPR_NAME (expr);

	def_stmt = SSA_NAME_DEF_STMT (name);
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  phi = def_stmt;
	else
	  return expr;

	e = find_edge (pred, gimple_bb (phi));
	if (e)
	  {
	    tree def = PHI_ARG_DEF (phi, e->dest_idx);
	    pre_expr newexpr;

	    if (TREE_CODE (def) == SSA_NAME)
	      def = VN_INFO (def)->valnum;

	    /* Handle constant. */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
	      return NULL;

	    newexpr = get_or_alloc_expr_for_name (def);
	    return newexpr;
	  }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}

/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
	return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

1826   /* Don't add empty translations to the cache.  Nor do we add
1827      translations of NAMEs, as those are cheap to translate.  */
1828   if (phitrans
1829       && expr->kind != NAME)
1830     phi_trans_add (expr, phitrans, pred);
1831 
1832   return phitrans;
1833 }
1834 
1835 
1836 /* For each expression in SET, translate the values through phi nodes
1837    in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1838    expressions in DEST.  */
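/* As an illustration (hypothetical SSA names, not from any particular
   testcase): if PHIBLOCK contains

     x_2 = PHI <x_1 (PRED), x_3 (other)>

   then an expression x_2 + 1 in SET becomes x_1 + 1 in DEST when
   translated through the PRED edge, since x_1 is the value x_2 takes
   on that edge.  */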
1839 
1840 static void
1841 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1842 		   basic_block phiblock)
1843 {
1844   VEC (pre_expr, heap) *exprs;
1845   pre_expr expr;
1846   int i;
1847 
1848   if (gimple_seq_empty_p (phi_nodes (phiblock)))
1849     {
1850       bitmap_set_copy (dest, set);
1851       return;
1852     }
1853 
1854   exprs = sorted_array_from_bitmap_set (set);
1855   for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
1856     {
1857       pre_expr translated;
1858       translated = phi_translate (expr, set, NULL, pred, phiblock);
1859       if (!translated)
1860 	continue;
1861 
1862       /* We might end up with multiple expressions from SET being
1863 	 translated to the same value.  In this case we do not want
1864 	 to retain the NARY or REFERENCE expression but prefer a NAME
1865 	 which would be the leader.  */
1866       if (translated->kind == NAME)
1867 	bitmap_value_replace_in_set (dest, translated);
1868       else
1869 	bitmap_value_insert_into_set (dest, translated);
1870     }
1871   VEC_free (pre_expr, heap, exprs);
1872 }
1873 
1874 /* Find the leader for a value (i.e., the name representing that
1875    value) in a given set, and return it.  If STMT is non-NULL it
1876    makes sure the defining statement for the leader dominates STMT.
1877    Return NULL if no leader is found.  */
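/* For example (hypothetical names): if value id 5 is represented by
   the expressions {a_1, b_2, c_3 + d_4} and SET contains a_1, the
   leader returned is the NAME a_1, names being ordered first in the
   per-value expression sets.  */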
1878 
1879 static pre_expr
1880 bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
1881 {
1882   if (value_id_constant_p (val))
1883     {
1884       unsigned int i;
1885       bitmap_iterator bi;
1886       bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1887 
1888       FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
1889 	{
1890 	  pre_expr expr = expression_for_id (i);
1891 	  if (expr->kind == CONSTANT)
1892 	    return expr;
1893 	}
1894     }
1895   if (bitmap_set_contains_value (set, val))
1896     {
1897       /* Rather than walk the entire bitmap of expressions, and see
1898 	 whether any of them has the value we are looking for, we look
1899 	 at the reverse mapping, which tells us the set of expressions
1900 	 that have a given value (IE value->expressions with that
1901 	 value) and see if any of those expressions are in our set.
1902 	 The number of expressions per value is usually significantly
1903 	 less than the number of expressions in the set.  In fact, for
1904 	 large testcases, doing it this way is roughly 5-10x faster
1905 	 than walking the bitmap.
1906 	 If this is somehow a significant loss for some cases, we can
1907 	 choose which set to walk based on which set is smaller.  */
1908       unsigned int i;
1909       bitmap_iterator bi;
1910       bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1911 
1912       EXECUTE_IF_AND_IN_BITMAP (exprset->expressions,
1913 				set->expressions, 0, i, bi)
1914 	{
1915 	  pre_expr val = expression_for_id (i);
1916 	  /* At the point where stmt is not null, there should always
1917 	     be an SSA_NAME first in the list of expressions.  */
1918 	  if (stmt)
1919 	    {
1920 	      gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
1921 	      if (gimple_code (def_stmt) != GIMPLE_PHI
1922 		  && gimple_bb (def_stmt) == gimple_bb (stmt)
1923 		  && gimple_uid (def_stmt) >= gimple_uid (stmt))
1924 		continue;
1925 	    }
1926 	  return val;
1927 	}
1928     }
1929   return NULL;
1930 }
1931 
1932 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1933    BLOCK by seeing if it is not killed in the block.  Note that we are
1934    only determining whether there is a store that kills it.  Because
1935    of the order in which clean iterates over values, we are guaranteed
1936    that altered operands will have caused us to be eliminated from the
1937    ANTIC_IN set already.  */
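/* A hypothetical example: the expression {*p_1, .MEM_4} dies in BLOCK
   if a store "*p_1 = x_5;" near the top of BLOCK clobbers it, whereas
   encountering a load "y_6 = *p_1;" that still carries the VUSE .MEM_4
   shows no kill can have happened before it, so the walk below stops
   early.  */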
1938 
1939 static bool
1940 value_dies_in_block_x (pre_expr expr, basic_block block)
1941 {
1942   tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1943   vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1944   gimple def;
1945   gimple_stmt_iterator gsi;
1946   unsigned id = get_expression_id (expr);
1947   bool res = false;
1948   ao_ref ref;
1949 
1950   if (!vuse)
1951     return false;
1952 
1953   /* Lookup a previously calculated result.  */
1954   if (EXPR_DIES (block)
1955       && bitmap_bit_p (EXPR_DIES (block), id * 2))
1956     return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1957 
1958   /* A memory expression {e, VUSE} dies in the block if there is a
1959 	     statement that may clobber e.  If, starting the statement walk from the
1960 	     top of the basic block, a statement uses VUSE, there can be no kill
1961 	     in between that use and the original statement that loaded {e, VUSE},
1962      so we can stop walking.  */
1963   ref.base = NULL_TREE;
1964   for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1965     {
1966       tree def_vuse, def_vdef;
1967       def = gsi_stmt (gsi);
1968       def_vuse = gimple_vuse (def);
1969       def_vdef = gimple_vdef (def);
1970 
1971       /* Not a memory statement.  */
1972       if (!def_vuse)
1973 	continue;
1974 
1975       /* Not a may-def.  */
1976       if (!def_vdef)
1977 	{
1978 	  /* A load with the same VUSE, we're done.  */
1979 	  if (def_vuse == vuse)
1980 	    break;
1981 
1982 	  continue;
1983 	}
1984 
1985       /* Init ref only if we really need it.  */
1986       if (ref.base == NULL_TREE
1987 	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1988 					     refx->operands))
1989 	{
1990 	  res = true;
1991 	  break;
1992 	}
1993       /* If the statement may clobber expr, it dies.  */
1994       if (stmt_may_clobber_ref_p_1 (def, &ref))
1995 	{
1996 	  res = true;
1997 	  break;
1998 	}
1999     }
2000 
2001   /* Remember the result.  */
2002   if (!EXPR_DIES (block))
2003     EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
2004   bitmap_set_bit (EXPR_DIES (block), id * 2);
2005   if (res)
2006     bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
2007 
2008   return res;
2009 }
2010 
2011 
2012 #define union_contains_value(SET1, SET2, VAL)			\
2013   (bitmap_set_contains_value ((SET1), (VAL))			\
2014    || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
2015 
2016 /* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.  */
2018 static bool
2019 vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
2020 		   vn_reference_op_t vro)
2021 {
2022   if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
2023     {
2024       struct pre_expr_d temp;
2025       temp.kind = NAME;
2026       temp.id = 0;
2027       PRE_EXPR_NAME (&temp) = vro->op0;
2028       temp.id = lookup_expression_id (&temp);
2029       if (temp.id == 0)
2030 	return false;
2031       if (!union_contains_value (set1, set2,
2032 				 get_expr_value_id (&temp)))
2033 	return false;
2034     }
2035   if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
2036     {
2037       struct pre_expr_d temp;
2038       temp.kind = NAME;
2039       temp.id = 0;
2040       PRE_EXPR_NAME (&temp) = vro->op1;
2041       temp.id = lookup_expression_id (&temp);
2042       if (temp.id == 0)
2043 	return false;
2044       if (!union_contains_value (set1, set2,
2045 				 get_expr_value_id (&temp)))
2046 	return false;
2047     }
2048 
2049   if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
2050     {
2051       struct pre_expr_d temp;
2052       temp.kind = NAME;
2053       temp.id = 0;
2054       PRE_EXPR_NAME (&temp) = vro->op2;
2055       temp.id = lookup_expression_id (&temp);
2056       if (temp.id == 0)
2057 	return false;
2058       if (!union_contains_value (set1, set2,
2059 				 get_expr_value_id (&temp)))
2060 	return false;
2061     }
2062 
2063   return true;
2064 }
2065 
2066 /* Determine if the expression EXPR is valid in SET1 U SET2.
2067    ONLY SET2 CAN BE NULL.
2068    This means that we have a leader for each part of the expression
2069    (if it consists of values), or the expression is an SSA_NAME.
2070    For loads/calls, we also see if the vuse is killed in this block.  */
2071 
2072 static bool
2073 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
2074 	       basic_block block)
2075 {
2076   switch (expr->kind)
2077     {
2078     case NAME:
2079       return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
2080     case NARY:
2081       {
2082 	unsigned int i;
2083 	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2084 	for (i = 0; i < nary->length; i++)
2085 	  {
2086 	    if (TREE_CODE (nary->op[i]) == SSA_NAME)
2087 	      {
2088 		struct pre_expr_d temp;
2089 		temp.kind = NAME;
2090 		temp.id = 0;
2091 		PRE_EXPR_NAME (&temp) = nary->op[i];
2092 		temp.id = lookup_expression_id (&temp);
2093 		if (temp.id == 0)
2094 		  return false;
2095 		if (!union_contains_value (set1, set2,
2096 					   get_expr_value_id (&temp)))
2097 		  return false;
2098 	      }
2099 	  }
2100 	/* If the NARY may trap, make sure the block does not contain
2101 	   a possible exit point.
2102 	   ???  This is overly conservative if we translate AVAIL_OUT,
2103 	   as the available expression might be after the exit point.  */
2104 	if (BB_MAY_NOTRETURN (block)
2105 	    && vn_nary_may_trap (nary))
2106 	  return false;
2107 	return true;
2108       }
2109       break;
2110     case REFERENCE:
2111       {
2112 	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2113 	vn_reference_op_t vro;
2114 	unsigned int i;
2115 
2116 	for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++)
2117 	  {
2118 	    if (!vro_valid_in_sets (set1, set2, vro))
2119 	      return false;
2120 	  }
2121 	if (ref->vuse)
2122 	  {
2123 	    gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2124 	    if (!gimple_nop_p (def_stmt)
2125 		&& gimple_bb (def_stmt) != block
2126 		&& !dominated_by_p (CDI_DOMINATORS,
2127 				    block, gimple_bb (def_stmt)))
2128 	      return false;
2129 	  }
2130 	return !value_dies_in_block_x (expr, block);
2131       }
2132     default:
2133       gcc_unreachable ();
2134     }
2135 }
2136 
2137 /* Clean the set of expressions that are no longer valid in SET1 or
2138    SET2.  This means expressions that are made up of values we have no
2139    leaders for in SET1 or SET2.  This version is used for partial
2140    leaders for in SET1 or SET2.  This version is used for partial
2141    anticipation, where the leaders for an expression may come from
2142    either ANTIC_IN or PA_IN.  */
2143 static void
2144 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2145 {
2146   VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
2147   pre_expr expr;
2148   int i;
2149 
2150   for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
2151     {
2152       if (!valid_in_sets (set1, set2, expr, block))
2153 	bitmap_remove_from_set (set1, expr);
2154     }
2155   VEC_free (pre_expr, heap, exprs);
2156 }
2157 
2158 /* Clean the set of expressions that are no longer valid in SET.  This
2159    means expressions that are made up of values we have no leaders for
2160    in SET.  */
2161 
2162 static void
2163 clean (bitmap_set_t set, basic_block block)
2164 {
2165   VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
2166   pre_expr expr;
2167   int i;
2168 
2169   for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
2170     {
2171       if (!valid_in_sets (set, NULL, expr, block))
2172 	bitmap_remove_from_set (set, expr);
2173     }
2174   VEC_free (pre_expr, heap, exprs);
2175 }
2176 
2177 static sbitmap has_abnormal_preds;
2178 
2179 /* List of blocks that may have changed during ANTIC computation and
2180    thus need to be iterated over.  */
2181 
2182 static sbitmap changed_blocks;
2183 
2184 /* Decide whether to defer a block for a later iteration, or PHI
2185    translate SOURCE to DEST using phis in PHIBLOCK.  Return false if we
2186    should defer the block, and true if we processed it.  */
2187 
2188 static bool
2189 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2190 			      basic_block block, basic_block phiblock)
2191 {
2192   if (!BB_VISITED (phiblock))
2193     {
2194       SET_BIT (changed_blocks, block->index);
2195       BB_VISITED (block) = 0;
2196       BB_DEFERRED (block) = 1;
2197       return false;
2198     }
2199   else
2200     phi_translate_set (dest, source, block, phiblock);
2201   return true;
2202 }
2203 
2204 /* Compute the ANTIC set for BLOCK.
2205 
2206    If succs(BLOCK) > 1 then
2207      ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2208    else if succs(BLOCK) == 1 then
2209      ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2210 
2211    ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2212 */
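/* As a worked example on a hypothetical diamond CFG

         B1
        /  \
      B2    B3
        \  /
         B4

   where both B2 and B3 compute a + b: the expression is in EXP_GEN and
   hence in ANTIC_IN of both B2 and B3, so the intersection over B1's
   successors puts it into ANTIC_OUT[B1], making a + b anticipatable at
   the end of B1.  */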
2213 
2214 static bool
2215 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2216 {
2217   bool changed = false;
2218   bitmap_set_t S, old, ANTIC_OUT;
2219   bitmap_iterator bi;
2220   unsigned int bii;
2221   edge e;
2222   edge_iterator ei;
2223 
2224   old = ANTIC_OUT = S = NULL;
2225   BB_VISITED (block) = 1;
2226 
2227   /* If any edges from predecessors are abnormal, ANTIC_IN is empty,
2228      so do nothing.  */
2229   if (block_has_abnormal_pred_edge)
2230     goto maybe_dump_sets;
2231 
2232   old = ANTIC_IN (block);
2233   ANTIC_OUT = bitmap_set_new ();
2234 
2235   /* If the block has no successors, ANTIC_OUT is empty.  */
2236   if (EDGE_COUNT (block->succs) == 0)
2237     ;
2238   /* If we have one successor, we could have some phi nodes to
2239      translate through.  */
2240   else if (single_succ_p (block))
2241     {
2242       basic_block succ_bb = single_succ (block);
2243 
2244       /* We trade iterations of the dataflow equations for having to
2245 	 phi translate the maximal set, which is incredibly slow
2246 	 (since the maximal set often has 300+ members, even when you
2247 	 have a small number of blocks).
2248 	 Basically, we defer the computation of ANTIC for this block
2249 	 until we have processed its successor, which will inevitably
2250 	 have a *much* smaller set of values to phi translate once
2251 	 clean has been run on it.
2252 	 The cost of doing this is that we technically perform more
2253 	 iterations; however, they are lower-cost iterations.
2254 
2255 	 Timings for PRE on tramp3d-v4:
2256 	 without maximal set fix: 11 seconds
2257 	 with maximal set fix/without deferring: 26 seconds
2258 	 with maximal set fix/with deferring: 11 seconds
2259      */
2260 
2261       if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2262 					block, succ_bb))
2263 	{
2264 	  changed = true;
2265 	  goto maybe_dump_sets;
2266 	}
2267     }
2268   /* If we have multiple successors, we take the intersection of all of
2269      them.  Note that in the case of loop exit phi nodes, we may have
2270      phis to translate through.  */
2271   else
2272     {
2273       VEC(basic_block, heap) * worklist;
2274       size_t i;
2275       basic_block bprime, first = NULL;
2276 
2277       worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2278       FOR_EACH_EDGE (e, ei, block->succs)
2279 	{
2280 	  if (!first
2281 	      && BB_VISITED (e->dest))
2282 	    first = e->dest;
2283 	  else if (BB_VISITED (e->dest))
2284 	    VEC_quick_push (basic_block, worklist, e->dest);
2285 	}
2286 
2287       /* Of the multiple successors we must have visited at least one already.  */
2288       if (!first)
2289 	{
2290 	  SET_BIT (changed_blocks, block->index);
2291 	  BB_VISITED (block) = 0;
2292 	  BB_DEFERRED (block) = 1;
2293 	  changed = true;
2294 	  VEC_free (basic_block, heap, worklist);
2295 	  goto maybe_dump_sets;
2296 	}
2297 
2298       if (!gimple_seq_empty_p (phi_nodes (first)))
2299 	phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2300       else
2301 	bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2302 
2303       for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2304 	{
2305 	  if (!gimple_seq_empty_p (phi_nodes (bprime)))
2306 	    {
2307 	      bitmap_set_t tmp = bitmap_set_new ();
2308 	      phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2309 	      bitmap_set_and (ANTIC_OUT, tmp);
2310 	      bitmap_set_free (tmp);
2311 	    }
2312 	  else
2313 	    bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2314 	}
2315       VEC_free (basic_block, heap, worklist);
2316     }
2317 
2318   /* Generate ANTIC_OUT - TMP_GEN.  */
2319   S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2320 
2321   /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
2322   ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2323 					  TMP_GEN (block));
2324 
2325   /* Then union in the ANTIC_OUT - TMP_GEN values,
2326      to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2327   FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2328     bitmap_value_insert_into_set (ANTIC_IN (block),
2329 				  expression_for_id (bii));
2330 
2331   clean (ANTIC_IN (block), block);
2332 
2333   /* !old->expressions can happen when we deferred a block.  */
2334   if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
2335     {
2336       changed = true;
2337       SET_BIT (changed_blocks, block->index);
2338       FOR_EACH_EDGE (e, ei, block->preds)
2339 	SET_BIT (changed_blocks, e->src->index);
2340     }
2341   else
2342     RESET_BIT (changed_blocks, block->index);
2343 
2344  maybe_dump_sets:
2345   if (dump_file && (dump_flags & TDF_DETAILS))
2346     {
2347       if (!BB_DEFERRED (block) || BB_VISITED (block))
2348 	{
2349 	  if (ANTIC_OUT)
2350 	    print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2351 
2352 	  print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2353 			    block->index);
2354 
2355 	  if (S)
2356 	    print_bitmap_set (dump_file, S, "S", block->index);
2357 	}
2358       else
2359 	{
2360 	  fprintf (dump_file,
2361 		   "Block %d was deferred for a future iteration.\n",
2362 		   block->index);
2363 	}
2364     }
2365   if (old)
2366     bitmap_set_free (old);
2367   if (S)
2368     bitmap_set_free (S);
2369   if (ANTIC_OUT)
2370     bitmap_set_free (ANTIC_OUT);
2371   return changed;
2372 }
2373 
2374 /* Compute PARTIAL_ANTIC for BLOCK.
2375 
2376    If succs(BLOCK) > 1 then
2377      PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2378      in ANTIC_OUT for all succ(BLOCK)
2379    else if succs(BLOCK) == 1 then
2380      PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2381 
2382    PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2383 				  - ANTIC_IN[BLOCK])
2384 
2385 */
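
/* Contrast this with full anticipation on a hypothetical CFG: if a + b
   is computed in only one of the two successors B2 and B3 of B1, it
   drops out of the intersection used for ANTIC_OUT[B1], but it survives
   the value-wise union into PA_OUT[B1] and so remains a candidate for
   partial-partial redundancy elimination.  */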
2386 static bool
2387 compute_partial_antic_aux (basic_block block,
2388 			   bool block_has_abnormal_pred_edge)
2389 {
2390   bool changed = false;
2391   bitmap_set_t old_PA_IN;
2392   bitmap_set_t PA_OUT;
2393   edge e;
2394   edge_iterator ei;
2395   unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2396 
2397   old_PA_IN = PA_OUT = NULL;
2398 
2399   /* If any edges from predecessors are abnormal, PA_IN is empty,
2400      so do nothing.  */
2401   if (block_has_abnormal_pred_edge)
2402     goto maybe_dump_sets;
2403 
2404   /* If there are too many partially anticipatable values in the
2405      block, phi_translate_set can take an exponential time: stop
2406      before the translation starts.  */
2407   if (max_pa
2408       && single_succ_p (block)
2409       && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa)
2410     goto maybe_dump_sets;
2411 
2412   old_PA_IN = PA_IN (block);
2413   PA_OUT = bitmap_set_new ();
2414 
2415   /* If the block has no successors, PA_OUT is empty.  */
2416   if (EDGE_COUNT (block->succs) == 0)
2417     ;
2418   /* If we have one successor, we could have some phi nodes to
2419      translate through.  Note that we can't phi translate across DFS
2420      back edges in partial antic, because it uses a union operation on
2421      the successors.  For recurrences like IV's, we will end up
2422 	 generating a new value in the set on each go around (i + 3 (VH.1),
2423      VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
2424   else if (single_succ_p (block))
2425     {
2426       basic_block succ = single_succ (block);
2427       if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2428 	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2429     }
2430   /* If we have multiple successors, we take the union of all of
2431      them.  */
2432   else
2433     {
2434       VEC(basic_block, heap) * worklist;
2435       size_t i;
2436       basic_block bprime;
2437 
2438       worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2439       FOR_EACH_EDGE (e, ei, block->succs)
2440 	{
2441 	  if (e->flags & EDGE_DFS_BACK)
2442 	    continue;
2443 	  VEC_quick_push (basic_block, worklist, e->dest);
2444 	}
2445       if (VEC_length (basic_block, worklist) > 0)
2446 	{
2447 	  for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2448 	    {
2449 	      unsigned int i;
2450 	      bitmap_iterator bi;
2451 
2452 	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2453 		bitmap_value_insert_into_set (PA_OUT,
2454 					      expression_for_id (i));
2455 	      if (!gimple_seq_empty_p (phi_nodes (bprime)))
2456 		{
2457 		  bitmap_set_t pa_in = bitmap_set_new ();
2458 		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2459 		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2460 		    bitmap_value_insert_into_set (PA_OUT,
2461 						  expression_for_id (i));
2462 		  bitmap_set_free (pa_in);
2463 		}
2464 	      else
2465 		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2466 		  bitmap_value_insert_into_set (PA_OUT,
2467 						expression_for_id (i));
2468 	    }
2469 	}
2470       VEC_free (basic_block, heap, worklist);
2471     }
2472 
2473   /* PA_IN starts with PA_OUT - TMP_GEN.
2474      Then we subtract things from ANTIC_IN.  */
2475   PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2476 
2477   /* For partial antic, we want to put back in the phi results, since
2478      we will properly avoid making them partially antic over backedges.  */
2479   bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
2480   bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);
2481 
2482   /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2483   bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2484 
2485   dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2486 
2487   if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2488     {
2489       changed = true;
2490       SET_BIT (changed_blocks, block->index);
2491       FOR_EACH_EDGE (e, ei, block->preds)
2492 	SET_BIT (changed_blocks, e->src->index);
2493     }
2494   else
2495     RESET_BIT (changed_blocks, block->index);
2496 
2497  maybe_dump_sets:
2498   if (dump_file && (dump_flags & TDF_DETAILS))
2499     {
2500       if (PA_OUT)
2501 	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2502 
2503       print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2504     }
2505   if (old_PA_IN)
2506     bitmap_set_free (old_PA_IN);
2507   if (PA_OUT)
2508     bitmap_set_free (PA_OUT);
2509   return changed;
2510 }
2511 
2512 /* Compute ANTIC and partial ANTIC sets.  */
2513 
2514 static void
2515 compute_antic (void)
2516 {
2517   bool changed = true;
2518   int num_iterations = 0;
2519   basic_block block;
2520   int i;
2521 
2522   /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2523      We pre-build the map of blocks with incoming abnormal edges here.  */
2524   has_abnormal_preds = sbitmap_alloc (last_basic_block);
2525   sbitmap_zero (has_abnormal_preds);
2526 
2527   FOR_EACH_BB (block)
2528     {
2529       edge_iterator ei;
2530       edge e;
2531 
2532       FOR_EACH_EDGE (e, ei, block->preds)
2533 	{
2534 	  e->flags &= ~EDGE_DFS_BACK;
2535 	  if (e->flags & EDGE_ABNORMAL)
2536 	    {
2537 	      SET_BIT (has_abnormal_preds, block->index);
2538 	      break;
2539 	    }
2540 	}
2541 
2542       BB_VISITED (block) = 0;
2543       BB_DEFERRED (block) = 0;
2544 
2545       /* While we are here, give empty ANTIC_IN sets to each block.  */
2546       ANTIC_IN (block) = bitmap_set_new ();
2547       PA_IN (block) = bitmap_set_new ();
2548     }
2549 
2550   /* At the exit block we anticipate nothing.  */
2551   ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2552   BB_VISITED (EXIT_BLOCK_PTR) = 1;
2553   PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2554 
2555   changed_blocks = sbitmap_alloc (last_basic_block + 1);
2556   sbitmap_ones (changed_blocks);
2557   while (changed)
2558     {
2559       if (dump_file && (dump_flags & TDF_DETAILS))
2560 	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2561       /* ???  We need to clear our PHI translation cache here as the
2562          ANTIC sets shrink and we restrict valid translations to
2563 	 those having operands with leaders in ANTIC.  Same below
2564 	 for PA ANTIC computation.  */
2565       num_iterations++;
2566       changed = false;
2567       for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2568 	{
2569 	  if (TEST_BIT (changed_blocks, postorder[i]))
2570 	    {
2571 	      basic_block block = BASIC_BLOCK (postorder[i]);
2572 	      changed |= compute_antic_aux (block,
2573 					    TEST_BIT (has_abnormal_preds,
2574 						      block->index));
2575 	    }
2576 	}
2577 #ifdef ENABLE_CHECKING
2578       /* Theoretically possible, but *highly* unlikely.  */
2579       gcc_assert (num_iterations < 500);
2580 #endif
2581     }
2582 
2583   statistics_histogram_event (cfun, "compute_antic iterations",
2584 			      num_iterations);
2585 
2586   if (do_partial_partial)
2587     {
2588       sbitmap_ones (changed_blocks);
2589       mark_dfs_back_edges ();
2590       num_iterations = 0;
2591       changed = true;
2592       while (changed)
2593 	{
2594 	  if (dump_file && (dump_flags & TDF_DETAILS))
2595 	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2596 	  num_iterations++;
2597 	  changed = false;
2598 	  for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2599 	    {
2600 	      if (TEST_BIT (changed_blocks, postorder[i]))
2601 		{
2602 		  basic_block block = BASIC_BLOCK (postorder[i]);
2603 		  changed
2604 		    |= compute_partial_antic_aux (block,
2605 						  TEST_BIT (has_abnormal_preds,
2606 							    block->index));
2607 		}
2608 	    }
2609 #ifdef ENABLE_CHECKING
2610 	  /* Theoretically possible, but *highly* unlikely.  */
2611 	  gcc_assert (num_iterations < 500);
2612 #endif
2613 	}
2614       statistics_histogram_event (cfun, "compute_partial_antic iterations",
2615 				  num_iterations);
2616     }
2617   sbitmap_free (has_abnormal_preds);
2618   sbitmap_free (changed_blocks);
2619 }
2620 
2621 /* Return true if we can value number the call in STMT.  This is true
2622    if we have a pure or constant call.  */
2623 
2624 static bool
2625 can_value_number_call (gimple stmt)
2626 {
2627   if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2628     return true;
2629   return false;
2630 }
2631 
2632 /* Return true if OP is a tree which we can perform PRE on.
2633    This may not match the operations we can value number, but in
2634    a perfect world would.  */
2635 
2636 static bool
2637 can_PRE_operation (tree op)
2638 {
2639   return UNARY_CLASS_P (op)
2640     || BINARY_CLASS_P (op)
2641     || COMPARISON_CLASS_P (op)
2642     || TREE_CODE (op) == INDIRECT_REF
2643     || TREE_CODE (op) == COMPONENT_REF
2644     || TREE_CODE (op) == VIEW_CONVERT_EXPR
2645     || TREE_CODE (op) == CALL_EXPR
2646     || TREE_CODE (op) == ARRAY_REF;
2647 }
2648 
2649 
2650 /* Inserted expressions are placed onto this worklist, which is used
2651    for performing quick dead code elimination of insertions we made
2652    that didn't turn out to be necessary.   */
2653 static VEC(gimple,heap) *inserted_exprs;
2654 static bitmap inserted_phi_names;
2655 
2656 /* Pool allocated fake store expressions are placed onto this
2657    worklist, which, after performing dead code elimination, is walked
2658    to see which expressions need to be put into GC'able memory.  */
2659 static VEC(gimple, heap) *need_creation;
2660 
2661 /* The actual worker for create_component_ref_by_pieces.  */
2662 
2663 static tree
2664 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2665 				  unsigned int *operand, gimple_seq *stmts,
2666 				  gimple domstmt)
2667 {
2668   vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2669 					*operand);
2670   tree genop;
2671   ++*operand;
2672   switch (currop->opcode)
2673     {
2674     case CALL_EXPR:
2675       {
2676 	tree folded, sc = NULL_TREE;
2677 	unsigned int nargs = 0;
2678 	tree fn, *args;
2679 	if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2680 	  fn = currop->op0;
2681 	else
2682 	  {
2683 	    pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2684 	    fn = find_or_generate_expression (block, op0, stmts, domstmt);
2685 	    if (!fn)
2686 	      return NULL_TREE;
2687 	  }
2688 	if (currop->op1)
2689 	  {
2690 	    pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2691 	    sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2692 	    if (!sc)
2693 	      return NULL_TREE;
2694 	  }
2695 	args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2696 					  ref->operands) - 1);
2697 	while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2698 	  {
2699 	    args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2700 							    operand, stmts,
2701 							    domstmt);
2702 	    if (!args[nargs])
2703 	      {
2704 		free (args);
2705 		return NULL_TREE;
2706 	      }
2707 	    nargs++;
2708 	  }
2709 	folded = build_call_array (currop->type,
2710 				   (TREE_CODE (fn) == FUNCTION_DECL
2711 				    ? build_fold_addr_expr (fn) : fn),
2712 				   nargs, args);
2713 	free (args);
2714 	if (sc)
2715 	  CALL_EXPR_STATIC_CHAIN (folded) = sc;
2716 	return folded;
2717       }
2718       break;
2719     case TARGET_MEM_REF:
2720       {
2721 	vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2722 					      *operand);
2723 	pre_expr op0expr;
2724 	tree genop0 = NULL_TREE;
2725 	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2726 							stmts, domstmt);
2727 	if (!baseop)
2728 	  return NULL_TREE;
2729 	if (currop->op0)
2730 	  {
2731 	    op0expr = get_or_alloc_expr_for (currop->op0);
2732 	    genop0 = find_or_generate_expression (block, op0expr,
2733 						  stmts, domstmt);
2734 	    if (!genop0)
2735 	      return NULL_TREE;
2736 	  }
2737 	if (DECL_P (baseop))
2738 	  return build6 (TARGET_MEM_REF, currop->type,
2739 			 baseop, NULL_TREE,
2740 			 genop0, currop->op1, currop->op2,
2741 			 unshare_expr (nextop->op1));
2742 	else
2743 	  return build6 (TARGET_MEM_REF, currop->type,
2744 			 NULL_TREE, baseop,
2745 			 genop0, currop->op1, currop->op2,
2746 			 unshare_expr (nextop->op1));
2747       }
2748       break;
2749     case ADDR_EXPR:
2750       if (currop->op0)
2751 	{
2752 	  gcc_assert (is_gimple_min_invariant (currop->op0));
2753 	  return currop->op0;
2754 	}
2755       /* Fallthrough.  */
2756     case REALPART_EXPR:
2757     case IMAGPART_EXPR:
2758     case VIEW_CONVERT_EXPR:
2759       {
2760 	tree folded;
2761 	tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2762 							operand,
2763 							stmts, domstmt);
2764 	if (!genop0)
2765 	  return NULL_TREE;
2766 	folded = fold_build1 (currop->opcode, currop->type,
2767 			      genop0);
2768 	return folded;
2769       }
2770       break;
2771     case ALIGN_INDIRECT_REF:
2772     case MISALIGNED_INDIRECT_REF:
2773     case INDIRECT_REF:
2774       {
2775 	tree folded;
2776 	tree genop1 = create_component_ref_by_pieces_1 (block, ref,
2777 							operand,
2778 							stmts, domstmt);
2779 	if (!genop1)
2780 	  return NULL_TREE;
2781 	genop1 = fold_convert (build_pointer_type (currop->type),
2782 			       genop1);
2783 
2784 	if (currop->opcode == MISALIGNED_INDIRECT_REF)
2785 	  folded = fold_build2 (currop->opcode, currop->type,
2786 				genop1, currop->op1);
2787 	else
2788 	  folded = fold_build1 (currop->opcode, currop->type,
2789 				genop1);
2790 	return folded;
2791       }
2792       break;
2793     case BIT_FIELD_REF:
2794       {
2795 	tree folded;
2796 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2797 							stmts, domstmt);
2798 	pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2799 	pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2800 	tree genop1;
2801 	tree genop2;
2802 
2803 	if (!genop0)
2804 	  return NULL_TREE;
2805 	genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2806 	if (!genop1)
2807 	  return NULL_TREE;
2808 	genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2809 	if (!genop2)
2810 	  return NULL_TREE;
2811 	folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2812 			      genop2);
2813 	return folded;
2814       }
2815 
2816       /* For array ref vn_reference_op's, operand 1 of the array ref
2817 	 is op0 of the reference op and operand 3 of the array ref is
2818 	 op1.  */
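      /* For instance (hypothetical): a reference a[i_1] stores the index
	 i_1 in op0, the low bound (if any) in op1 and the element size in
	 op2; below, a zero low bound and an element size matching the
	 element type are dropped, and the rest is rebuilt into the
	 ARRAY_REF.  */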
2819     case ARRAY_RANGE_REF:
2820     case ARRAY_REF:
2821       {
2822 	tree genop0;
2823 	tree genop1 = currop->op0;
2824 	pre_expr op1expr;
2825 	tree genop2 = currop->op1;
2826 	pre_expr op2expr;
2827 	tree genop3 = currop->op2;
2828 	pre_expr op3expr;
2829 	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2830 						   stmts, domstmt);
2831 	if (!genop0)
2832 	  return NULL_TREE;
2833 	op1expr = get_or_alloc_expr_for (genop1);
2834 	genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2835 	if (!genop1)
2836 	  return NULL_TREE;
2837 	if (genop2)
2838 	  {
2839 	    /* Drop zero minimum index.  */
2840 	    if (tree_int_cst_equal (genop2, integer_zero_node))
2841 	      genop2 = NULL_TREE;
2842 	    else
2843 	      {
2844 		op2expr = get_or_alloc_expr_for (genop2);
2845 		genop2 = find_or_generate_expression (block, op2expr, stmts,
2846 						      domstmt);
2847 		if (!genop2)
2848 		  return NULL_TREE;
2849 	      }
2850 	  }
2851 	if (genop3)
2852 	  {
2853 	    tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2854 	    /* We can't always put a size in units of the element alignment
2855 	       here, as the element alignment may not be visible.  See
2856 	       PR43783.  Simply drop the element size for constant
2857 	       sizes.  */
2858 	    if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2859 	      genop3 = NULL_TREE;
2860 	    else
2861 	      {
2862 		genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2863 				     size_int (TYPE_ALIGN_UNIT (elmt_type)));
2864 		op3expr = get_or_alloc_expr_for (genop3);
2865 		genop3 = find_or_generate_expression (block, op3expr, stmts,
2866 						      domstmt);
2867 		if (!genop3)
2868 		  return NULL_TREE;
2869 	      }
2870 	  }
2871 	return build4 (currop->opcode, currop->type, genop0, genop1,
2872 		       genop2, genop3);
2873       }
2874     case COMPONENT_REF:
2875       {
2876 	tree op0;
2877 	tree op1;
2878 	tree genop2 = currop->op1;
2879 	pre_expr op2expr;
2880 	op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2881 						stmts, domstmt);
2882 	if (!op0)
2883 	  return NULL_TREE;
2884 	/* op1 should be a FIELD_DECL, which are represented by
2885 	   themselves.  */
2886 	op1 = currop->op0;
2887 	if (genop2)
2888 	  {
2889 	    op2expr = get_or_alloc_expr_for (genop2);
2890 	    genop2 = find_or_generate_expression (block, op2expr, stmts,
2891 						  domstmt);
2892 	    if (!genop2)
2893 	      return NULL_TREE;
2894 	  }
2895 
2896 	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2897 			    genop2);
2898       }
2899       break;
2900     case SSA_NAME:
2901       {
2902 	pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2903 	genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2904 	return genop;
2905       }
2906     case STRING_CST:
2907     case INTEGER_CST:
2908     case COMPLEX_CST:
2909     case VECTOR_CST:
2910     case REAL_CST:
2911     case CONSTRUCTOR:
2912     case VAR_DECL:
2913     case PARM_DECL:
2914     case CONST_DECL:
2915     case RESULT_DECL:
2916     case FUNCTION_DECL:
2917       return currop->op0;
2918 
2919     default:
2920       gcc_unreachable ();
2921     }
2922 }
2923 
2924 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2925    COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up
2926    trying to rename aggregates into SSA form directly, which is a no-no.
2927 
2928    Thus, this routine doesn't create temporaries, it just builds a
2929    single access expression for the array, calling
2930    find_or_generate_expression to build the innermost pieces.
2931 
2932    This function is a subroutine of create_expression_by_pieces, and
2933    should not be called on its own unless you really know what you
2934    are doing.  */
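/* A hypothetical example: for a reference x.a[i_1].b this builds the
   single tree COMPONENT_REF <ARRAY_REF <COMPONENT_REF <x, a>, i>, b>,
   where only the scalar index i is obtained through
   find_or_generate_expression; no temporaries are created for the
   intermediate aggregate values.  */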
2935 
2936 static tree
2937 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2938 				gimple_seq *stmts, gimple domstmt)
2939 {
2940   unsigned int op = 0;
2941   return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2942 }
2943 
2944 /* Find a leader for an expression, or generate one using
2945    create_expression_by_pieces if it's ANTIC but
2946    complex.
2947    BLOCK is the basic_block we are looking for leaders in.
2948    EXPR is the expression to find a leader or generate for.
2949    STMTS is the statement list to put the inserted expressions on.
2950    Returns the SSA_NAME of the LHS of the generated expression or the
2951    leader.
2952    DOMSTMT if non-NULL is a statement that should be dominated by
2953    all uses in the generated expression.  If DOMSTMT is non-NULL this
2954    routine can fail and return NULL_TREE.  Otherwise it will assert
2955    on failure.  */
2956 
2957 static tree
2958 find_or_generate_expression (basic_block block, pre_expr expr,
2959 			     gimple_seq *stmts, gimple domstmt)
2960 {
2961   pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2962 					get_expr_value_id (expr), domstmt);
2963   tree genop = NULL;
2964   if (leader)
2965     {
2966       if (leader->kind == NAME)
2967 	genop = PRE_EXPR_NAME (leader);
2968       else if (leader->kind == CONSTANT)
2969 	genop = PRE_EXPR_CONSTANT (leader);
2970     }
2971 
2972   /* If it's still NULL, it must be a complex expression, so generate
2973      it recursively.  Not so for FRE though.  */
2974   if (genop == NULL
2975       && !in_fre)
2976     {
2977       bitmap_set_t exprset;
2978       unsigned int lookfor = get_expr_value_id (expr);
2979       bool handled = false;
2980       bitmap_iterator bi;
2981       unsigned int i;
2982 
2983       exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2984       FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2985 	{
2986 	  pre_expr temp = expression_for_id (i);
2987 	  if (temp->kind != NAME)
2988 	    {
2989 	      handled = true;
2990 	      genop = create_expression_by_pieces (block, temp, stmts,
2991 						   domstmt,
2992 						   get_expr_type (expr));
2993 	      break;
2994 	    }
2995 	}
2996       if (!handled && domstmt)
2997 	return NULL_TREE;
2998 
2999       gcc_assert (handled);
3000     }
3001   return genop;
3002 }
3003 
3004 #define NECESSARY GF_PLF_1
3005 
3006 /* Create an expression in pieces, so that we can handle very complex
3007    expressions that may be ANTIC, but not necessarily GIMPLE.
3008    BLOCK is the basic block the expression will be inserted into,
3009    EXPR is the expression to insert (in value form)
3010    STMTS is a statement list to append the necessary insertions into.
3011 
3012    This function will die if we hit some value that shouldn't be
3013    ANTIC but is (IE there is no leader for it or for its components).
3014    This function may also generate expressions that are themselves
3015    partially or fully redundant.  Those that are will be either made
3016    fully redundant during the next iteration of insert (for partially
3017    redundant ones), or eliminated by eliminate (for fully redundant
3018    ones).
3019 
3020    If DOMSTMT is non-NULL then we make sure that all uses in the
3021    expressions dominate that statement.  In this case the function
3022    can return NULL_TREE to signal failure.  */
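/* A sketch of the effect (hypothetical SSA names): for a value
   representing (a_1 * b_2) + c_3, where the product has no leader
   either, the recursion through find_or_generate_expression appends

     pretmp.4_10 = a_1 * b_2;
     pretmp.5_11 = pretmp.4_10 + c_3;

   to STMTS, registering each new name in NEW_SETS and AVAIL_OUT along
   the way.  */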
3023 
3024 static tree
3025 create_expression_by_pieces (basic_block block, pre_expr expr,
3026 			     gimple_seq *stmts, gimple domstmt, tree type)
3027 {
3028   tree temp, name;
3029   tree folded;
3030   gimple_seq forced_stmts = NULL;
3031   unsigned int value_id;
3032   gimple_stmt_iterator gsi;
3033   tree exprtype = type ? type : get_expr_type (expr);
3034   pre_expr nameexpr;
3035   gimple newstmt;
3036 
3037   switch (expr->kind)
3038     {
3039       /* We may hit the NAME/CONSTANT case if we have to convert types
3040 	 that value numbering saw through.  */
3041     case NAME:
3042       folded = PRE_EXPR_NAME (expr);
3043       break;
3044     case CONSTANT:
3045       folded = PRE_EXPR_CONSTANT (expr);
3046       break;
3047     case REFERENCE:
3048       {
3049 	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3050 	folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3051       }
3052       break;
3053     case NARY:
3054       {
3055 	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3056 	switch (nary->length)
3057 	  {
3058 	  case 2:
3059 	    {
3060 	      pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3061 	      pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
3062 	      tree genop1 = find_or_generate_expression (block, op1,
3063 							 stmts, domstmt);
3064 	      tree genop2 = find_or_generate_expression (block, op2,
3065 							 stmts, domstmt);
3066 	      if (!genop1 || !genop2)
3067 		return NULL_TREE;
3068 	      /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR.  It
3069 		 may be a constant with the wrong type.  */
3070 	      if (nary->opcode == POINTER_PLUS_EXPR)
3071 		{
3072 		  genop1 = fold_convert (nary->type, genop1);
3073 		  genop2 = fold_convert (sizetype, genop2);
3074 		}
3075 	      else
3076 		{
3077 		  genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3078 		  genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
3079 		}
3080 
3081 	      folded = fold_build2 (nary->opcode, nary->type,
3082 				    genop1, genop2);
3083 	    }
3084 	    break;
3085 	  case 1:
3086 	    {
3087 	      pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3088 	      tree genop1 = find_or_generate_expression (block, op1,
3089 							 stmts, domstmt);
3090 	      if (!genop1)
3091 		return NULL_TREE;
3092 	      genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3093 
3094 	      folded = fold_build1 (nary->opcode, nary->type,
3095 				    genop1);
3096 	    }
3097 	    break;
3098 	  default:
3099 	    return NULL_TREE;
3100 	  }
3101       }
3102       break;
3103     default:
3104       return NULL_TREE;
3105     }
3106 
3107   if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3108     folded = fold_convert (exprtype, folded);
3109 
3110   /* Force the generated expression to be a sequence of GIMPLE
3111      statements.
3112      We have to call unshare_expr because force_gimple_operand may
3113      modify the tree we pass to it.  */
3114   folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3115 				 false, NULL);
3116 
3117   /* If forcing produced any intermediate expressions, add them
3118      to the value sets and chain them into the instruction stream.  */
3119   if (forced_stmts)
3120     {
3121       gsi = gsi_start (forced_stmts);
3122       for (; !gsi_end_p (gsi); gsi_next (&gsi))
3123 	{
3124 	  gimple stmt = gsi_stmt (gsi);
3125 	  tree forcedname = gimple_get_lhs (stmt);
3126 	  pre_expr nameexpr;
3127 
3128 	  VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3129 	  if (TREE_CODE (forcedname) == SSA_NAME)
3130 	    {
3131 	      VN_INFO_GET (forcedname)->valnum = forcedname;
3132 	      VN_INFO (forcedname)->value_id = get_next_value_id ();
3133 	      nameexpr = get_or_alloc_expr_for_name (forcedname);
3134 	      add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3135 	      if (!in_fre)
3136 		bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3137 	      bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3138 	    }
3139 	  mark_symbols_for_renaming (stmt);
3140 	}
3141       gimple_seq_add_seq (stmts, forced_stmts);
3142     }
3143 
3144   /* Build and insert the assignment of the end result to the temporary
3145      that we will return.  */
3146   if (!pretemp || exprtype != TREE_TYPE (pretemp))
3147     {
3148       pretemp = create_tmp_var (exprtype, "pretmp");
3149       get_var_ann (pretemp);
3150     }
3151 
3152   temp = pretemp;
3153   add_referenced_var (temp);
3154 
3155   if (TREE_CODE (exprtype) == COMPLEX_TYPE
3156       || TREE_CODE (exprtype) == VECTOR_TYPE)
3157     DECL_GIMPLE_REG_P (temp) = 1;
3158 
3159   newstmt = gimple_build_assign (temp, folded);
3160   name = make_ssa_name (temp, newstmt);
3161   gimple_assign_set_lhs (newstmt, name);
3162   gimple_set_plf (newstmt, NECESSARY, false);
3163 
3164   gimple_seq_add_stmt (stmts, newstmt);
3165   VEC_safe_push (gimple, heap, inserted_exprs, newstmt);
3166 
3167   /* All the symbols in the new statement should be put into SSA form.  */
3168   mark_symbols_for_renaming (newstmt);
3169 
3170   /* Add a value number to the temporary.
3171      The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3172      we are creating the expression by pieces, and this particular piece of
3173      the expression may have been represented.  There is no harm in replacing
3174      here.  */
3175   VN_INFO_GET (name)->valnum = name;
3176   value_id = get_expr_value_id (expr);
3177   VN_INFO (name)->value_id = value_id;
3178   nameexpr = get_or_alloc_expr_for_name (name);
3179   add_to_value (value_id, nameexpr);
3180   if (!in_fre)
3181     bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3182   bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3183 
3184   pre_stats.insertions++;
3185   if (dump_file && (dump_flags & TDF_DETAILS))
3186     {
3187       fprintf (dump_file, "Inserted ");
3188       print_gimple_stmt (dump_file, newstmt, 0, 0);
3189       fprintf (dump_file, " in predecessor %d\n", block->index);
3190     }
3191 
3192   return name;
3193 }
3194 
3195 
3196 /* Returns true if we want to inhibit the insertion of PHI nodes
3197    for the given EXPR for basic block BB (a member of a loop).
3198    We want to do this when we fear that the induction variable we
3199    create might inhibit vectorization.  */
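/* For instance (hypothetical loop): for a partially redundant load
   a[i_1] where i_1 is a simple induction variable, inserting a PHI
   would materialize the address &a[i_1] as a second induction
   variable, which can keep the vectorizer from recognizing the
   access.  */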
3200 
3201 static bool
3202 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3203 {
3204   vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3205   VEC (vn_reference_op_s, heap) *ops = vr->operands;
3206   vn_reference_op_t op;
3207   unsigned i;
3208 
3209   /* If we aren't going to vectorize we don't inhibit anything.  */
3210   if (!flag_tree_vectorize)
3211     return false;
3212 
3213   /* Otherwise we inhibit the insertion when the address of the
3214      memory reference is a simple induction variable.  In other
3215      cases the vectorizer won't do anything anyway (either it's
3216      loop invariant or a complicated expression).  */
3217   for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
3218     {
3219       switch (op->opcode)
3220 	{
3221 	case ARRAY_REF:
3222 	case ARRAY_RANGE_REF:
3223 	  if (TREE_CODE (op->op0) != SSA_NAME)
3224 	    break;
3225 	  /* Fallthru.  */
3226 	case SSA_NAME:
3227 	  {
3228 	    basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3229 	    affine_iv iv;
3230 	    /* Default defs are loop invariant.  */
3231 	    if (!defbb)
3232 	      break;
3233 	    /* Defined outside this loop, also loop invariant.  */
3234 	    if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3235 	      break;
3236 	    /* If it's a simple induction variable inhibit insertion,
3237 	       the vectorizer might be interested in this one.  */
3238 	    if (simple_iv (bb->loop_father, bb->loop_father,
3239 			   op->op0, &iv, true))
3240 	      return true;
3241 	    /* No simple IV, vectorizer can't do anything, hence no
3242 	       reason to inhibit the transformation for this operand.  */
3243 	    break;
3244 	  }
3245 	default:
3246 	  break;
3247 	}
3248     }
3249   return false;
3250 }
3251 
3252 /* Insert the to-be-made-available values of expression EXPRNUM for each
3253    predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3254    merge the result with a phi node, given the same value number as
3255    the expression.  Return true if we have inserted new stuff.  */
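/* A hypothetical example: if a_1 + b_2 is available in one predecessor
   of BLOCK but not in the other, AVAIL maps the first predecessor to
   the existing name and the second to the NARY expression; we then
   insert "pretmp_5 = a_1 + b_2;" on the second incoming edge and
   create "prephitmp_6 = PHI <old_3, pretmp_5>" in BLOCK.  */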
3256 
3257 static bool
3258 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3259 			    pre_expr *avail)
3260 {
3261   pre_expr expr = expression_for_id (exprnum);
3262   pre_expr newphi;
3263   unsigned int val = get_expr_value_id (expr);
3264   edge pred;
3265   bool insertions = false;
3266   bool nophi = false;
3267   basic_block bprime;
3268   pre_expr eprime;
3269   edge_iterator ei;
3270   tree type = get_expr_type (expr);
3271   tree temp;
3272   gimple phi;
3273 
3274   if (dump_file && (dump_flags & TDF_DETAILS))
3275     {
3276       fprintf (dump_file, "Found partial redundancy for expression ");
3277       print_pre_expr (dump_file, expr);
3278       fprintf (dump_file, " (%04d)\n", val);
3279     }
3280 
3281   /* Make sure we aren't creating an induction variable.  */
3282   if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3283     {
3284       bool firstinsideloop = false;
3285       bool secondinsideloop = false;
3286       firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3287 					       EDGE_PRED (block, 0)->src);
3288       secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3289 						EDGE_PRED (block, 1)->src);
3290       /* Induction variables only have one edge inside the loop.  */
3291       if ((firstinsideloop ^ secondinsideloop)
3292 	  && (expr->kind != REFERENCE
3293 	      || inhibit_phi_insertion (block, expr)))
3294 	{
3295 	  if (dump_file && (dump_flags & TDF_DETAILS))
3296 	    fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3297 	  nophi = true;
3298 	}
3299     }
3300 
3301   /* Make the necessary insertions.  */
3302   FOR_EACH_EDGE (pred, ei, block->preds)
3303     {
3304       gimple_seq stmts = NULL;
3305       tree builtexpr;
3306       bprime = pred->src;
3307       eprime = avail[bprime->index];
3308 
3309       if (eprime->kind != NAME && eprime->kind != CONSTANT)
3310 	{
3311 	  builtexpr = create_expression_by_pieces (bprime,
3312 						   eprime,
3313 						   &stmts, NULL,
3314 						   type);
3315 	  gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3316 	  gsi_insert_seq_on_edge (pred, stmts);
3317 	  avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3318 	  insertions = true;
3319 	}
3320       else if (eprime->kind == CONSTANT)
3321 	{
3322 	  /* Constants may not have the right type; fold_convert
3323 	     should give us back a constant with the right type.  */
3325 	  tree constant = PRE_EXPR_CONSTANT (eprime);
3326 	  if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3327 	    {
3328 	      tree builtexpr = fold_convert (type, constant);
3329 	      if (!is_gimple_min_invariant (builtexpr))
3330 		{
3331 		  tree forcedexpr = force_gimple_operand (builtexpr,
3332 							  &stmts, true,
3333 							  NULL);
3334 		  if (!is_gimple_min_invariant (forcedexpr))
3335 		    {
3336 		      if (forcedexpr != builtexpr)
3337 			{
3338 			  VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3339 			  VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3340 			}
3341 		      if (stmts)
3342 			{
3343 			  gimple_stmt_iterator gsi;
3344 			  gsi = gsi_start (stmts);
3345 			  for (; !gsi_end_p (gsi); gsi_next (&gsi))
3346 			    {
3347 			      gimple stmt = gsi_stmt (gsi);
3348 			      VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3349 			      gimple_set_plf (stmt, NECESSARY, false);
3350 			    }
3351 			  gsi_insert_seq_on_edge (pred, stmts);
3352 			}
3353 		      avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3354 		    }
3355 		}
3356 	    }
3357 	}
3358       else if (eprime->kind == NAME)
3359 	{
3360 	  /* We may have to do a conversion because our value
3361 	     numbering can look through types in certain cases, but
3362 	     our IL requires all operands of a phi node have the same
3363 	     type.  */
3364 	  tree name = PRE_EXPR_NAME (eprime);
3365 	  if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3366 	    {
3367 	      tree builtexpr;
3368 	      tree forcedexpr;
3369 	      builtexpr = fold_convert (type, name);
3370 	      forcedexpr = force_gimple_operand (builtexpr,
3371 						 &stmts, true,
3372 						 NULL);
3373 
3374 	      if (forcedexpr != name)
3375 		{
3376 		  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3377 		  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3378 		}
3379 
3380 	      if (stmts)
3381 		{
3382 		  gimple_stmt_iterator gsi;
3383 		  gsi = gsi_start (stmts);
3384 		  for (; !gsi_end_p (gsi); gsi_next (&gsi))
3385 		    {
3386 		      gimple stmt = gsi_stmt (gsi);
3387 		      VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3388 		      gimple_set_plf (stmt, NECESSARY, false);
3389 		    }
3390 		  gsi_insert_seq_on_edge (pred, stmts);
3391 		}
3392 	      avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3393 	    }
3394 	}
3395     }
3396   /* If we didn't want a phi node but made insertions, we have still
3397      inserted new code, and thus return true.  If we didn't want a phi
3398      node and made no insertions, we haven't added anything new, so
3399      return false.  */
3400   if (nophi && insertions)
3401     return true;
3402   else if (nophi && !insertions)
3403     return false;
3404 
3405   /* Now build a phi for the new variable.  */
3406   if (!prephitemp || TREE_TYPE (prephitemp) != type)
3407     {
3408       prephitemp = create_tmp_var (type, "prephitmp");
3409       get_var_ann (prephitemp);
3410     }
3411 
3412   temp = prephitemp;
3413   add_referenced_var (temp);
3414 
3415   if (TREE_CODE (type) == COMPLEX_TYPE
3416       || TREE_CODE (type) == VECTOR_TYPE)
3417     DECL_GIMPLE_REG_P (temp) = 1;
3418   phi = create_phi_node (temp, block);
3419 
3420   gimple_set_plf (phi, NECESSARY, false);
3421   VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3422   VN_INFO (gimple_phi_result (phi))->value_id = val;
3423   VEC_safe_push (gimple, heap, inserted_exprs, phi);
3424   bitmap_set_bit (inserted_phi_names,
3425 		  SSA_NAME_VERSION (gimple_phi_result (phi)));
3426   FOR_EACH_EDGE (pred, ei, block->preds)
3427     {
3428       pre_expr ae = avail[pred->src->index];
3429       gcc_assert (get_expr_type (ae) == type
3430 		  || useless_type_conversion_p (type, get_expr_type (ae)));
3431       if (ae->kind == CONSTANT)
3432 	add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3433       else
3434 	add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3435 		     UNKNOWN_LOCATION);
3436     }
3437 
3438   newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3439   add_to_value (val, newphi);
3440 
3441   /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3442      this insertion, since we test for the existence of this value in PHI_GEN
3443      before proceeding with the partial redundancy checks in insert_aux.
3444 
3445      The value may exist in AVAIL_OUT, in particular, it could be represented
3446      by the expression we are trying to eliminate, in which case we want the
3447      replacement to occur.  If it does not already exist in AVAIL_OUT,
3448      we want it inserted there.
3449 
3450      Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3451      this block, because if it did, it would have existed in our dominator's
3452      AVAIL_OUT, and would have been skipped due to the full redundancy check.
3453   */
3454 
3455   bitmap_insert_into_set (PHI_GEN (block), newphi);
3456   bitmap_value_replace_in_set (AVAIL_OUT (block),
3457 			       newphi);
3458   bitmap_insert_into_set (NEW_SETS (block),
3459 			  newphi);
3460 
3461   if (dump_file && (dump_flags & TDF_DETAILS))
3462     {
3463       fprintf (dump_file, "Created phi ");
3464       print_gimple_stmt (dump_file, phi, 0, 0);
3465       fprintf (dump_file, " in block %d\n", block->index);
3466     }
3467   pre_stats.phis++;
3468   return true;
3469 }
3470 
3471 
3472 
3473 /* Perform insertion of partially redundant values.
3474    For BLOCK, do the following:
3475    1.  Propagate the NEW_SETS of the dominator into the current block.
3476    If the block has multiple predecessors,
3477        2a. Iterate over the ANTIC expressions for the block to see if
3478 	   any of them are partially redundant.
3479        2b. If so, insert them into the necessary predecessors to make
3480 	   the expression fully redundant.
3481        2c. Insert a new PHI merging the values of the predecessors.
3482        2d. Insert the new PHI, and the new expressions, into the
3483 	   NEW_SETS set.
3484    3. Recursively call ourselves on the dominator children of BLOCK.
3485 
3486    Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3487    do_regular_insertion and do_partial_insertion.
3488    do_regular_insertion and do_partial_partial_insertion.
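
   As an illustrative sketch (the SSA names below are hypothetical),
   given the diamond

       if (p_1)
         x_2 = a_3 + b_4;
       else
         ;
       y_5 = a_3 + b_4;   <- join block

   a_3 + b_4 reaches the join from the then-predecessor only, so it is
   partially redundant there:  step 2b inserts t_6 = a_3 + b_4 on the
   else edge, step 2c creates prephitmp_7 = PHI <x_2, t_6>, and the
   computation of y_5 becomes fully redundant.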
3489 */
3490 
3491 static bool
3492 do_regular_insertion (basic_block block, basic_block dom)
3493 {
3494   bool new_stuff = false;
3495   VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3496   pre_expr expr;
3497   int i;
3498 
3499   for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3500     {
3501       if (expr->kind != NAME)
3502 	{
3503 	  pre_expr *avail;
3504 	  unsigned int val;
3505 	  bool by_some = false;
3506 	  bool cant_insert = false;
3507 	  bool all_same = true;
3508 	  pre_expr first_s = NULL;
3509 	  edge pred;
3510 	  basic_block bprime;
3511 	  pre_expr eprime = NULL;
3512 	  edge_iterator ei;
3513 	  pre_expr edoubleprime = NULL;
3514 	  bool do_insertion = false;
3515 
3516 	  val = get_expr_value_id (expr);
3517 	  if (bitmap_set_contains_value (PHI_GEN (block), val))
3518 	    continue;
3519 	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3520 	    {
3521 	      if (dump_file && (dump_flags & TDF_DETAILS))
3522 		fprintf (dump_file, "Found fully redundant value\n");
3523 	      continue;
3524 	    }
3525 
3526 	  avail = XCNEWVEC (pre_expr, last_basic_block);
3527 	  FOR_EACH_EDGE (pred, ei, block->preds)
3528 	    {
3529 	      unsigned int vprime;
3530 
3531 	      /* We should never run insertion for the exit block
3532 	         and so not come across fake pred edges.  */
3533 	         and so should never come across fake pred edges.  */
3534 	      bprime = pred->src;
3535 	      eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3536 				      bprime, block);
3537 
3538 	      /* eprime will generally only be NULL if the
3539 		 value of the expression, translated
3540 		 through the PHI for this predecessor, is
3541 		 undefined.  If that is the case, we can't
3542 		 make the expression fully redundant,
3543 		 because its value is undefined along a
3544 		 predecessor path.  We can thus break out
3545 		 early because it doesn't matter what the
3546 		 rest of the results are.  */
3547 	      if (eprime == NULL)
3548 		{
3549 		  cant_insert = true;
3550 		  break;
3551 		}
3552 
3553 	      eprime = fully_constant_expression (eprime);
3554 	      vprime = get_expr_value_id (eprime);
3555 	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3556 						 vprime, NULL);
3557 	      if (edoubleprime == NULL)
3558 		{
3559 		  avail[bprime->index] = eprime;
3560 		  all_same = false;
3561 		}
3562 	      else
3563 		{
3564 		  avail[bprime->index] = edoubleprime;
3565 		  by_some = true;
3566 		  /* We want to perform insertions to remove a redundancy on
3567 		     a path in the CFG we want to optimize for speed.  */
3568 		  if (optimize_edge_for_speed_p (pred))
3569 		    do_insertion = true;
3570 		  if (first_s == NULL)
3571 		    first_s = edoubleprime;
3572 		  else if (!pre_expr_eq (first_s, edoubleprime))
3573 		    all_same = false;
3574 		}
3575 	    }
3576 	  /* If we can insert it, it is not already the same
3577 	     value along every predecessor, and it is defined
3578 	     by some predecessor, then it is partially
3579 	     redundant.  */
3580 	  if (!cant_insert && !all_same && by_some && do_insertion
3581 	      && dbg_cnt (treepre_insert))
3582 	    {
3583 	      if (insert_into_preds_of_block (block, get_expression_id (expr),
3584 					      avail))
3585 		new_stuff = true;
3586 	    }
3587 	  /* If all edges produce the same value and that value is
3588 	     an invariant, then the PHI has the same value on all
3589 	     edges.  Note this.  */
3590 	  else if (!cant_insert && all_same && eprime
3591 		   && (edoubleprime->kind == CONSTANT
3592 		       || edoubleprime->kind == NAME)
3593 		   && !value_id_constant_p (val))
3594 	    {
3595 	      unsigned int j;
3596 	      bitmap_iterator bi;
3597 	      bitmap_set_t exprset = VEC_index (bitmap_set_t,
3598 						value_expressions, val);
3599 
3600 	      unsigned int new_val = get_expr_value_id (edoubleprime);
3601 	      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3602 		{
3603 		  pre_expr expr = expression_for_id (j);
3604 
3605 		  if (expr->kind == NAME)
3606 		    {
3607 		      vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3608 		      /* Just reset the value id and valnum so it is
3609 			 the same as the constant we have discovered.  */
3610 		      if (edoubleprime->kind == CONSTANT)
3611 			{
3612 			  info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3613 			  pre_stats.constified++;
3614 			}
3615 		      else
3616 			info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3617 		      info->value_id = new_val;
3618 		    }
3619 		}
3620 	    }
3621 	  free (avail);
3622 	}
3623     }
3624 
3625   VEC_free (pre_expr, heap, exprs);
3626   return new_stuff;
3627 }
3628 
3629 
3630 /* Perform insertion for partially anticipatable expressions.  There
3631    is only one case we will perform insertion for these.  This case is
3632    is only one case in which we will perform insertion for these:  if
3633    the expression is partially anticipatable and fully available.
3634    In this case, we know that computing it earlier will enable us to
3635 
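
/* An illustrative sketch (hypothetical SSA names):

       if (p_1)
         x_2 = a_3 + b_4;
       else
         y_5 = a_3 + b_4;
       <join block>
       if (q_6)
         z_7 = a_3 + b_4;

   At the join block a_3 + b_4 is only partially anticipatable, since it
   is computed on just one of the paths below, so do_regular_insertion
   skips it; but its value is fully available from both predecessors, so
   inserting PHI <x_2, y_5> at the join makes the later computation of
   z_7 fully redundant.  */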
3636 
3637 static bool
3638 do_partial_partial_insertion (basic_block block, basic_block dom)
3639 {
3640   bool new_stuff = false;
3641   VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3642   pre_expr expr;
3643   int i;
3644 
3645   for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3646     {
3647       if (expr->kind != NAME)
3648 	{
3649 	  pre_expr *avail;
3650 	  unsigned int val;
3651 	  bool by_all = true;
3652 	  bool cant_insert = false;
3653 	  edge pred;
3654 	  basic_block bprime;
3655 	  pre_expr eprime = NULL;
3656 	  edge_iterator ei;
3657 
3658 	  val = get_expr_value_id (expr);
3659 	  if (bitmap_set_contains_value (PHI_GEN (block), val))
3660 	    continue;
3661 	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3662 	    continue;
3663 
3664 	  avail = XCNEWVEC (pre_expr, last_basic_block);
3665 	  FOR_EACH_EDGE (pred, ei, block->preds)
3666 	    {
3667 	      unsigned int vprime;
3668 	      pre_expr edoubleprime;
3669 
3670 	      /* We should never run insertion for the exit block
3671 	         and so not come across fake pred edges.  */
3672 	         and so should never come across fake pred edges.  */
3673 	      bprime = pred->src;
3674 	      eprime = phi_translate (expr, ANTIC_IN (block),
3675 				      PA_IN (block),
3676 				      bprime, block);
3677 
3678 	      /* eprime will generally only be NULL if the
3679 		 value of the expression, translated
3680 		 through the PHI for this predecessor, is
3681 		 undefined.  If that is the case, we can't
3682 		 make the expression fully redundant,
3683 		 because its value is undefined along a
3684 		 predecessor path.  We can thus break out
3685 		 early because it doesn't matter what the
3686 		 rest of the results are.  */
3687 	      if (eprime == NULL)
3688 		{
3689 		  cant_insert = true;
3690 		  break;
3691 		}
3692 
3693 	      eprime = fully_constant_expression (eprime);
3694 	      vprime = get_expr_value_id (eprime);
3695 	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3696 						 vprime, NULL);
3697 	      if (edoubleprime == NULL)
3698 		{
3699 		  by_all = false;
3700 		  break;
3701 		}
3702 	      else
3703 		avail[bprime->index] = edoubleprime;
3704 
3705 	    }
3706 
3707 	  /* If we can insert it and the value is fully
3708 	     available along every predecessor, hoisting the
3709 	     expression here removes the later, partially
3710 	     redundant computation.  */
3711 	  if (!cant_insert && by_all && dbg_cnt (treepre_insert))
3712 	    {
3713 	      pre_stats.pa_insert++;
3714 	      if (insert_into_preds_of_block (block, get_expression_id (expr),
3715 					      avail))
3716 		new_stuff = true;
3717 	    }
3718 	  free (avail);
3719 	}
3720     }
3721 
3722   VEC_free (pre_expr, heap, exprs);
3723   return new_stuff;
3724 }
3725 
3726 static bool
3727 insert_aux (basic_block block)
3728 {
3729   basic_block son;
3730   bool new_stuff = false;
3731 
3732   if (block)
3733     {
3734       basic_block dom;
3735       dom = get_immediate_dominator (CDI_DOMINATORS, block);
3736       if (dom)
3737 	{
3738 	  unsigned i;
3739 	  bitmap_iterator bi;
3740 	  bitmap_set_t newset = NEW_SETS (dom);
3741 	  if (newset)
3742 	    {
3743 	      /* Note that we need to value_replace both NEW_SETS
3744 		 and AVAIL_OUT.  In both sets the value may currently
3745 		 be represented by some non-simple expression that
3746 		 we want to replace with the new leader.  */
3747 	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3748 		{
3749 		  pre_expr expr = expression_for_id (i);
3750 		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
3751 		  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3752 		}
3753 	    }
3754 	  if (!single_pred_p (block))
3755 	    {
3756 	      new_stuff |= do_regular_insertion (block, dom);
3757 	      if (do_partial_partial)
3758 		new_stuff |= do_partial_partial_insertion (block, dom);
3759 	    }
3760 	}
3761     }
3762   for (son = first_dom_son (CDI_DOMINATORS, block);
3763        son;
3764        son = next_dom_son (CDI_DOMINATORS, son))
3765     {
3766       new_stuff |= insert_aux (son);
3767     }
3768 
3769   return new_stuff;
3770 }
3771 
3772 /* Perform insertion of partially redundant values.  */
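
/* Note that insert_aux is iterated to a fixed point:  an insertion made
   in one pass can make further expressions partially redundant, which a
   later iteration will then catch.  */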
3773 
3774 static void
3775 insert (void)
3776 {
3777   bool new_stuff = true;
3778   basic_block bb;
3779   int num_iterations = 0;
3780 
3781   FOR_ALL_BB (bb)
3782     NEW_SETS (bb) = bitmap_set_new ();
3783 
3784   while (new_stuff)
3785     {
3786       num_iterations++;
3787       new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3788     }
3789   statistics_histogram_event (cfun, "insert iterations", num_iterations);
3790 }
3791 
3792 
3793 /* Add OP to EXP_GEN (BLOCK), unless OP is an undefined SSA name.  */
3794 
3795 static void
3796 add_to_exp_gen (basic_block block, tree op)
3797 {
3798   if (!in_fre)
3799     {
3800       pre_expr result;
3801       if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3802 	return;
3803       result = get_or_alloc_expr_for_name (op);
3804       bitmap_value_insert_into_set (EXP_GEN (block), result);
3805     }
3806 }
3807 
3808 /* Create value ids for PHI in BLOCK.  */
3809 
3810 static void
3811 make_values_for_phi (gimple phi, basic_block block)
3812 {
3813   tree result = gimple_phi_result (phi);
3814 
3815   /* We have no need for virtual phis, as they don't represent
3816      actual computations.  */
3817   if (is_gimple_reg (result))
3818     {
3819       pre_expr e = get_or_alloc_expr_for_name (result);
3820       add_to_value (get_expr_value_id (e), e);
3821       bitmap_insert_into_set (PHI_GEN (block), e);
3822       bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3823       if (!in_fre)
3824 	{
3825 	  unsigned i;
3826 	  for (i = 0; i < gimple_phi_num_args (phi); ++i)
3827 	    {
3828 	      tree arg = gimple_phi_arg_def (phi, i);
3829 	      if (TREE_CODE (arg) == SSA_NAME)
3830 		{
3831 		  e = get_or_alloc_expr_for_name (arg);
3832 		  add_to_value (get_expr_value_id (e), e);
3833 		}
3834 	    }
3835 	}
3836     }
3837 }
3838 
3839 /* Compute the AVAIL set for all basic blocks.
3840 
3841    This function performs value numbering of the statements in each basic
3842    block.  The AVAIL sets are built from information we glean while doing
3843    this value numbering, since the AVAIL sets contain only one entry per
3844    value.
3845 
3846    AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3847    AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
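
/* As an illustrative sketch (hypothetical SSA names), value numbering a
   statement x_1 = a_2 + b_3 adds x_1 to TMP_GEN and AVAIL_OUT of its
   block, and adds the expression a_2 + b_3, together with its SSA
   operands, to EXP_GEN of the block.  */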
3848 
3849 static void
3850 compute_avail (void)
3851 {
3852 
3853   basic_block block, son;
3854   basic_block *worklist;
3855   size_t sp = 0;
3856   unsigned i;
3857 
3858   /* We pretend that default definitions are defined in the entry block.
3859      This includes function arguments and the static chain decl.  */
3860   for (i = 1; i < num_ssa_names; ++i)
3861     {
3862       tree name = ssa_name (i);
3863       pre_expr e;
3864       if (!name
3865 	  || !SSA_NAME_IS_DEFAULT_DEF (name)
3866 	  || has_zero_uses (name)
3867 	  || !is_gimple_reg (name))
3868 	continue;
3869 
3870       e = get_or_alloc_expr_for_name (name);
3871       add_to_value (get_expr_value_id (e), e);
3872       if (!in_fre)
3873 	bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3874       bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3875     }
3876 
3877   /* Allocate the worklist.  */
3878   worklist = XNEWVEC (basic_block, n_basic_blocks);
3879 
3880   /* Seed the algorithm by putting the dominator children of the entry
3881      block on the worklist.  */
3882   for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3883        son;
3884        son = next_dom_son (CDI_DOMINATORS, son))
3885     worklist[sp++] = son;
3886 
3887   /* Loop until the worklist is empty.  */
3888   while (sp)
3889     {
3890       gimple_stmt_iterator gsi;
3891       gimple stmt;
3892       basic_block dom;
3893       unsigned int stmt_uid = 1;
3894 
3895       /* Pick a block from the worklist.  */
3896       block = worklist[--sp];
3897 
3898       /* Initially, the set of available values in BLOCK is that of
3899 	 its immediate dominator.  */
3900       dom = get_immediate_dominator (CDI_DOMINATORS, block);
3901       if (dom)
3902 	bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3903 
3904       /* Generate values for PHI nodes.  */
3905       for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3906 	make_values_for_phi (gsi_stmt (gsi), block);
3907 
3908       BB_MAY_NOTRETURN (block) = 0;
3909 
3910       /* Now compute value numbers and populate value sets with all
3911 	 the expressions computed in BLOCK.  */
3912       for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3913 	{
3914 	  ssa_op_iter iter;
3915 	  tree op;
3916 
3917 	  stmt = gsi_stmt (gsi);
3918 	  gimple_set_uid (stmt, stmt_uid++);
3919 
3920 	  /* Cache whether the basic-block has any non-visible side-effect
3921 	     or control flow.
3922 	     If this isn't a call or it is the last stmt in the
3923 	     basic-block then the CFG represents things correctly.  */
3924 	  if (is_gimple_call (stmt)
3925 	      && !stmt_ends_bb_p (stmt))
3926 	    {
3927 	      /* Non-looping const functions always return normally.
3928 		 Otherwise the call might not return or have side-effects
3929 		 that forbid hoisting possibly trapping expressions
3930 		 before it.  */
3931 	      int flags = gimple_call_flags (stmt);
3932 	      if (!(flags & ECF_CONST)
3933 		  || (flags & ECF_LOOPING_CONST_OR_PURE))
3934 		BB_MAY_NOTRETURN (block) = 1;
3935 	    }
3936 
3937 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3938 	    {
3939 	      pre_expr e = get_or_alloc_expr_for_name (op);
3940 
3941 	      add_to_value (get_expr_value_id (e), e);
3942 	      if (!in_fre)
3943 		bitmap_insert_into_set (TMP_GEN (block), e);
3944 	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3945 	    }
3946 
3947 	  if (gimple_has_volatile_ops (stmt)
3948 	      || stmt_could_throw_p (stmt))
3949 	    continue;
3950 
3951 	  switch (gimple_code (stmt))
3952 	    {
3953 	    case GIMPLE_RETURN:
3954 	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3955 		add_to_exp_gen (block, op);
3956 	      continue;
3957 
3958 	    case GIMPLE_CALL:
3959 	      {
3960 		vn_reference_t ref;
3961 		unsigned int i;
3962 		vn_reference_op_t vro;
3963 		pre_expr result = NULL;
3964 		VEC(vn_reference_op_s, heap) *ops = NULL;
3965 
3966 		if (!can_value_number_call (stmt))
3967 		  continue;
3968 
3969 		copy_reference_ops_from_call (stmt, &ops);
3970 		vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3971 					    gimple_expr_type (stmt),
3972 					    ops, &ref, VN_NOWALK);
3973 		VEC_free (vn_reference_op_s, heap, ops);
3974 		if (!ref)
3975 		  continue;
3976 
3977 		for (i = 0; VEC_iterate (vn_reference_op_s,
3978 					 ref->operands, i,
3979 					 vro); i++)
3980 		  {
3981 		    if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
3982 		      add_to_exp_gen (block, vro->op0);
3983 		    if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
3984 		      add_to_exp_gen (block, vro->op1);
3985 		    if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
3986 		      add_to_exp_gen (block, vro->op2);
3987 		  }
3988 		result = (pre_expr) pool_alloc (pre_expr_pool);
3989 		result->kind = REFERENCE;
3990 		result->id = 0;
3991 		PRE_EXPR_REFERENCE (result) = ref;
3992 
3993 		get_or_alloc_expression_id (result);
3994 		add_to_value (get_expr_value_id (result), result);
3995 		if (!in_fre)
3996 		  bitmap_value_insert_into_set (EXP_GEN (block), result);
3997 		continue;
3998 	      }
3999 
4000 	    case GIMPLE_ASSIGN:
4001 	      {
4002 		pre_expr result = NULL;
4003 		switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4004 		  {
4005 		  case tcc_unary:
4006 		  case tcc_binary:
4007 		  case tcc_comparison:
4008 		    {
4009 		      vn_nary_op_t nary;
4010 		      unsigned int i;
4011 
4012 		      vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4013 						gimple_assign_rhs_code (stmt),
4014 						gimple_expr_type (stmt),
4015 						gimple_assign_rhs1 (stmt),
4016 						gimple_assign_rhs2 (stmt),
4017 						NULL_TREE, NULL_TREE, &nary);
4018 
4019 		      if (!nary)
4020 			continue;
4021 
4022 		      for (i = 0; i < nary->length; i++)
4023 			if (TREE_CODE (nary->op[i]) == SSA_NAME)
4024 			  add_to_exp_gen (block, nary->op[i]);
4025 
4026 		      result = (pre_expr) pool_alloc (pre_expr_pool);
4027 		      result->kind = NARY;
4028 		      result->id = 0;
4029 		      PRE_EXPR_NARY (result) = nary;
4030 		      break;
4031 		    }
4032 
4033 		  case tcc_declaration:
4034 		  case tcc_reference:
4035 		    {
4036 		      vn_reference_t ref;
4037 		      unsigned int i;
4038 		      vn_reference_op_t vro;
4039 
4040 		      vn_reference_lookup (gimple_assign_rhs1 (stmt),
4041 					   gimple_vuse (stmt),
4042 					   VN_WALK, &ref);
4043 		      if (!ref)
4044 			continue;
4045 
4046 		      for (i = 0; VEC_iterate (vn_reference_op_s,
4047 					       ref->operands, i,
4048 					       vro); i++)
4049 			{
4050 			  if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4051 			    add_to_exp_gen (block, vro->op0);
4052 			  if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4053 			    add_to_exp_gen (block, vro->op1);
4054 			  if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4055 			    add_to_exp_gen (block, vro->op2);
4056 			}
4057 		      result = (pre_expr) pool_alloc (pre_expr_pool);
4058 		      result->kind = REFERENCE;
4059 		      result->id = 0;
4060 		      PRE_EXPR_REFERENCE (result) = ref;
4061 		      break;
4062 		    }
4063 
4064 		  default:
4065 		    /* For any other statement that we don't
4066 		       recognize, simply add all referenced
4067 		       SSA_NAMEs to EXP_GEN.  */
4068 		    FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4069 		      add_to_exp_gen (block, op);
4070 		    continue;
4071 		  }
4072 
4073 		get_or_alloc_expression_id (result);
4074 		add_to_value (get_expr_value_id (result), result);
4075 		if (!in_fre)
4076 		  bitmap_value_insert_into_set (EXP_GEN (block), result);
4077 
4078 		continue;
4079 	      }
4080 	    default:
4081 	      break;
4082 	    }
4083 	}
4084 
4085       /* Put the dominator children of BLOCK on the worklist of blocks
4086 	 to compute available sets for.  */
4087       for (son = first_dom_son (CDI_DOMINATORS, block);
4088 	   son;
4089 	   son = next_dom_son (CDI_DOMINATORS, son))
4090 	worklist[sp++] = son;
4091     }
4092 
4093   free (worklist);
4094 }
4095 
4096 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4097    than the available expressions for it.  The insertion point is
4098    right before the first use in STMT.  Returns the SSA_NAME that should
4099    be used for replacement.  */
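
/* For example, if SCCVN recorded that x_3 is equal to the nowhere
   available expression a_1 + 1, this emits a new t_4 = a_1 + 1
   immediately before STMT and returns t_4 (names illustrative).  */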
4100 
4101 static tree
4102 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4103 {
4104   basic_block bb = gimple_bb (stmt);
4105   gimple_stmt_iterator gsi;
4106   gimple_seq stmts = NULL;
4107   tree expr;
4108   pre_expr e;
4109 
4110   /* First create a value expression from the expression we want
4111      to insert and associate it with the value handle for SSA_VN.  */
4112   e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4113   if (e == NULL)
4114     return NULL_TREE;
4115 
4116   /* Then use create_expression_by_pieces to generate a valid
4117      expression to insert at this point of the IL stream.  */
4118   expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4119   if (expr == NULL_TREE)
4120     return NULL_TREE;
4121   gsi = gsi_for_stmt (stmt);
4122   gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4123 
4124   return expr;
4125 }
4126 
4127 /* Eliminate fully redundant computations.  */
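
/* As an illustrative sketch (hypothetical SSA names):  if
   x_1 = a_2 + b_3 dominates y_4 = a_2 + b_3, then x_1 is the AVAIL_OUT
   leader for the value at the second statement, which is therefore
   rewritten into the copy y_4 = x_1.  */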
4128 
4129 static unsigned int
4130 eliminate (void)
4131 {
4132   VEC (gimple, heap) *to_remove = NULL;
4133   basic_block b;
4134   unsigned int todo = 0;
4135   gimple_stmt_iterator gsi;
4136   gimple stmt;
4137   unsigned i;
4138 
4139   FOR_EACH_BB (b)
4140     {
4141       for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4142 	{
4143 	  stmt = gsi_stmt (gsi);
4144 
4145 	  /* Lookup the RHS of the expression, see if we have an
4146 	     available computation for it.  If so, replace the RHS with
4147 	     the available computation.  */
4148 	  if (gimple_has_lhs (stmt)
4149 	      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
4150 	      && !gimple_assign_ssa_name_copy_p (stmt)
4151 	      && (!gimple_assign_single_p (stmt)
4152 		  || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
4153 	      && !gimple_has_volatile_ops  (stmt)
4154 	      && !has_zero_uses (gimple_get_lhs (stmt)))
4155 	    {
4156 	      tree lhs = gimple_get_lhs (stmt);
4157 	      tree rhs = NULL_TREE;
4158 	      tree sprime = NULL;
4159 	      pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4160 	      pre_expr sprimeexpr;
4161 
4162 	      if (gimple_assign_single_p (stmt))
4163 		rhs = gimple_assign_rhs1 (stmt);
4164 
4165 	      sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4166 					       get_expr_value_id (lhsexpr),
4167 					       NULL);
4168 
4169 	      if (sprimeexpr)
4170 		{
4171 		  if (sprimeexpr->kind == CONSTANT)
4172 		    sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4173 		  else if (sprimeexpr->kind == NAME)
4174 		    sprime = PRE_EXPR_NAME (sprimeexpr);
4175 		  else
4176 		    gcc_unreachable ();
4177 		}
4178 
4179 	      /* If there is no existing leader but SCCVN knows this
4180 		 value is constant, use that constant.  */
4181 	      if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4182 		{
4183 		  sprime = VN_INFO (lhs)->valnum;
4184 		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
4185 						  TREE_TYPE (sprime)))
4186 		    sprime = fold_convert (TREE_TYPE (lhs), sprime);
4187 
4188 		  if (dump_file && (dump_flags & TDF_DETAILS))
4189 		    {
4190 		      fprintf (dump_file, "Replaced ");
4191 		      print_gimple_expr (dump_file, stmt, 0, 0);
4192 		      fprintf (dump_file, " with ");
4193 		      print_generic_expr (dump_file, sprime, 0);
4194 		      fprintf (dump_file, " in ");
4195 		      print_gimple_stmt (dump_file, stmt, 0, 0);
4196 		    }
4197 		  pre_stats.eliminations++;
4198 		  propagate_tree_value_into_stmt (&gsi, sprime);
4199 		  stmt = gsi_stmt (gsi);
4200 		  update_stmt (stmt);
4201 		  continue;
4202 		}
4203 
4204 	      /* If there is no existing usable leader but SCCVN thinks
4205 		 it has an expression it wants to use as replacement,
4206 		 insert that.  */
4207 	      if (!sprime || sprime == lhs)
4208 		{
4209 		  tree val = VN_INFO (lhs)->valnum;
4210 		  if (val != VN_TOP
4211 		      && TREE_CODE (val) == SSA_NAME
4212 		      && VN_INFO (val)->needs_insertion
4213 		      && can_PRE_operation (vn_get_expr_for (val)))
4214 		    sprime = do_SCCVN_insertion (stmt, val);
4215 		}
4216 	      if (sprime
4217 		  && sprime != lhs
4218 		  && (rhs == NULL_TREE
4219 		      || TREE_CODE (rhs) != SSA_NAME
4220 		      || may_propagate_copy (rhs, sprime)))
4221 		{
4222 		  gcc_assert (sprime != rhs);
4223 
4224 		  if (dump_file && (dump_flags & TDF_DETAILS))
4225 		    {
4226 		      fprintf (dump_file, "Replaced ");
4227 		      print_gimple_expr (dump_file, stmt, 0, 0);
4228 		      fprintf (dump_file, " with ");
4229 		      print_generic_expr (dump_file, sprime, 0);
4230 		      fprintf (dump_file, " in ");
4231 		      print_gimple_stmt (dump_file, stmt, 0, 0);
4232 		    }
4233 
4234 		  if (TREE_CODE (sprime) == SSA_NAME)
4235 		    gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4236 				    NECESSARY, true);
4237 		  /* We need to make sure the new and old types actually match,
4238 		     which may require adding a simple cast, which fold_convert
4239 		     will do for us.  */
4240 		  if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4241 		      && !useless_type_conversion_p (gimple_expr_type (stmt),
4242 						     TREE_TYPE (sprime)))
4243 		    sprime = fold_convert (gimple_expr_type (stmt), sprime);
4244 
4245 		  pre_stats.eliminations++;
4246 		  propagate_tree_value_into_stmt (&gsi, sprime);
4247 		  stmt = gsi_stmt (gsi);
4248 		  update_stmt (stmt);
4249 
4250 		  /* If we removed EH side effects from the statement, clean
4251 		     its EH information.  */
4252 		  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4253 		    {
4254 		      bitmap_set_bit (need_eh_cleanup,
4255 				      gimple_bb (stmt)->index);
4256 		      if (dump_file && (dump_flags & TDF_DETAILS))
4257 			fprintf (dump_file, "  Removed EH side effects.\n");
4258 		    }
4259 		}
4260 	    }
4261 	  /* If the statement is a scalar store, see if the expression
4262 	     has the same value number as its rhs.  If so, the store is
4263 	     dead.  */
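	  /* E.g. a store *p_1 = x_2 is dead if looking up *p_1 at this
	     point already yields the value of x_2 (names illustrative).  */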
4264 	  else if (gimple_assign_single_p (stmt)
4265 		   && !is_gimple_reg (gimple_assign_lhs (stmt))
4266 		   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4267 		       || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4268 	    {
4269 	      tree rhs = gimple_assign_rhs1 (stmt);
4270 	      tree val;
4271 	      val = vn_reference_lookup (gimple_assign_lhs (stmt),
4272 					 gimple_vuse (stmt), VN_WALK, NULL);
4273 	      if (TREE_CODE (rhs) == SSA_NAME)
4274 		rhs = VN_INFO (rhs)->valnum;
4275 	      if (val
4276 		  && operand_equal_p (val, rhs, 0))
4277 		{
4278 		  if (dump_file && (dump_flags & TDF_DETAILS))
4279 		    {
4280 		      fprintf (dump_file, "Deleted redundant store ");
4281 		      print_gimple_stmt (dump_file, stmt, 0, 0);
4282 		    }
4283 
4284 		  /* Queue stmt for removal.  */
4285 		  VEC_safe_push (gimple, heap, to_remove, stmt);
4286 		}
4287 	    }
4288 	  /* Visit COND_EXPRs and fold the comparison with the
4289 	     available value-numbers.  */
4290 	  else if (gimple_code (stmt) == GIMPLE_COND)
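	  /* E.g. if op0 value-numbers to the constant 5 and op1 to the
	     constant 7, a 5 < 7 condition folds to true, the GIMPLE_COND
	     is made unconditional and CFG cleanup is queued.  */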
4291 	    {
4292 	      tree op0 = gimple_cond_lhs (stmt);
4293 	      tree op1 = gimple_cond_rhs (stmt);
4294 	      tree result;
4295 
4296 	      if (TREE_CODE (op0) == SSA_NAME)
4297 		op0 = VN_INFO (op0)->valnum;
4298 	      if (TREE_CODE (op1) == SSA_NAME)
4299 		op1 = VN_INFO (op1)->valnum;
4300 	      result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4301 				    op0, op1);
4302 	      if (result && TREE_CODE (result) == INTEGER_CST)
4303 		{
4304 		  if (integer_zerop (result))
4305 		    gimple_cond_make_false (stmt);
4306 		  else
4307 		    gimple_cond_make_true (stmt);
4308 		  update_stmt (stmt);
4309 		  todo = TODO_cleanup_cfg;
4310 		}
4311 	    }
4312 	  /* Visit indirect calls and turn them into direct calls if
4313 	     possible.  */
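	  /* E.g. if the SSA call target fn_2 value-numbers to &foo for a
	     FUNCTION_DECL foo, the call through fn_2 is rewritten to call
	     foo directly (names illustrative).  */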
4314 	  if (gimple_code (stmt) == GIMPLE_CALL
4315 	      && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
4316 	    {
4317 	      tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
4318 	      bool was_noreturn = gimple_call_noreturn_p (stmt);
4319 
4320 	      if (TREE_CODE (fn) == ADDR_EXPR
4321 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4322 		{
4323 		  if (dump_file && (dump_flags & TDF_DETAILS))
4324 		    {
4325 		      fprintf (dump_file, "Replacing call target with ");
4326 		      print_generic_expr (dump_file, fn, 0);
4327 		      fprintf (dump_file, " in ");
4328 		      print_gimple_stmt (dump_file, stmt, 0, 0);
4329 		    }
4330 
4331 		  gimple_call_set_fn (stmt, fn);
4332 		  update_stmt (stmt);
4333 
4334 		  /* When changing a call into a noreturn call, cfg cleanup
4335 		     is needed to fix up the noreturn call.  */
4336 		  if (!was_noreturn && gimple_call_noreturn_p (stmt))
4337 		    todo |= TODO_cleanup_cfg;
4338 
4339 		  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4340 		    {
4341 		      bitmap_set_bit (need_eh_cleanup,
4342 				      gimple_bb (stmt)->index);
4343 		      if (dump_file && (dump_flags & TDF_DETAILS))
4344 			fprintf (dump_file, "  Removed EH side effects.\n");
4345 		    }
4346 
4347 		  /* Changing an indirect call to a direct call may
4348 		     have exposed different semantics.  This may
4349 		     require an SSA update.  */
4350 		  todo |= TODO_update_ssa_only_virtuals;
4351 		}
4352 	    }
4353 	}
4354 
4355       for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4356 	{
4357 	  gimple stmt, phi = gsi_stmt (gsi);
4358 	  tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4359 	  pre_expr sprimeexpr, resexpr;
4360 	  gimple_stmt_iterator gsi2;
4361 
4362 	  /* We want to perform redundant PHI elimination.  Do so by
4363 	     replacing the PHI with a single copy if possible.
4364 	     Do not touch inserted, single-argument or virtual PHIs.  */
4365 	  if (gimple_phi_num_args (phi) == 1
4366 	      || !is_gimple_reg (res)
4367 	      || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res)))
4368 	    {
4369 	      gsi_next (&gsi);
4370 	      continue;
4371 	    }
4372 
4373 	  resexpr = get_or_alloc_expr_for_name (res);
4374 	  sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4375 					   get_expr_value_id (resexpr), NULL);
4376 	  if (sprimeexpr)
4377 	    {
4378 	      if (sprimeexpr->kind == CONSTANT)
4379 		sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4380 	      else if (sprimeexpr->kind == NAME)
4381 		sprime = PRE_EXPR_NAME (sprimeexpr);
4382 	      else
4383 		gcc_unreachable ();
4384 	    }
4385 	  if (!sprimeexpr
4386 	      || sprime == res)
4387 	    {
4388 	      gsi_next (&gsi);
4389 	      continue;
4390 	    }
4391 
4392 	  if (dump_file && (dump_flags & TDF_DETAILS))
4393 	    {
4394 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
4395 	      print_generic_expr (dump_file, res, 0);
4396 	      fprintf (dump_file, " with ");
4397 	      print_generic_expr (dump_file, sprime, 0);
4398 	      fprintf (dump_file, "\n");
4399 	    }
4400 
4401 	  remove_phi_node (&gsi, false);
4402 
4403 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4404 	    sprime = fold_convert (TREE_TYPE (res), sprime);
4405 	  stmt = gimple_build_assign (res, sprime);
4406 	  SSA_NAME_DEF_STMT (res) = stmt;
4407 	  if (TREE_CODE (sprime) == SSA_NAME)
4408 	    gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4409 			    NECESSARY, true);
4410 	  gsi2 = gsi_after_labels (b);
4411 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4412 	  /* Queue the copy for eventual removal.  */
4413 	  VEC_safe_push (gimple, heap, to_remove, stmt);
4414 	  pre_stats.eliminations++;
4415 	}
4416     }
4417 
4418   /* We cannot remove stmts during the BB walk, especially not release
4419      SSA names there, as this confuses the VN machinery.  The stmts ending
4420      up in to_remove are either stores or simple copies.  */
4421   for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
4422     {
4423       tree lhs = gimple_assign_lhs (stmt);
4424       tree rhs = gimple_assign_rhs1 (stmt);
4425       use_operand_p use_p;
4426       gimple use_stmt;
4427 
4428       /* If there is a single use only, propagate the equivalency
4429 	 instead of keeping the copy.  */
4430       if (TREE_CODE (lhs) == SSA_NAME
4431 	  && TREE_CODE (rhs) == SSA_NAME
4432 	  && single_imm_use (lhs, &use_p, &use_stmt)
4433 	  && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4434 	{
4435 	  SET_USE (use_p, gimple_assign_rhs1 (stmt));
4436 	  update_stmt (use_stmt);
4437 	}
4438 
4439       /* If this is a store or a now unused copy, remove it.  */
4440       if (TREE_CODE (lhs) != SSA_NAME
4441 	  || has_zero_uses (lhs))
4442 	{
4443 	  gsi = gsi_for_stmt (stmt);
4444 	  unlink_stmt_vdef (stmt);
4445 	  gsi_remove (&gsi, true);
4446 	  release_defs (stmt);
4447 	}
4448     }
4449   VEC_free (gimple, heap, to_remove);
4450 
4451   return todo;
4452 }
4453 
4454 /* Borrow a bit of tree-ssa-dce.c for the moment.
4455    XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4456    this may be a bit faster, and we may want critical edges kept split.  */
4457 
4458 /* If OP's defining statement has not already been determined to be necessary,
4459    mark that statement necessary. Return the stmt, if it is newly
4460    necessary.  */
4461 
4462 static inline gimple
4463 mark_operand_necessary (tree op)
4464 {
4465   gimple stmt;
4466 
4467   gcc_assert (op);
4468 
4469   if (TREE_CODE (op) != SSA_NAME)
4470     return NULL;
4471 
4472   stmt = SSA_NAME_DEF_STMT (op);
4473   gcc_assert (stmt);
4474 
4475   if (gimple_plf (stmt, NECESSARY)
4476       || gimple_nop_p (stmt))
4477     return NULL;
4478 
4479   gimple_set_plf (stmt, NECESSARY, true);
4480   return stmt;
4481 }
4482 
4483 /* Because we don't exactly follow the standard PRE algorithm, sometimes
4484    decide not to insert PHI nodes, and because value numbering of casts
4485    isn't perfect, we sometimes end up inserting dead code.  This simple DCE-like
4486    pass removes any insertions we made that weren't actually used.  */
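
/* This works as a simple mark-and-sweep:  seed the worklist with the
   insertions already marked NECESSARY, propagate necessity backwards
   through PHI arguments and SSA uses, and finally remove every insertion
   that never became necessary.  */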
4487 
4488 static void
4489 remove_dead_inserted_code (void)
4490 {
4491   VEC(gimple,heap) *worklist = NULL;
4492   int i;
4493   gimple t;
4494 
4495   worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
4496   for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
4497     {
4498       if (gimple_plf (t, NECESSARY))
4499 	VEC_quick_push (gimple, worklist, t);
4500     }
4501   while (VEC_length (gimple, worklist) > 0)
4502     {
4503       t = VEC_pop (gimple, worklist);
4504 
4505       /* PHI nodes are somewhat special in that each PHI alternative has
4506 	 data and control dependencies.  All the statements feeding the
4507 	 PHI node's arguments are always necessary. */
4508       if (gimple_code (t) == GIMPLE_PHI)
4509 	{
4510 	  unsigned k;
4511 
4512 	  VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
4513 	  for (k = 0; k < gimple_phi_num_args (t); k++)
4514 	    {
4515 	      tree arg = PHI_ARG_DEF (t, k);
4516 	      if (TREE_CODE (arg) == SSA_NAME)
4517 		{
4518 		  gimple n = mark_operand_necessary (arg);
4519 		  if (n)
4520 		    VEC_quick_push (gimple, worklist, n);
4521 		}
4522 	    }
4523 	}
4524       else
4525 	{
4526 	  /* Propagate through the operands.  Examine all the USE, VUSE and
4527 	     VDEF operands in this statement.  Mark all the statements
4528 	     which feed this statement's uses as necessary.  */
4529 	  ssa_op_iter iter;
4530 	  tree use;
4531 
4532 	  /* The operands of VDEF expressions are also needed as they
4533 	     represent potential definitions that may reach this
4534 	     statement (VDEF operands allow us to follow def-def
4535 	     links).  */
4536 
4537 	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4538 	    {
4539 	      gimple n = mark_operand_necessary (use);
4540 	      if (n)
4541 		VEC_safe_push (gimple, heap, worklist, n);
4542 	    }
4543 	}
4544     }
4545 
4546   for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
4547     {
4548       if (!gimple_plf (t, NECESSARY))
4549 	{
4550 	  gimple_stmt_iterator gsi;
4551 
4552 	  if (dump_file && (dump_flags & TDF_DETAILS))
4553 	    {
4554 	      fprintf (dump_file, "Removing unnecessary insertion:");
4555 	      print_gimple_stmt (dump_file, t, 0, 0);
4556 	    }
4557 
4558 	  gsi = gsi_for_stmt (t);
4559 	  if (gimple_code (t) == GIMPLE_PHI)
4560 	    remove_phi_node (&gsi, true);
4561 	  else
4562 	    {
4563 	      gsi_remove (&gsi, true);
4564 	      release_defs (t);
4565 	    }
4566 	}
4567     }
4568   VEC_free (gimple, heap, worklist);
4569 }
4570 
4571 /* Compute a reverse post-order in *POST_ORDER.  If INCLUDE_ENTRY_EXIT is
4572    true, then ENTRY_BLOCK and EXIT_BLOCK are included.  Returns
4573    the number of visited blocks.  */
4574 
4575 static int
4576 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4577 {
4578   edge_iterator *stack;
4579   int sp;
4580   int post_order_num = 0;
4581   sbitmap visited;
4582   int count;
4583 
4584   if (include_entry_exit)
4585     post_order[post_order_num++] = EXIT_BLOCK;
4586 
4587   /* Allocate stack for back-tracking up CFG.  */
4588   stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4589   sp = 0;
4590 
4591   /* Allocate bitmap to track nodes that have been visited.  */
4592   visited = sbitmap_alloc (last_basic_block);
4593 
4594   /* None of the nodes in the CFG have been visited yet.  */
4595   sbitmap_zero (visited);
4596 
4597   /* Push the last edge on to the stack.  */
4598   stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4599 
4600   while (sp)
4601     {
4602       edge_iterator ei;
4603       basic_block src;
4604       basic_block dest;
4605 
4606       /* Look at the edge on the top of the stack.  */
4607       ei = stack[sp - 1];
4608       src = ei_edge (ei)->src;
4609       dest = ei_edge (ei)->dest;
4610 
4611       /* Check if the edge source has been visited yet.  */
4612       if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4613         {
4614           /* Mark that we have visited the source.  */
4615           SET_BIT (visited, src->index);
4616 
4617           if (EDGE_COUNT (src->preds) > 0)
4618             /* Since the SRC node has been visited for the first
4619                time, check its predecessors.  */
4620             stack[sp++] = ei_start (src->preds);
4621           else
4622             post_order[post_order_num++] = src->index;
4623         }
4624       else
4625         {
4626           if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4627             post_order[post_order_num++] = dest->index;
4628 
4629           if (!ei_one_before_end_p (ei))
4630             ei_next (&stack[sp - 1]);
4631           else
4632             sp--;
4633         }
4634     }
4635 
4636   if (include_entry_exit)
4637     {
4638       post_order[post_order_num++] = ENTRY_BLOCK;
4639       count = post_order_num;
4640     }
4641   else
4642     count = post_order_num + 2;
4643 
4644   free (stack);
4645   sbitmap_free (visited);
4646   return post_order_num;
4647 }
4648 
4649 
4650 /* Initialize data structures used by PRE.  */
4651 
4652 static void
4653 init_pre (bool do_fre)
4654 {
4655   basic_block bb;
4656 
4657   next_expression_id = 1;
4658   expressions = NULL;
4659   VEC_safe_push (pre_expr, heap, expressions, NULL);
4660   value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
4661   VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
4662 			 get_max_value_id() + 1);
4663   name_to_id = NULL;
4664 
4665   in_fre = do_fre;
4666 
4667   inserted_exprs = NULL;
4668   need_creation = NULL;
4669   pretemp = NULL_TREE;
4670   storetemp = NULL_TREE;
4671   prephitemp = NULL_TREE;
4672 
4673   connect_infinite_loops_to_exit ();
4674   memset (&pre_stats, 0, sizeof (pre_stats));
4675 
4676 
4677   postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4678   my_rev_post_order_compute (postorder, false);
4679 
4680   FOR_ALL_BB (bb)
4681     bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);
4682 
4683   calculate_dominance_info (CDI_POST_DOMINATORS);
4684   calculate_dominance_info (CDI_DOMINATORS);
4685 
4686   bitmap_obstack_initialize (&grand_bitmap_obstack);
4687   inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack);
4688   phi_translate_table = htab_create (5110, expr_pred_trans_hash,
4689 				     expr_pred_trans_eq, free);
4690   expression_to_id = htab_create (num_ssa_names * 3,
4691 				  pre_expr_hash,
4692 				  pre_expr_eq, NULL);
4693   bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4694 				       sizeof (struct bitmap_set), 30);
4695   pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4696 				     sizeof (struct pre_expr_d), 30);
4697   FOR_ALL_BB (bb)
4698     {
4699       EXP_GEN (bb) = bitmap_set_new ();
4700       PHI_GEN (bb) = bitmap_set_new ();
4701       TMP_GEN (bb) = bitmap_set_new ();
4702       AVAIL_OUT (bb) = bitmap_set_new ();
4703     }
4704 
4705   need_eh_cleanup = BITMAP_ALLOC (NULL);
4706 }
4707 
4708 
4709 /* Deallocate data structures used by PRE.  */
4710 
4711 static void
4712 fini_pre (bool do_fre)
4713 {
4714   basic_block bb;
4715 
4716   free (postorder);
4717   VEC_free (bitmap_set_t, heap, value_expressions);
4718   VEC_free (gimple, heap, inserted_exprs);
4719   VEC_free (gimple, heap, need_creation);
4720   bitmap_obstack_release (&grand_bitmap_obstack);
4721   free_alloc_pool (bitmap_set_pool);
4722   free_alloc_pool (pre_expr_pool);
4723   htab_delete (phi_translate_table);
4724   htab_delete (expression_to_id);
4725   VEC_free (unsigned, heap, name_to_id);
4726 
4727   FOR_ALL_BB (bb)
4728     {
4729       free (bb->aux);
4730       bb->aux = NULL;
4731     }
4732 
4733   free_dominance_info (CDI_POST_DOMINATORS);
4734 
4735   if (!bitmap_empty_p (need_eh_cleanup))
4736     {
4737       gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4738       cleanup_tree_cfg ();
4739     }
4740 
4741   BITMAP_FREE (need_eh_cleanup);
4742 
4743   if (!do_fre)
4744     loop_optimizer_finalize ();
4745 }
4746 
4747 /* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
4748    only wants to do full redundancy elimination.  */
4749 
4750 static unsigned int
4751 execute_pre (bool do_fre)
4752 {
4753   unsigned int todo = 0;
4754 
4755   do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);
4756 
4757   /* This has to happen before SCCVN runs because
4758      loop_optimizer_init may create new phis, etc.  */
4759   if (!do_fre)
4760     loop_optimizer_init (LOOPS_NORMAL);
4761 
4762   if (!run_scc_vn (do_fre, do_fre ? VN_WALKREWRITE : VN_WALK))
4763     {
4764       if (!do_fre)
4765 	{
4766 	  remove_dead_inserted_code ();
4767 	  loop_optimizer_finalize ();
4768 	}
4769 
4770       return 0;
4771     }
4772   init_pre (do_fre);
4773   scev_initialize ();
4774 
4775 
4776   /* Collect and value number expressions computed in each basic block.  */
4777   compute_avail ();
4778 
4779   if (dump_file && (dump_flags & TDF_DETAILS))
4780     {
4781       basic_block bb;
4782 
4783       FOR_ALL_BB (bb)
4784 	{
4785 	  print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4786 	  print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
4787 	  print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
4788 	  print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
4789 	}
4790     }
4791 
4792   /* Insert can get quite slow on an incredibly large number of basic
4793      blocks due to some quadratic behavior.  Until this behavior is
4794      fixed, don't run it when we have an incredibly large number of
4795      bb's.  If we aren't going to run insert, there is no point in
4796      computing ANTIC, either, even though it's plenty fast.  */
4797   if (!do_fre && n_basic_blocks < 4000)
4798     {
4799       compute_antic ();
4800       insert ();
4801     }
4802 
4803   /* Remove all the redundant expressions.  */
4804   todo |= eliminate ();
4805 
4806   statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4807   statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4808   statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4809   statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4810   statistics_counter_event (cfun, "Constified", pre_stats.constified);
4811 
4812   /* Make sure to remove fake edges before committing our inserts.
4813      This makes sure we don't end up with extra critical edges that
4814      we would need to split.  */
4815   remove_fake_exit_edges ();
4816   gsi_commit_edge_inserts ();
4817 
4818   clear_expression_ids ();
4819   free_scc_vn ();
4820   if (!do_fre)
4821     remove_dead_inserted_code ();
4822 
4823   scev_finalize ();
4824   fini_pre (do_fre);
4825 
4826   return todo;
4827 }
4828 
4829 /* Gate and execute functions for PRE.  */
4830 
4831 static unsigned int
4832 do_pre (void)
4833 {
4834   return execute_pre (false);
4835 }
4836 
4837 static bool
4838 gate_pre (void)
4839 {
4840   return flag_tree_pre != 0;
4841 }
4842 
4843 struct gimple_opt_pass pass_pre =
4844 {
4845  {
4846   GIMPLE_PASS,
4847   "pre",				/* name */
4848   gate_pre,				/* gate */
4849   do_pre,				/* execute */
4850   NULL,					/* sub */
4851   NULL,					/* next */
4852   0,					/* static_pass_number */
4853   TV_TREE_PRE,				/* tv_id */
4854   PROP_no_crit_edges | PROP_cfg
4855     | PROP_ssa,				/* properties_required */
4856   0,					/* properties_provided */
4857   0,					/* properties_destroyed */
4858   TODO_rebuild_alias,			/* todo_flags_start */
4859   TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
4860   | TODO_verify_ssa /* todo_flags_finish */
4861  }
4862 };
4863 
4864 
4865 /* Gate and execute functions for FRE.  */
4866 
4867 static unsigned int
4868 execute_fre (void)
4869 {
4870   return execute_pre (true);
4871 }
4872 
4873 static bool
4874 gate_fre (void)
4875 {
4876   return flag_tree_fre != 0;
4877 }
4878 
4879 struct gimple_opt_pass pass_fre =
4880 {
4881  {
4882   GIMPLE_PASS,
4883   "fre",				/* name */
4884   gate_fre,				/* gate */
4885   execute_fre,				/* execute */
4886   NULL,					/* sub */
4887   NULL,					/* next */
4888   0,					/* static_pass_number */
4889   TV_TREE_FRE,				/* tv_id */
4890   PROP_cfg | PROP_ssa,			/* properties_required */
4891   0,					/* properties_provided */
4892   0,					/* properties_destroyed */
4893   0,					/* todo_flags_start */
4894   TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
4895  }
4896 };
4897