/* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "langhooks.h"
#include "alias.h"

/* Even though this file is called tree-ssa-pre.c, we actually
   implement a bit more than just PRE here.  All of these passes
   piggy-back on GVN, which is implemented in tree-ssa-sccvn.c.

     1. Full Redundancy Elimination (FRE)
	This is the elimination phase of GVN.

     2. Partial Redundancy Elimination (PRE)
	This adds computation of AVAIL_OUT and ANTIC_IN and
	does expression insertion to form GVN-PRE.

     3. Code hoisting
	This optimization uses the ANTIC_IN sets computed for PRE
	to move expressions further up than PRE would do, to make
	multiple computations of the same value fully redundant.
	This pass is explained below (after the explanation of the
	basic algorithm for PRE).
*/

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
      Currently the AVAIL_OUT sets are the remaining source of
      quadratic memory usage in GVN-PRE.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */

/* Basic algorithm for Partial Redundancy Elimination:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the up-safety
   (availability) of the expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   When optimizing for size, we only eliminate the partial redundancy
   if we need to insert in only one predecessor.  This almost
   completely avoids the code size increase that PRE usually causes.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   do_pre_regular_insertion/do_pre_partial_partial_insertion
   performs these steps, driven by insert/insert_aux.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
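
/* An illustrative example (ours, not part of the original comments):
   consider a diamond where only one arm computes a + b:

       if (cond)                      if (cond)
	 x = a + b;                     x = a + b;
       else           becomes        else
	 ;                              t = a + b;
       y = a + b;                     y = PHI-merge of (x, t);

   a + b is partially redundant: it is ANTIC in both predecessors of
   the join block but AVAIL only along the then-arm.  Inserting the
   computation into the else-arm makes it fully redundant, and the
   elimination walk then replaces the join-block computation with the
   value merged by a PHI node.  */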

/* Basic algorithm for Code Hoisting:

   Code hoisting is: Moving value computations up in the control flow
   graph to make multiple copies redundant.  Typically this is a size
   optimization, but there are cases where it also is helpful for speed.

   A simple code hoisting algorithm is implemented that piggy-backs on
   the PRE infrastructure.  For code hoisting, we have to know ANTIC_OUT
   which is effectively ANTIC_IN - AVAIL_OUT.  The latter two have to be
   computed for PRE, and we can use them to perform a limited version of
   code hoisting, too.

   For the purpose of this implementation, a value is hoistable to a basic
   block B if the following properties are met:

   1. The value is in ANTIC_IN(B) -- the value will be computed on all
      paths from B to function exit and it can be computed in B;

   2. The value is not in AVAIL_OUT(B) -- there would be no need to
      compute the value again and make it available twice;

   3. All successors of B are dominated by B -- makes sure that inserting
      a computation of the value in B will make the remaining
      computations fully redundant;

   4. At least one successor has the value in AVAIL_OUT -- to avoid
      hoisting values up too far;

   5. There are at least two successors of B -- hoisting in straight
      line code is pointless.

   The third condition is not strictly necessary, but it would complicate
   the hoisting pass a lot.  In fact, I don't know of any code hoisting
   algorithm that does not have this requirement.  Fortunately, experiments
   have shown that most candidate hoistable values are in regions that meet
   this condition (e.g. diamond-shaped regions).

   The fourth condition is necessary to avoid hoisting things up too far
   away from the uses of the value.  Nothing else limits the algorithm
   from hoisting everything up as far as ANTIC_IN allows.  Experiments
   with SPEC and CSiBE have shown that hoisting up too far results in more
   spilling, smaller benefits for code size, and worse benchmark scores.
   Fortunately, in practice most of the interesting hoisting opportunities
   are caught despite this limitation.

   For hoistable values that meet all conditions, expressions are inserted
   to make the calculation of the hoistable value fully redundant.  We
   perform code hoisting insertions after each round of PRE insertions,
   because code hoisting never exposes new PRE opportunities, but PRE can
   create new code hoisting opportunities.

   The code hoisting algorithm is implemented in do_hoist_insertion,
   driven by insert/insert_aux.  */
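
/* A hypothetical hoisting example (ours, not from the original
   comments):

       if (cond)                      tem = a + b;
	 x = a + b;                   if (cond)
       else           becomes          x = tem;
	 y = a + b;                   else
				        y = tem;

   a + b is in ANTIC_IN of the conditional block, not in its AVAIL_OUT,
   the block dominates both successors, both successors have the value
   in their AVAIL_OUT, and there are two successors, so all five
   conditions above hold and a single computation is inserted before
   the branch, making both arm computations fully redundant.  */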

/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
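
/* For illustration (an invented fragment, not from the original
   comments): after value numbering

       x_1 = a_2 + b_3;
       y_4 = a_2 + b_3;

   x_1 and y_4 share a single value number with x_1 as the
   representative SSA_NAME, and that value is also assigned a small
   integer value_id, so sets of values can be stored as bitmaps
   indexed by value_id rather than by SSA_NAME_VERSION.  */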

/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
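
/* A small invented example: a set containing a_1 (value-id 1) and the
   NARY a_1 + b_2 (value-id 3) is stored as

       expressions = { id (a_1), id (a_1 + b_2) }
       values      = { 1, 3 }

   so value membership is a single bit test, while the expression
   bitmap records which particular expressions carry those values.  */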

/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}

/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the allocated expression id of EXPR, or 0 if EXPR has not
   been assigned one yet.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit


/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of inserts made for code hoisting.  */
  int hoist_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static void bitmap_set_and (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;

/* A three-tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
			  const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
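
/* A usage sketch (illustration only; phi_translate below follows this
   exact pattern):

       expr_pred_trans_t slot;
       if (phi_trans_add (&slot, expr, pred))
	 return slot->v;    -- cached translation, possibly NULL
       slot->v = NULL;      -- pre-seed the slot to break recursion
       slot->v = ...the freshly computed translation of expr...;

   The bool result distinguishes "an entry already existed" from "a
   fresh slot was allocated for the caller to fill in".  */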


/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    {
      value_expressions.safe_grow_cleared (v + 1);
    }

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ???  We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}

/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

/* Insert EXPR with value id VAL into SET.  Constant values are only
   inserted when ALLOW_CONSTANTS is true.  */

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}

/* Copy bitmapped set ORIG into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}


/* Generate a topologically-ordered array from bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.quick_push (expression_for_id (j));
	}
    }

  return result;
}
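
/* Why value-id order is topological order (an explanatory note of
   ours, not from the original sources): value-ids are handed out as
   values are first created, and an expression can only combine values
   that already exist, so for x_1 = a_2 + b_3 the value-ids of a_2 and
   b_3 are smaller than the value-id of x_1.  Walking values in
   increasing value-id order therefore visits the leaders an
   expression depends on before the expression itself.  */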

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (&dest->values, value_id))
	    bitmap_clear_bit (&dest->expressions, i);
	}
      bitmap_clear (&temp);
    }
}

/* Return a new set containing the expressions in DEST that are not in
   ORIG, together with their values.  DEST itself is not modified.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

/* Return true if bitmapped set SET contains the expression EXPR.  */

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = value_expressions[lookfor];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
	{
	  bitmap_set_bit (&set->expressions, get_expression_id (expr));
	  return;
	}
    }

  gcc_unreachable ();
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}

/* Print out EXPR to OUTFILE.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
		fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}


DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}

/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As none of them
	 have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = pre_expr_pool.allocate ();
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pre_expr_pool.remove (e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}

/* Return the folded version of E if E, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	tree res = vn_nary_simplify (nary);
	if (!res)
	  return e;
	if (is_gimple_min_invariant (res))
	  return get_or_alloc_expr_for_constant (res);
	if (TREE_CODE (res) == SSA_NAME)
	  return get_or_alloc_expr_for_name (res);
	return e;
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
  return e;
}
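
/* For example (invented operands): a NARY {plus_expr,x_1,0} simplifies
   via vn_nary_simplify to x_1 and comes back as the NAME expression
   for x_1, while {plus_expr,3,4} folds to the CONSTANT 7.  A REFERENCE
   that reads from a constant initializer is likewise replaced by the
   CONSTANT it is known to produce.  */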

/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  unsigned int cnt;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
					   NULL, NULL);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
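
/* An illustrative scenario (ours): for a load *p_1 with VUSE .MEM_5
   where PHIBLOCK contains .MEM_5 = PHI <.MEM_4(BLOCK), .MEM_6(other)>,
   the translation towards BLOCK yields .MEM_4.  If .MEM_5 is instead
   defined by a store that provably cannot clobber *p_1, the loop above
   keeps walking that store's own VUSE until it reaches a PHI or a
   clobbering definition.  */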

/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  return result;
}

/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (e))->valnum;
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return VN_INFO (PRE_EXPR_NAME (rep))->valnum;
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ???  We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = name;
  /* ???  For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}
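
/* For example (invented): if a value is known only as the NARY
   a_2 + b_3 because no statement computes it into an SSA name on this
   path, a fresh pretmp SSA name is created, value-numbered to itself
   with the value-id of the expression, and returned as the
   representative operand.  */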



static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);
1418 
1419 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1420    the phis in PRED.  Return NULL if we can't find a leader for each part
1421    of the translated expression.  */
1422 
1423 static pre_expr
1424 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1425 		 basic_block pred, basic_block phiblock)
1426 {
1427   switch (expr->kind)
1428     {
1429     case NARY:
1430       {
1431 	unsigned int i;
1432 	bool changed = false;
1433 	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1434 	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1435 					   sizeof_vn_nary_op (nary->length));
1436 	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1437 
1438 	for (i = 0; i < newnary->length; i++)
1439 	  {
1440 	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1441 	      continue;
1442 	    else
1443 	      {
1444                 pre_expr leader, result;
1445 		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1446 		leader = find_leader_in_sets (op_val_id, set1, set2);
1447                 result = phi_translate (leader, set1, set2, pred, phiblock);
1448 		if (result && result != leader)
1449 		  newnary->op[i] = get_representative_for (result);
1450 		else if (!result)
1451 		  return NULL;
1452 
1453 		changed |= newnary->op[i] != nary->op[i];
1454 	      }
1455 	  }
1456 	if (changed)
1457 	  {
1458 	    pre_expr constant;
1459 	    unsigned int new_val_id;
1460 
1461 	    PRE_EXPR_NARY (expr) = newnary;
1462 	    constant = fully_constant_expression (expr);
1463 	    PRE_EXPR_NARY (expr) = nary;
1464 	    if (constant != expr)
1465 	      {
1466 		/* For non-CONSTANTs we have to make sure we can eventually
1467 		   insert the expression.  Which means we need to have a
1468 		   leader for it.  */
1469 		if (constant->kind != CONSTANT)
1470 		  {
1471 		    /* Do not allow simplifications to non-constants over
1472 		       backedges as this will likely result in a loop PHI node
1473 		       to be inserted and increased register pressure.
1474 		       See PR77498 - this avoids doing predcoms work in
1475 		       a less efficient way.  */
1476 		    if (find_edge (pred, phiblock)->flags & EDGE_DFS_BACK)
1477 		      ;
1478 		    else
1479 		      {
1480 			unsigned value_id = get_expr_value_id (constant);
1481 			constant = find_leader_in_sets (value_id, set1, set2);
1482 			if (constant)
1483 			  return constant;
1484 		      }
1485 		  }
1486 		else
1487 		  return constant;
1488 	      }
1489 
1490 	    tree result = vn_nary_op_lookup_pieces (newnary->length,
1491 						    newnary->opcode,
1492 						    newnary->type,
1493 						    &newnary->op[0],
1494 						    &nary);
1495 	    if (result && is_gimple_min_invariant (result))
1496 	      return get_or_alloc_expr_for_constant (result);
1497 
1498 	    expr = pre_expr_pool.allocate ();
1499 	    expr->kind = NARY;
1500 	    expr->id = 0;
1501 	    if (nary)
1502 	      {
1503 		PRE_EXPR_NARY (expr) = nary;
1504 		new_val_id = nary->value_id;
1505 		get_or_alloc_expression_id (expr);
1506 	      }
1507 	    else
1508 	      {
1509 		new_val_id = get_next_value_id ();
1510 		value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1511 		nary = vn_nary_op_insert_pieces (newnary->length,
1512 						 newnary->opcode,
1513 						 newnary->type,
1514 						 &newnary->op[0],
1515 						 result, new_val_id);
1516 		PRE_EXPR_NARY (expr) = nary;
1517 		get_or_alloc_expression_id (expr);
1518 	      }
1519 	    add_to_value (new_val_id, expr);
1520 	  }
1521 	return expr;
1522       }
1523       break;
1524 
1525     case REFERENCE:
1526       {
1527 	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1528 	vec<vn_reference_op_s> operands = ref->operands;
1529 	tree vuse = ref->vuse;
1530 	tree newvuse = vuse;
1531 	vec<vn_reference_op_s> newoperands = vNULL;
1532 	bool changed = false, same_valid = true;
1533 	unsigned int i, n;
1534 	vn_reference_op_t operand;
1535 	vn_reference_t newref;
1536 
1537 	for (i = 0; operands.iterate (i, &operand); i++)
1538 	  {
1539 	    pre_expr opresult;
1540 	    pre_expr leader;
1541 	    tree op[3];
1542 	    tree type = operand->type;
1543 	    vn_reference_op_s newop = *operand;
1544 	    op[0] = operand->op0;
1545 	    op[1] = operand->op1;
1546 	    op[2] = operand->op2;
1547 	    for (n = 0; n < 3; ++n)
1548 	      {
1549 		unsigned int op_val_id;
1550 		if (!op[n])
1551 		  continue;
1552 		if (TREE_CODE (op[n]) != SSA_NAME)
1553 		  {
1554 		    /* We can't possibly insert these.  */
1555 		    if (n != 0
1556 			&& !is_gimple_min_invariant (op[n]))
1557 		      break;
1558 		    continue;
1559 		  }
1560 		op_val_id = VN_INFO (op[n])->value_id;
1561 		leader = find_leader_in_sets (op_val_id, set1, set2);
1562 		opresult = phi_translate (leader, set1, set2, pred, phiblock);
1563 		if (opresult && opresult != leader)
1564 		  {
1565 		    tree name = get_representative_for (opresult);
1566 		    changed |= name != op[n];
1567 		    op[n] = name;
1568 		  }
1569 		else if (!opresult)
1570 		  break;
1571 	      }
1572 	    if (n != 3)
1573 	      {
1574 		newoperands.release ();
1575 		return NULL;
1576 	      }
1577 	    if (!changed)
1578 	      continue;
1579 	    if (!newoperands.exists ())
1580 	      newoperands = operands.copy ();
1581 	    /* We may have changed from an SSA_NAME to a constant */
1582 	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1583 	      newop.opcode = TREE_CODE (op[0]);
1584 	    newop.type = type;
1585 	    newop.op0 = op[0];
1586 	    newop.op1 = op[1];
1587 	    newop.op2 = op[2];
1588 	    newoperands[i] = newop;
1589 	  }
1590 	gcc_checking_assert (i == operands.length ());
1591 
1592 	if (vuse)
1593 	  {
1594 	    newvuse = translate_vuse_through_block (newoperands.exists ()
1595 						    ? newoperands : operands,
1596 						    ref->set, ref->type,
1597 						    vuse, phiblock, pred,
1598 						    &same_valid);
1599 	    if (newvuse == NULL_TREE)
1600 	      {
1601 		newoperands.release ();
1602 		return NULL;
1603 	      }
1604 	  }
1605 
1606 	if (changed || newvuse != vuse)
1607 	  {
1608 	    unsigned int new_val_id;
1609 	    pre_expr constant;
1610 
1611 	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1612 						      ref->type,
1613 						      newoperands.exists ()
1614 						      ? newoperands : operands,
1615 						      &newref, VN_WALK);
1616 	    if (result)
1617 	      newoperands.release ();
1618 
1619 	    /* We can always insert constants, so if we have a partial
1620 	       redundant constant load of another type try to translate it
1621 	       to a constant of appropriate type.  */
1622 	    if (result && is_gimple_min_invariant (result))
1623 	      {
1624 		tree tem = result;
1625 		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1626 		  {
1627 		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1628 		    if (tem && !is_gimple_min_invariant (tem))
1629 		      tem = NULL_TREE;
1630 		  }
1631 		if (tem)
1632 		  return get_or_alloc_expr_for_constant (tem);
1633 	      }
1634 
1635 	    /* If we'd have to convert things we would need to validate
1636 	       if we can insert the translated expression.  So fail
1637 	       here for now - we cannot insert an alias with a different
1638 	       type in the VN tables either, as that would assert.  */
1639 	    if (result
1640 		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1641 	      return NULL;
1642 	    else if (!result && newref
1643 		     && !useless_type_conversion_p (ref->type, newref->type))
1644 	      {
1645 		newoperands.release ();
1646 		return NULL;
1647 	      }
1648 
1649 	    expr = pre_expr_pool.allocate ();
1650 	    expr->kind = REFERENCE;
1651 	    expr->id = 0;
1652 
1653 	    if (newref)
1654 	      {
1655 		PRE_EXPR_REFERENCE (expr) = newref;
1656 		constant = fully_constant_expression (expr);
1657 		if (constant != expr)
1658 		  return constant;
1659 
1660 		new_val_id = newref->value_id;
1661 		get_or_alloc_expression_id (expr);
1662 	      }
1663 	    else
1664 	      {
1665 		if (changed || !same_valid)
1666 		  {
1667 		    new_val_id = get_next_value_id ();
1668 		    value_expressions.safe_grow_cleared
1669 		      (get_max_value_id () + 1);
1670 		  }
1671 		else
1672 		  new_val_id = ref->value_id;
1673 		if (!newoperands.exists ())
1674 		  newoperands = operands.copy ();
1675 		newref = vn_reference_insert_pieces (newvuse, ref->set,
1676 						     ref->type,
1677 						     newoperands,
1678 						     result, new_val_id);
1679 		newoperands = vNULL;
1680 		PRE_EXPR_REFERENCE (expr) = newref;
1681 		constant = fully_constant_expression (expr);
1682 		if (constant != expr)
1683 		  return constant;
1684 		get_or_alloc_expression_id (expr);
1685 	      }
1686 	    add_to_value (new_val_id, expr);
1687 	  }
1688 	newoperands.release ();
1689 	return expr;
1690       }
1691       break;
1692 
1693     case NAME:
1694       {
1695 	tree name = PRE_EXPR_NAME (expr);
1696 	gimple *def_stmt = SSA_NAME_DEF_STMT (name);
1697 	/* If the SSA name is defined by a PHI node in this block,
1698 	   translate it.  */
1699 	if (gimple_code (def_stmt) == GIMPLE_PHI
1700 	    && gimple_bb (def_stmt) == phiblock)
1701 	  {
1702 	    edge e = find_edge (pred, gimple_bb (def_stmt));
1703 	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1704 
1705 	    /* Handle constant. */
1706 	    if (is_gimple_min_invariant (def))
1707 	      return get_or_alloc_expr_for_constant (def);
1708 
1709 	    return get_or_alloc_expr_for_name (def);
1710 	  }
1711 	/* Otherwise return it unchanged - it will get removed if its
1712 	   value is not available in PRED's AVAIL_OUT set of expressions
1713 	   by the subtraction of TMP_GEN.  */
1714 	return expr;
1715       }
1716 
1717     default:
1718       gcc_unreachable ();
1719     }
1720 }
1721 
1722 /* Wrapper around phi_translate_1 providing caching functionality.  */
1723 
1724 static pre_expr
1725 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1726 	       basic_block pred, basic_block phiblock)
1727 {
1728   expr_pred_trans_t slot = NULL;
1729   pre_expr phitrans;
1730 
1731   if (!expr)
1732     return NULL;
1733 
1734   /* Constants contain no values that need translation.  */
1735   if (expr->kind == CONSTANT)
1736     return expr;
1737 
1738   if (value_id_constant_p (get_expr_value_id (expr)))
1739     return expr;
1740 
1741   /* Don't add translations of NAMEs as those are cheap to translate.  */
1742   if (expr->kind != NAME)
1743     {
1744       if (phi_trans_add (&slot, expr, pred))
1745 	return slot->v;
1746       /* Store NULL for the value we want to return in the case of
1747 	 recursing.  */
1748       slot->v = NULL;
1749     }
1750 
1751   /* Translate.  */
1752   phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1753 
1754   if (slot)
1755     {
1756       if (phitrans)
1757 	slot->v = phitrans;
1758       else
1759 	/* Remove failed translations again, as they cause the insert
1760 	   iteration to not pick up new opportunities reliably.  */
1761 	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
1762     }
1763 
1764   return phitrans;
1765 }
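
/* An illustrative sketch (hypothetical names, not from a testcase):
   if PHIBLOCK contains a_1 = PHI <a_5 (PRED), a_7 (OTHER)>, then
   translating the NARY {plus_expr, a_1, b_2} through the edge
   PRED->PHIBLOCK substitutes a_5 for a_1, yielding
   {plus_expr, a_5, b_2}, which is then looked up (and possibly
   entered) in the VN tables so its value can be tracked in PRED.  */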
1766 
1767 
1768 /* For each expression in SET, translate the values through phi nodes
1769    in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1770    expressions in DEST.  */
1771 
1772 static void
1773 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1774 		   basic_block phiblock)
1775 {
1776   vec<pre_expr> exprs;
1777   pre_expr expr;
1778   int i;
1779 
1780   if (gimple_seq_empty_p (phi_nodes (phiblock)))
1781     {
1782       bitmap_set_copy (dest, set);
1783       return;
1784     }
1785 
1786   exprs = sorted_array_from_bitmap_set (set);
1787   FOR_EACH_VEC_ELT (exprs, i, expr)
1788     {
1789       pre_expr translated;
1790       translated = phi_translate (expr, set, NULL, pred, phiblock);
1791       if (!translated)
1792 	continue;
1793 
1794       /* We might end up with multiple expressions from SET being
1795 	 translated to the same value.  In this case we do not want
1796 	 to retain the NARY or REFERENCE expression but prefer a NAME
1797 	 which would be the leader.  */
1798       if (translated->kind == NAME)
1799 	bitmap_value_replace_in_set (dest, translated);
1800       else
1801 	bitmap_value_insert_into_set (dest, translated);
1802     }
1803   exprs.release ();
1804 }
1805 
1806 /* Find the leader for a value (i.e., the name representing that
1807    value) in a given set, and return it.  Return NULL if no leader
1808    is found.  */
1809 
1810 static pre_expr
1811 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1812 {
1813   if (value_id_constant_p (val))
1814     {
1815       unsigned int i;
1816       bitmap_iterator bi;
1817       bitmap exprset = value_expressions[val];
1818 
1819       EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1820 	{
1821 	  pre_expr expr = expression_for_id (i);
1822 	  if (expr->kind == CONSTANT)
1823 	    return expr;
1824 	}
1825     }
1826   if (bitmap_set_contains_value (set, val))
1827     {
1828       /* Rather than walk the entire bitmap of expressions, and see
1829 	 whether any of them has the value we are looking for, we look
1830 	 at the reverse mapping, which tells us the set of expressions
1831 	 that have a given value (IE value->expressions with that
1832 	 value) and see if any of those expressions are in our set.
1833 	 The number of expressions per value is usually significantly
1834 	 less than the number of expressions in the set.  In fact, for
1835 	 large testcases, doing it this way is roughly 5-10x faster
1836 	 than walking the bitmap.
1837 	 If this is somehow a significant loss for some cases, we can
1838 	 choose which set to walk based on which set is smaller.  */
1839       unsigned int i;
1840       bitmap_iterator bi;
1841       bitmap exprset = value_expressions[val];
1842 
1843       EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1844 	return expression_for_id (i);
1845     }
1846   return NULL;
1847 }
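
/* Purely illustrative data: if value-id 7 is represented by the
   expressions {x_3, a_1 + b_2}, the lookup above intersects that
   usually tiny per-value bitmap with SET->expressions instead of
   scanning every expression in SET for a matching value.  */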
1848 
1849 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1850    BLOCK by seeing if it is not killed in the block.  Note that we are
1851    only determining whether there is a store that kills it.  Because
1852    of the order in which clean iterates over values, we are guaranteed
1853    that altered operands will have caused us to be eliminated from the
1854    ANTIC_IN set already.  */
1855 
1856 static bool
1857 value_dies_in_block_x (pre_expr expr, basic_block block)
1858 {
1859   tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1860   vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1861   gimple *def;
1862   gimple_stmt_iterator gsi;
1863   unsigned id = get_expression_id (expr);
1864   bool res = false;
1865   ao_ref ref;
1866 
1867   if (!vuse)
1868     return false;
1869 
1870   /* Lookup a previously calculated result.  */
1871   if (EXPR_DIES (block)
1872       && bitmap_bit_p (EXPR_DIES (block), id * 2))
1873     return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1874 
1875   /* A memory expression {e, VUSE} dies in the block if there is a
1876      statement that may clobber e.  If, starting the statement walk from
1877      the top of the basic block, a statement uses VUSE, then there can be
1878      no kill in between that use and the original statement that loaded
1879      {e, VUSE}, so we can stop walking.  */
1880   ref.base = NULL_TREE;
1881   for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1882     {
1883       tree def_vuse, def_vdef;
1884       def = gsi_stmt (gsi);
1885       def_vuse = gimple_vuse (def);
1886       def_vdef = gimple_vdef (def);
1887 
1888       /* Not a memory statement.  */
1889       if (!def_vuse)
1890 	continue;
1891 
1892       /* Not a may-def.  */
1893       if (!def_vdef)
1894 	{
1895 	  /* A load with the same VUSE, we're done.  */
1896 	  if (def_vuse == vuse)
1897 	    break;
1898 
1899 	  continue;
1900 	}
1901 
1902       /* Init ref only if we really need it.  */
1903       if (ref.base == NULL_TREE
1904 	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1905 					     refx->operands))
1906 	{
1907 	  res = true;
1908 	  break;
1909 	}
1910       /* If the statement may clobber expr, it dies.  */
1911       if (stmt_may_clobber_ref_p_1 (def, &ref))
1912 	{
1913 	  res = true;
1914 	  break;
1915 	}
1916     }
1917 
1918   /* Remember the result.  */
1919   if (!EXPR_DIES (block))
1920     EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1921   bitmap_set_bit (EXPR_DIES (block), id * 2);
1922   if (res)
1923     bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1924 
1925   return res;
1926 }
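
/* A hypothetical fragment for illustration: assume the load of *p_1
   with VUSE .MEM_3 sits in a successor block and is ANTIC at the top
   of BLOCK, and BLOCK contains

     # .MEM_5 = VDEF <.MEM_3>
     *p_1 = x_2;

   The store is a may-def whose ao_ref clobbers *p_1, so the value
   {*p_1, .MEM_3} dies in BLOCK and will be pruned from the ANTIC sets
   by prune_clobbered_mems below.  */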
1927 
1928 
1929 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1930    contains its value-id.  */
1931 
1932 static bool
1933 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1934 {
1935   if (op && TREE_CODE (op) == SSA_NAME)
1936     {
1937       unsigned int value_id = VN_INFO (op)->value_id;
1938       if (!(bitmap_set_contains_value (set1, value_id)
1939 	    || (set2 && bitmap_set_contains_value  (set2, value_id))))
1940 	return false;
1941     }
1942   return true;
1943 }
1944 
1945 /* Determine if the expression EXPR is valid in SET1 U SET2.
1946    ONLY SET2 CAN BE NULL.
1947    This means that we have a leader for each part of the expression
1948    (if it consists of values), or the expression is an SSA_NAME.
1949    For loads/calls, we also see if the vuse is killed in this block.  */
1950 
1951 static bool
1952 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1953 {
1954   switch (expr->kind)
1955     {
1956     case NAME:
1957       /* By construction all NAMEs are available.  Non-available
1958 	 NAMEs are removed by subtracting TMP_GEN from the sets.  */
1959       return true;
1960     case NARY:
1961       {
1962 	unsigned int i;
1963 	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1964 	for (i = 0; i < nary->length; i++)
1965 	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
1966 	    return false;
1967 	return true;
1968       }
1969       break;
1970     case REFERENCE:
1971       {
1972 	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1973 	vn_reference_op_t vro;
1974 	unsigned int i;
1975 
1976 	FOR_EACH_VEC_ELT (ref->operands, i, vro)
1977 	  {
1978 	    if (!op_valid_in_sets (set1, set2, vro->op0)
1979 		|| !op_valid_in_sets (set1, set2, vro->op1)
1980 		|| !op_valid_in_sets (set1, set2, vro->op2))
1981 	      return false;
1982 	  }
1983 	return true;
1984       }
1985     default:
1986       gcc_unreachable ();
1987     }
1988 }
1989 
1990 /* Clean the set of expressions that are no longer valid in SET1 or
1991    SET2.  This means expressions that are made up of values we have no
1992    leaders for in SET1 or SET2.  This version is used for partial
1993    anticipation, where an expression may have its leaders in either
1994    ANTIC_IN or PA_IN.  */
1995 
1996 static void
1997 dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
1998 {
1999   vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
2000   pre_expr expr;
2001   int i;
2002 
2003   FOR_EACH_VEC_ELT (exprs, i, expr)
2004     {
2005       if (!valid_in_sets (set1, set2, expr))
2006 	bitmap_remove_from_set (set1, expr);
2007     }
2008   exprs.release ();
2009 }
2010 
2011 /* Clean the set of expressions that are no longer valid in SET.  This
2012    means expressions that are made up of values we have no leaders for
2013    in SET.  */
2014 
2015 static void
2016 clean (bitmap_set_t set)
2017 {
2018   vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
2019   pre_expr expr;
2020   int i;
2021 
2022   FOR_EACH_VEC_ELT (exprs, i, expr)
2023     {
2024       if (!valid_in_sets (set, NULL, expr))
2025 	bitmap_remove_from_set (set, expr);
2026     }
2027   exprs.release ();
2028 }
2029 
2030 /* Clean the set of expressions that are no longer valid in SET because
2031    they are clobbered in BLOCK or because they trap and may not be executed.  */
2032 
2033 static void
2034 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2035 {
2036   bitmap_iterator bi;
2037   unsigned i;
2038   pre_expr to_remove = NULL;
2039 
2040   FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2041     {
2042       /* Remove queued expr.  */
2043       if (to_remove)
2044 	{
2045 	  bitmap_remove_from_set (set, to_remove);
2046 	  to_remove = NULL;
2047 	}
2048 
2049       pre_expr expr = expression_for_id (i);
2050       if (expr->kind == REFERENCE)
2051 	{
2052 	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2053 	  if (ref->vuse)
2054 	    {
2055 	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2056 	      if (!gimple_nop_p (def_stmt)
2057 		  && ((gimple_bb (def_stmt) != block
2058 		       && !dominated_by_p (CDI_DOMINATORS,
2059 					   block, gimple_bb (def_stmt)))
2060 		      || (gimple_bb (def_stmt) == block
2061 			  && value_dies_in_block_x (expr, block))))
2062 		to_remove = expr;
2063 	    }
2064 	}
2065       else if (expr->kind == NARY)
2066 	{
2067 	  vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2068 	  /* If the NARY may trap make sure the block does not contain
2069 	     a possible exit point.
2070 	     ???  This is overly conservative if we translate AVAIL_OUT
2071 	     as the available expression might be after the exit point.  */
2072 	  if (BB_MAY_NOTRETURN (block)
2073 	      && vn_nary_may_trap (nary))
2074 	    to_remove = expr;
2075 	}
2076     }
2077 
2078   /* Remove queued expr.  */
2079   if (to_remove)
2080     bitmap_remove_from_set (set, to_remove);
2081 }
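
/* A sketch of the NARY case above (hypothetical): if BLOCK contains a
   call that might not return and the set contains the possibly
   trapping NARY a_1 / b_2, keeping the division anticipated would
   allow hoisting it above the call and could introduce a trap on a
   path that previously exited through the call, so it is removed.  */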
2082 
2083 static sbitmap has_abnormal_preds;
2084 
2085 /* Compute the ANTIC set for BLOCK.
2086 
2087    If succs(BLOCK) > 1 then
2088      ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2089    else if succs(BLOCK) == 1 then
2090      ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2091 
2092    ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2093 
2094    Note that clean() is deferred until after the iteration.  */
2095 
2096 static bool
2097 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2098 {
2099   bool changed = false;
2100   bitmap_set_t S, old, ANTIC_OUT;
2101   bitmap_iterator bi;
2102   unsigned int bii;
2103   edge e;
2104   edge_iterator ei;
2105   bool was_visited = BB_VISITED (block);
2106 
2107   old = ANTIC_OUT = S = NULL;
2108   BB_VISITED (block) = 1;
2109 
2110   /* If any edges from predecessors are abnormal, antic_in is empty,
2111      so do nothing.  */
2112   if (block_has_abnormal_pred_edge)
2113     goto maybe_dump_sets;
2114 
2115   old = ANTIC_IN (block);
2116   ANTIC_OUT = bitmap_set_new ();
2117 
2118   /* If the block has no successors, ANTIC_OUT is empty.  */
2119   if (EDGE_COUNT (block->succs) == 0)
2120     ;
2121   /* If we have one successor, we could have some phi nodes to
2122      translate through.  */
2123   else if (single_succ_p (block))
2124     {
2125       basic_block succ_bb = single_succ (block);
2126       gcc_assert (BB_VISITED (succ_bb));
2127       phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2128     }
2129   /* If we have multiple successors, we take the intersection of all of
2130      them.  Note that in the case of loop exit phi nodes, we may have
2131      phis to translate through.  */
2132   else
2133     {
2134       size_t i;
2135       basic_block bprime, first = NULL;
2136 
2137       auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2138       FOR_EACH_EDGE (e, ei, block->succs)
2139 	{
2140 	  if (!first
2141 	      && BB_VISITED (e->dest))
2142 	    first = e->dest;
2143 	  else if (BB_VISITED (e->dest))
2144 	    worklist.quick_push (e->dest);
2145 	  else
2146 	    {
2147 	      /* Unvisited successors get their ANTIC_IN replaced by the
2148 		 maximal set to arrive at a maximum ANTIC_IN solution.
2149 		 We can ignore them in the intersection operation and thus
2150 		 need not explicitly represent that maximum solution.  */
2151 	      if (dump_file && (dump_flags & TDF_DETAILS))
2152 		fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2153 			 e->src->index, e->dest->index);
2154 	    }
2155 	}
2156 
2157       /* With multiple successors we must already have visited at least
2158          one of them, which is guaranteed by the iteration order.  */
2159       gcc_assert (first != NULL);
2160 
2161       phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2162 
2163       FOR_EACH_VEC_ELT (worklist, i, bprime)
2164 	{
2165 	  if (!gimple_seq_empty_p (phi_nodes (bprime)))
2166 	    {
2167 	      bitmap_set_t tmp = bitmap_set_new ();
2168 	      phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2169 	      bitmap_set_and (ANTIC_OUT, tmp);
2170 	      bitmap_set_free (tmp);
2171 	    }
2172 	  else
2173 	    bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2174 	}
2175     }
2176 
2177   /* Prune expressions that are clobbered in block and thus become
2178      invalid if translated from ANTIC_OUT to ANTIC_IN.  */
2179   prune_clobbered_mems (ANTIC_OUT, block);
2180 
2181   /* Generate ANTIC_OUT - TMP_GEN.  */
2182   S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2183 
2184   /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
2185   ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2186 					  TMP_GEN (block));
2187 
2188   /* Then union in the ANTIC_OUT - TMP_GEN values,
2189      to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2190   FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2191     bitmap_value_insert_into_set (ANTIC_IN (block),
2192 				  expression_for_id (bii));
2193 
2194   /* clean (ANTIC_IN (block)) is deferred until after the iteration has
2195      converged because it can cause non-convergence, see for example PR81181.  */
2196 
2197   if (!was_visited || !bitmap_set_equal (old, ANTIC_IN (block)))
2198     changed = true;
2199 
2200  maybe_dump_sets:
2201   if (dump_file && (dump_flags & TDF_DETAILS))
2202     {
2203       if (ANTIC_OUT)
2204 	print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2205 
2206       if (changed)
2207 	fprintf (dump_file, "[changed] ");
2208       print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2209 			block->index);
2210 
2211       if (S)
2212 	print_bitmap_set (dump_file, S, "S", block->index);
2213     }
2214   if (old)
2215     bitmap_set_free (old);
2216   if (S)
2217     bitmap_set_free (S);
2218   if (ANTIC_OUT)
2219     bitmap_set_free (ANTIC_OUT);
2220   return changed;
2221 }
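
/* A small worked example of the equations above (illustrative only):

     bb2: if (cond_6) goto bb3; else goto bb4;
     bb3: x_1 = a_2 + b_3;
     bb4: y_4 = a_2 + b_3;

   EXP_GEN of bb3 and of bb4 both contain a_2 + b_3, so after
   subtracting the TMP_GEN sets both ANTIC_IN[bb3] and ANTIC_IN[bb4]
   contain its value.  bb2 has two successors, hence ANTIC_OUT[bb2] is
   the intersection ANTIC_IN[bb3] & ANTIC_IN[bb4] and the expression
   is fully anticipatable at the end of bb2.  */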
2222 
2223 /* Compute PARTIAL_ANTIC for BLOCK.
2224 
2225    If succs(BLOCK) > 1 then
2226      PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2227      in ANTIC_OUT for all succ(BLOCK)
2228    else if succs(BLOCK) == 1 then
2229      PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2230 
2231    PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2232 				  - ANTIC_IN[BLOCK])
2233 
2234 */
2235 static void
2236 compute_partial_antic_aux (basic_block block,
2237 			   bool block_has_abnormal_pred_edge)
2238 {
2239   bitmap_set_t old_PA_IN;
2240   bitmap_set_t PA_OUT;
2241   edge e;
2242   edge_iterator ei;
2243   unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2244 
2245   old_PA_IN = PA_OUT = NULL;
2246 
2247   /* If any edges from predecessors are abnormal, antic_in is empty,
2248      so do nothing.  */
2249   if (block_has_abnormal_pred_edge)
2250     goto maybe_dump_sets;
2251 
2252   /* If there are too many partially anticipatable values in the
2253      block, phi_translate_set can take exponential time: stop
2254      before the translation starts.  */
2255   if (max_pa
2256       && single_succ_p (block)
2257       && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2258     goto maybe_dump_sets;
2259 
2260   old_PA_IN = PA_IN (block);
2261   PA_OUT = bitmap_set_new ();
2262 
2263   /* If the block has no successors, PA_OUT is empty.  */
2264   if (EDGE_COUNT (block->succs) == 0)
2265     ;
2266   /* If we have one successor, we could have some phi nodes to
2267      translate through.  Note that we can't phi translate across DFS
2268      back edges in partial antic, because it uses a union operation on
2269      the successors.  For recurrences like IVs, we will end up
2270      generating a new value in the set on each go around (i + 3 (VH.1),
2271      VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
2272   else if (single_succ_p (block))
2273     {
2274       basic_block succ = single_succ (block);
2275       if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2276 	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2277     }
2278   /* If we have multiple successors, we take the union of all of
2279      them.  */
2280   else
2281     {
2282       size_t i;
2283       basic_block bprime;
2284 
2285       auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2286       FOR_EACH_EDGE (e, ei, block->succs)
2287 	{
2288 	  if (e->flags & EDGE_DFS_BACK)
2289 	    continue;
2290 	  worklist.quick_push (e->dest);
2291 	}
2292       if (worklist.length () > 0)
2293 	{
2294 	  FOR_EACH_VEC_ELT (worklist, i, bprime)
2295 	    {
2296 	      unsigned int i;
2297 	      bitmap_iterator bi;
2298 
2299 	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2300 		bitmap_value_insert_into_set (PA_OUT,
2301 					      expression_for_id (i));
2302 	      if (!gimple_seq_empty_p (phi_nodes (bprime)))
2303 		{
2304 		  bitmap_set_t pa_in = bitmap_set_new ();
2305 		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2306 		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2307 		    bitmap_value_insert_into_set (PA_OUT,
2308 						  expression_for_id (i));
2309 		  bitmap_set_free (pa_in);
2310 		}
2311 	      else
2312 		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2313 		  bitmap_value_insert_into_set (PA_OUT,
2314 						expression_for_id (i));
2315 	    }
2316 	}
2317     }
2318 
2319   /* Prune expressions that are clobbered in block and thus become
2320      invalid if translated from PA_OUT to PA_IN.  */
2321   prune_clobbered_mems (PA_OUT, block);
2322 
2323   /* PA_IN starts with PA_OUT - TMP_GEN.
2324      Then we subtract things from ANTIC_IN.  */
2325   PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2326 
2327   /* For partial antic, we want to put the phi results back in, since
2328      we will properly avoid making them partially antic over backedges.  */
2329   bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2330   bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2331 
2332   /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2333   bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2334 
2335   dependent_clean (PA_IN (block), ANTIC_IN (block));
2336 
2337  maybe_dump_sets:
2338   if (dump_file && (dump_flags & TDF_DETAILS))
2339     {
2340       if (PA_OUT)
2341 	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2342 
2343       print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2344     }
2345   if (old_PA_IN)
2346     bitmap_set_free (old_PA_IN);
2347   if (PA_OUT)
2348     bitmap_set_free (PA_OUT);
2349 }
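
/* Contrast with compute_antic_aux: because PA_OUT is a union rather
   than an intersection, a value computed along just one outgoing path
   (say only in the bb3 arm of a conditional ending bb2) still ends up
   in PA_OUT[bb2].  That is what enables the speculative "partial
   partial" insertion done for -ftree-partial-pre.  */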
2350 
2351 /* Compute ANTIC and partial ANTIC sets.  */
2352 
2353 static void
2354 compute_antic (void)
2355 {
2356   bool changed = true;
2357   int num_iterations = 0;
2358   basic_block block;
2359   int i;
2360   edge_iterator ei;
2361   edge e;
2362 
2363   /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2364      We pre-build the map of blocks with incoming abnormal edges here.  */
2365   has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2366   bitmap_clear (has_abnormal_preds);
2367 
2368   FOR_ALL_BB_FN (block, cfun)
2369     {
2370       BB_VISITED (block) = 0;
2371 
2372       FOR_EACH_EDGE (e, ei, block->preds)
2373 	if (e->flags & EDGE_ABNORMAL)
2374 	  {
2375 	    bitmap_set_bit (has_abnormal_preds, block->index);
2376 
2377 	    /* We also anticipate nothing.  */
2378 	    BB_VISITED (block) = 1;
2379 	    break;
2380 	  }
2381 
2382       /* While we are here, give empty ANTIC_IN sets to each block.  */
2383       ANTIC_IN (block) = bitmap_set_new ();
2384       if (do_partial_partial)
2385 	PA_IN (block) = bitmap_set_new ();
2386     }
2387 
2388   /* At the exit block we anticipate nothing.  */
2389   BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2390 
2391   /* For ANTIC computation we need a postorder that also guarantees that
2392      a block with a single successor is visited after its successor.
2393      RPO on the inverted CFG has this property.  */
2394   int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
2395   int postorder_num = inverted_post_order_compute (postorder);
2396 
2397   auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
2398   bitmap_ones (worklist);
2399   while (changed)
2400     {
2401       if (dump_file && (dump_flags & TDF_DETAILS))
2402 	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2403       /* ???  We need to clear our PHI translation cache here as the
2404          ANTIC sets shrink and we restrict valid translations to
2405 	 those having operands with leaders in ANTIC.  Same below
2406 	 for PA ANTIC computation.  */
2407       num_iterations++;
2408       changed = false;
2409       for (i = postorder_num - 1; i >= 0; i--)
2410 	{
2411 	  if (bitmap_bit_p (worklist, postorder[i]))
2412 	    {
2413 	      basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2414 	      bitmap_clear_bit (worklist, block->index);
2415 	      if (compute_antic_aux (block,
2416 				     bitmap_bit_p (has_abnormal_preds,
2417 						   block->index)))
2418 		{
2419 		  FOR_EACH_EDGE (e, ei, block->preds)
2420 		    bitmap_set_bit (worklist, e->src->index);
2421 		  changed = true;
2422 		}
2423 	    }
2424 	}
2425       /* Theoretically possible, but *highly* unlikely.  */
2426       gcc_checking_assert (num_iterations < 500);
2427     }
2428 
2429   /* We have to clean after the dataflow problem converged as cleaning
2430      can cause non-convergence because it is based on expressions
2431      rather than values.  */
2432   FOR_EACH_BB_FN (block, cfun)
2433     clean (ANTIC_IN (block));
2434 
2435   statistics_histogram_event (cfun, "compute_antic iterations",
2436 			      num_iterations);
2437 
2438   if (do_partial_partial)
2439     {
2440       /* For partial antic we ignore backedges and thus we do not need
2441          to perform any iteration when we process blocks in postorder.  */
2442       postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
2443       for (i = postorder_num - 1 ; i >= 0; i--)
2444 	{
2445 	  basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2446 	  compute_partial_antic_aux (block,
2447 				     bitmap_bit_p (has_abnormal_preds,
2448 						   block->index));
2449 	}
2450     }
2451 
2452   sbitmap_free (has_abnormal_preds);
2453   free (postorder);
2454 }
2455 
2456 
2457 /* Inserted expressions are placed onto this worklist, which is used
2458    for performing quick dead code elimination of insertions we made
2459    that didn't turn out to be necessary.  */
2460 static bitmap inserted_exprs;
2461 
2462 /* The actual worker for create_component_ref_by_pieces.  */
2463 
2464 static tree
2465 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2466 				  unsigned int *operand, gimple_seq *stmts)
2467 {
2468   vn_reference_op_t currop = &ref->operands[*operand];
2469   tree genop;
2470   ++*operand;
2471   switch (currop->opcode)
2472     {
2473     case CALL_EXPR:
2474       gcc_unreachable ();
2475 
2476     case MEM_REF:
2477       {
2478 	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2479 							stmts);
2480 	if (!baseop)
2481 	  return NULL_TREE;
2482 	tree offset = currop->op0;
2483 	if (TREE_CODE (baseop) == ADDR_EXPR
2484 	    && handled_component_p (TREE_OPERAND (baseop, 0)))
2485 	  {
2486 	    HOST_WIDE_INT off;
2487 	    tree base;
2488 	    base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2489 						  &off);
2490 	    gcc_assert (base);
2491 	    offset = int_const_binop (PLUS_EXPR, offset,
2492 				      build_int_cst (TREE_TYPE (offset),
2493 						     off));
2494 	    baseop = build_fold_addr_expr (base);
2495 	  }
2496 	genop = build2 (MEM_REF, currop->type, baseop, offset);
2497 	MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2498 	MR_DEPENDENCE_BASE (genop) = currop->base;
2499 	REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2500 	return genop;
2501       }
2502 
2503     case TARGET_MEM_REF:
2504       {
2505 	tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2506 	vn_reference_op_t nextop = &ref->operands[++*operand];
2507 	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2508 							stmts);
2509 	if (!baseop)
2510 	  return NULL_TREE;
2511 	if (currop->op0)
2512 	  {
2513 	    genop0 = find_or_generate_expression (block, currop->op0, stmts);
2514 	    if (!genop0)
2515 	      return NULL_TREE;
2516 	  }
2517 	if (nextop->op0)
2518 	  {
2519 	    genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2520 	    if (!genop1)
2521 	      return NULL_TREE;
2522 	  }
2523 	genop = build5 (TARGET_MEM_REF, currop->type,
2524 			baseop, currop->op2, genop0, currop->op1, genop1);
2525 
2526 	MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2527 	MR_DEPENDENCE_BASE (genop) = currop->base;
2528 	return genop;
2529       }
2530 
2531     case ADDR_EXPR:
2532       if (currop->op0)
2533 	{
2534 	  gcc_assert (is_gimple_min_invariant (currop->op0));
2535 	  return currop->op0;
2536 	}
2537       /* Fallthrough.  */
2538     case REALPART_EXPR:
2539     case IMAGPART_EXPR:
2540     case VIEW_CONVERT_EXPR:
2541       {
2542 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2543 							stmts);
2544 	if (!genop0)
2545 	  return NULL_TREE;
2546 	return fold_build1 (currop->opcode, currop->type, genop0);
2547       }
2548 
2549     case WITH_SIZE_EXPR:
2550       {
2551 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2552 							stmts);
2553 	if (!genop0)
2554 	  return NULL_TREE;
2555 	tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2556 	if (!genop1)
2557 	  return NULL_TREE;
2558 	return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2559       }
2560 
2561     case BIT_FIELD_REF:
2562       {
2563 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2564 							stmts);
2565 	if (!genop0)
2566 	  return NULL_TREE;
2567 	tree op1 = currop->op0;
2568 	tree op2 = currop->op1;
2569 	tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2570 	REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2571 	return fold (t);
2572       }
2573 
2574       /* For array ref vn_reference_op's, operand 1 of the array ref
2575 	 is op0 of the reference op and operand 3 of the array ref is
2576 	 op1.  */
2577     case ARRAY_RANGE_REF:
2578     case ARRAY_REF:
2579       {
2580 	tree genop0;
2581 	tree genop1 = currop->op0;
2582 	tree genop2 = currop->op1;
2583 	tree genop3 = currop->op2;
2584 	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2585 						   stmts);
2586 	if (!genop0)
2587 	  return NULL_TREE;
2588 	genop1 = find_or_generate_expression (block, genop1, stmts);
2589 	if (!genop1)
2590 	  return NULL_TREE;
2591 	if (genop2)
2592 	  {
2593 	    tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2594 	    /* Drop zero minimum index if redundant.  */
2595 	    if (integer_zerop (genop2)
2596 		&& (!domain_type
2597 		    || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2598 	      genop2 = NULL_TREE;
2599 	    else
2600 	      {
2601 		genop2 = find_or_generate_expression (block, genop2, stmts);
2602 		if (!genop2)
2603 		  return NULL_TREE;
2604 	      }
2605 	  }
2606 	if (genop3)
2607 	  {
2608 	    tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2609 	    /* We can't always put a size in units of the element alignment
2610 	       here as the element alignment may not be visible.  See
2611 	       PR43783.  Simply drop the element size for constant
2612 	       sizes.  */
2613 	    if (TREE_CODE (genop3) == INTEGER_CST
2614 		&& TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
2615 		&& wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
2616 			     (wi::to_offset (genop3)
2617 			      * vn_ref_op_align_unit (currop))))
2618 	      genop3 = NULL_TREE;
2619 	    else
2620 	      {
2621 		genop3 = find_or_generate_expression (block, genop3, stmts);
2622 		if (!genop3)
2623 		  return NULL_TREE;
2624 	      }
2625 	  }
2626 	return build4 (currop->opcode, currop->type, genop0, genop1,
2627 		       genop2, genop3);
2628       }
2629     case COMPONENT_REF:
2630       {
2631 	tree op0;
2632 	tree op1;
2633 	tree genop2 = currop->op1;
2634 	op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2635 	if (!op0)
2636 	  return NULL_TREE;
2637 	/* op1 should be a FIELD_DECL, which are represented by themselves.  */
2638 	op1 = currop->op0;
2639 	if (genop2)
2640 	  {
2641 	    genop2 = find_or_generate_expression (block, genop2, stmts);
2642 	    if (!genop2)
2643 	      return NULL_TREE;
2644 	  }
2645 	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2646       }
2647 
2648     case SSA_NAME:
2649       {
2650 	genop = find_or_generate_expression (block, currop->op0, stmts);
2651 	return genop;
2652       }
2653     case STRING_CST:
2654     case INTEGER_CST:
2655     case COMPLEX_CST:
2656     case VECTOR_CST:
2657     case REAL_CST:
2658     case CONSTRUCTOR:
2659     case VAR_DECL:
2660     case PARM_DECL:
2661     case CONST_DECL:
2662     case RESULT_DECL:
2663     case FUNCTION_DECL:
2664       return currop->op0;
2665 
2666     default:
2667       gcc_unreachable ();
2668     }
2669 }
2670 
2671 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2672    COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2673    trying to rename aggregates into SSA form directly, which is a no-no.
2674 
2675    Thus, this routine doesn't create temporaries, it just builds a
2676    single access expression for the array, calling
2677    find_or_generate_expression to build the innermost pieces.
2678 
2679    This function is a subroutine of create_expression_by_pieces, and
2680    should not be called on its own unless you really know what you
2681    are doing.  */
2682 
2683 static tree
2684 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2685 				gimple_seq *stmts)
2686 {
2687   unsigned int op = 0;
2688   return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2689 }
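
/* For example (hypothetical reference): for operands describing
   a.b[i_1] the worker above starts at the outermost vn_reference_op
   (the ARRAY_REF) and recurses down to the base decl, rebuilding the
   access as a single tree, while scalar pieces such as the index i_1
   go through find_or_generate_expression and may cause statements to
   be appended to STMTS.  */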
2690 
2691 /* Find a simple leader for an expression, or generate one using
2692    create_expression_by_pieces from a NARY expression for the value.
2693    BLOCK is the basic_block we are looking for leaders in.
2694    OP is the tree expression to find a leader for or generate.
2695    Returns the leader or NULL_TREE on failure.  */
2696 
2697 static tree
2698 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2699 {
2700   pre_expr expr = get_or_alloc_expr_for (op);
2701   unsigned int lookfor = get_expr_value_id (expr);
2702   pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2703   if (leader)
2704     {
2705       if (leader->kind == NAME)
2706 	return PRE_EXPR_NAME (leader);
2707       else if (leader->kind == CONSTANT)
2708 	return PRE_EXPR_CONSTANT (leader);
2709 
2710       /* Defer.  */
2711       return NULL_TREE;
2712     }
2713 
2714   /* It must be a complex expression, so generate it recursively.  Note
2715      that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2716      where the insert algorithm fails to insert a required expression.  */
2717   bitmap exprset = value_expressions[lookfor];
2718   bitmap_iterator bi;
2719   unsigned int i;
2720   EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2721     {
2722       pre_expr temp = expression_for_id (i);
2723       /* We cannot insert random REFERENCE expressions at arbitrary
2724 	 places.  We can insert NARYs, which eventually re-materialize
2725 	 their operand values.  */
2726       if (temp->kind == NARY)
2727 	return create_expression_by_pieces (block, temp, stmts,
2728 					    get_expr_type (expr));
2729     }
2730 
2731   /* Defer.  */
2732   return NULL_TREE;
2733 }
2734 
2735 #define NECESSARY GF_PLF_1
2736 
2737 /* Create an expression in pieces, so that we can handle very complex
2738    expressions that may be ANTIC, but not necessarily GIMPLE.
2739    BLOCK is the basic block the expression will be inserted into,
2740    EXPR is the expression to insert (in value form)
2741    STMTS is a statement list to append the necessary insertions into.
2742 
2743    This function will die if we hit some value that shouldn't be
2744    ANTIC but is (IE there is no leader for it or its components).
2745    The function returns NULL_TREE in case a different antic expression
2746    has to be inserted first.
2747    This function may also generate expressions that are themselves
2748    partially or fully redundant.  Those that are will be either made
2749    fully redundant during the next iteration of insert (for partially
2750    redundant ones), or eliminated by eliminate (for fully redundant
2751    ones).  */
2752 
2753 static tree
2754 create_expression_by_pieces (basic_block block, pre_expr expr,
2755 			     gimple_seq *stmts, tree type)
2756 {
2757   tree name;
2758   tree folded;
2759   gimple_seq forced_stmts = NULL;
2760   unsigned int value_id;
2761   gimple_stmt_iterator gsi;
2762   tree exprtype = type ? type : get_expr_type (expr);
2763   pre_expr nameexpr;
2764   gassign *newstmt;
2765 
2766   switch (expr->kind)
2767     {
2768     /* We may hit the NAME/CONSTANT case if we have to convert types
2769        that value numbering saw through.  */
2770     case NAME:
2771       folded = PRE_EXPR_NAME (expr);
2772       if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2773 	return folded;
2774       break;
2775     case CONSTANT:
2776       {
2777 	folded = PRE_EXPR_CONSTANT (expr);
2778 	tree tem = fold_convert (exprtype, folded);
2779 	if (is_gimple_min_invariant (tem))
2780 	  return tem;
2781 	break;
2782       }
2783     case REFERENCE:
2784       if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2785 	{
2786 	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2787 	  unsigned int operand = 1;
2788 	  vn_reference_op_t currop = &ref->operands[0];
2789 	  tree sc = NULL_TREE;
2790 	  tree fn  = find_or_generate_expression (block, currop->op0, stmts);
2791 	  if (!fn)
2792 	    return NULL_TREE;
2793 	  if (currop->op1)
2794 	    {
2795 	      sc = find_or_generate_expression (block, currop->op1, stmts);
2796 	      if (!sc)
2797 		return NULL_TREE;
2798 	    }
2799 	  auto_vec<tree> args (ref->operands.length () - 1);
2800 	  while (operand < ref->operands.length ())
2801 	    {
2802 	      tree arg = create_component_ref_by_pieces_1 (block, ref,
2803 							   &operand, stmts);
2804 	      if (!arg)
2805 		return NULL_TREE;
2806 	      args.quick_push (arg);
2807 	    }
2808 	  gcall *call = gimple_build_call_vec (fn, args);
2809 	  gimple_call_set_with_bounds (call, currop->with_bounds);
2810 	  if (sc)
2811 	    gimple_call_set_chain (call, sc);
2812 	  tree forcedname = make_ssa_name (currop->type);
2813 	  gimple_call_set_lhs (call, forcedname);
2814 	  /* There's no CCP pass after PRE which would re-compute alignment
2815 	     information, so make sure we re-materialize this here.  */
2816 	  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED)
2817 	      && args.length () - 2 <= 1
2818 	      && tree_fits_uhwi_p (args[1])
2819 	      && (args.length () != 3 || tree_fits_uhwi_p (args[2])))
2820 	    {
2821 	      unsigned HOST_WIDE_INT halign = tree_to_uhwi (args[1]);
2822 	      unsigned HOST_WIDE_INT hmisalign
2823 		= args.length () == 3 ? tree_to_uhwi (args[2]) : 0;
2824 	      if ((halign & (halign - 1)) == 0
2825 		  && (hmisalign & ~(halign - 1)) == 0)
2826 		set_ptr_info_alignment (get_ptr_info (forcedname),
2827 					halign, hmisalign);
2828 	    }
2829 	  gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2830 	  gimple_seq_add_stmt_without_update (&forced_stmts, call);
2831 	  folded = forcedname;
2832 	}
2833       else
2834 	{
2835 	  folded = create_component_ref_by_pieces (block,
2836 						   PRE_EXPR_REFERENCE (expr),
2837 						   stmts);
2838 	  if (!folded)
2839 	    return NULL_TREE;
2840 	  name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2841 	  newstmt = gimple_build_assign (name, folded);
2842 	  gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2843 	  gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2844 	  folded = name;
2845 	}
2846       break;
2847     case NARY:
2848       {
2849 	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2850 	tree *genop = XALLOCAVEC (tree, nary->length);
2851 	unsigned i;
2852 	for (i = 0; i < nary->length; ++i)
2853 	  {
2854 	    genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2855 	    if (!genop[i])
2856 	      return NULL_TREE;
2857 	    /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
2858 	       may have conversions stripped.  */
2859 	    if (nary->opcode == POINTER_PLUS_EXPR)
2860 	      {
2861 		if (i == 0)
2862 		  genop[i] = gimple_convert (&forced_stmts,
2863 					     nary->type, genop[i]);
2864 		else if (i == 1)
2865 		  genop[i] = gimple_convert (&forced_stmts,
2866 					     sizetype, genop[i]);
2867 	      }
2868 	    else
2869 	      genop[i] = gimple_convert (&forced_stmts,
2870 					 TREE_TYPE (nary->op[i]), genop[i]);
2871 	  }
2872 	if (nary->opcode == CONSTRUCTOR)
2873 	  {
2874 	    vec<constructor_elt, va_gc> *elts = NULL;
2875 	    for (i = 0; i < nary->length; ++i)
2876 	      CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2877 	    folded = build_constructor (nary->type, elts);
2878 	    name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2879 	    newstmt = gimple_build_assign (name, folded);
2880 	    gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2881 	    folded = name;
2882 	  }
2883 	else
2884 	  {
2885 	    switch (nary->length)
2886 	      {
2887 	      case 1:
2888 		folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2889 				       genop[0]);
2890 		break;
2891 	      case 2:
2892 		folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2893 				       genop[0], genop[1]);
2894 		break;
2895 	      case 3:
2896 		folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2897 				       genop[0], genop[1], genop[2]);
2898 		break;
2899 	      default:
2900 		gcc_unreachable ();
2901 	      }
2902 	  }
2903       }
2904       break;
2905     default:
2906       gcc_unreachable ();
2907     }
2908 
2909   folded = gimple_convert (&forced_stmts, exprtype, folded);
2910 
2911   /* If there is nothing to insert, return the simplified result.  */
2912   if (gimple_seq_empty_p (forced_stmts))
2913     return folded;
2914   /* If we simplified to a constant, return it and discard any
2915      stmts built along the way.  */
2916   if (is_gimple_min_invariant (folded))
2917     {
2918       gimple_seq_discard (forced_stmts);
2919       return folded;
2920     }
2921   /* Likewise if we simplified to something not queued for insertion.  */
2922   bool found = false;
2923   gsi = gsi_last (forced_stmts);
2924   for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2925     {
2926       gimple *stmt = gsi_stmt (gsi);
2927       tree forcedname = gimple_get_lhs (stmt);
2928       if (forcedname == folded)
2929 	{
2930 	  found = true;
2931 	  break;
2932 	}
2933     }
2934   if (! found)
2935     {
2936       gimple_seq_discard (forced_stmts);
2937       return folded;
2938     }
2939   gcc_assert (TREE_CODE (folded) == SSA_NAME);
2940 
2941   /* If we have any intermediate expressions, add them to the value
2942      sets and chain them into the instruction stream.  */
2943   if (forced_stmts)
2944     {
2945       gsi = gsi_start (forced_stmts);
2946       for (; !gsi_end_p (gsi); gsi_next (&gsi))
2947 	{
2948 	  gimple *stmt = gsi_stmt (gsi);
2949 	  tree forcedname = gimple_get_lhs (stmt);
2950 	  pre_expr nameexpr;
2951 
2952 	  if (forcedname != folded)
2953 	    {
2954 	      VN_INFO_GET (forcedname)->valnum = forcedname;
2955 	      VN_INFO (forcedname)->value_id = get_next_value_id ();
2956 	      nameexpr = get_or_alloc_expr_for_name (forcedname);
2957 	      add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2958 	      bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2959 	      bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2960 	    }
2961 
2962 	  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2963 	  gimple_set_plf (stmt, NECESSARY, false);
2964 	}
2965       gimple_seq_add_seq (stmts, forced_stmts);
2966     }
2967 
2968   name = folded;
2969 
2970   /* Fold the last statement.  */
2971   gsi = gsi_last (*stmts);
2972   if (fold_stmt_inplace (&gsi))
2973     update_stmt (gsi_stmt (gsi));
2974 
2975   /* Add a value number to the temporary.
2976      The value may already exist in either NEW_SETS or AVAIL_OUT, because
2977      we are creating the expression by pieces, and this particular piece of
2978      the expression may already have been represented.  There is no harm
2979      in replacing here.  */
2980   value_id = get_expr_value_id (expr);
2981   VN_INFO_GET (name)->value_id = value_id;
2982   VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2983   if (VN_INFO (name)->valnum == NULL_TREE)
2984     VN_INFO (name)->valnum = name;
2985   gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2986   nameexpr = get_or_alloc_expr_for_name (name);
2987   add_to_value (value_id, nameexpr);
2988   if (NEW_SETS (block))
2989     bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2990   bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2991 
2992   pre_stats.insertions++;
2993   if (dump_file && (dump_flags & TDF_DETAILS))
2994     {
2995       fprintf (dump_file, "Inserted ");
2996       print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0, 0);
2997       fprintf (dump_file, " in predecessor %d (%04d)\n",
2998 	       block->index, value_id);
2999     }
3000 
3001   return name;
3002 }
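
/* Illustrative sketch (SSA names hypothetical): asked to materialize
   the value of (a_1 + b_2) * c_3 where a_1 + b_2 has no leader in
   AVAIL_OUT, the NARY case above recurses through
   find_or_generate_expression and ends up appending

     pretmp_10 = a_1 + b_2;
     pretmp_11 = pretmp_10 * c_3;

   to STMTS, registering pretmp_11 under EXPR's value-id and entering
   it into NEW_SETS and AVAIL_OUT of BLOCK.  */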
3003 
3004 
3005 /* Insert the to-be-made-available values of expression EXPRNUM for each
3006    predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3007    merge the result with a phi node, given the same value number as
3008    the expression.  Return true if we have inserted new stuff.  */
3009 
3010 static bool
3011 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3012 			    vec<pre_expr> avail)
3013 {
3014   pre_expr expr = expression_for_id (exprnum);
3015   pre_expr newphi;
3016   unsigned int val = get_expr_value_id (expr);
3017   edge pred;
3018   bool insertions = false;
3019   bool nophi = false;
3020   basic_block bprime;
3021   pre_expr eprime;
3022   edge_iterator ei;
3023   tree type = get_expr_type (expr);
3024   tree temp;
3025   gphi *phi;
3026 
3027   /* Make sure we aren't creating an induction variable.  */
3028   if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3029     {
3030       bool firstinsideloop = false;
3031       bool secondinsideloop = false;
3032       firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3033 					       EDGE_PRED (block, 0)->src);
3034       secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3035 						EDGE_PRED (block, 1)->src);
3036       /* Induction variables only have one edge inside the loop.  */
3037       if ((firstinsideloop ^ secondinsideloop)
3038 	  && expr->kind != REFERENCE)
3039 	{
3040 	  if (dump_file && (dump_flags & TDF_DETAILS))
3041 	    fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3042 	  nophi = true;
3043 	}
3044     }
3045 
3046   /* Make the necessary insertions.  */
3047   FOR_EACH_EDGE (pred, ei, block->preds)
3048     {
3049       gimple_seq stmts = NULL;
3050       tree builtexpr;
3051       bprime = pred->src;
3052       eprime = avail[pred->dest_idx];
3053       builtexpr = create_expression_by_pieces (bprime, eprime,
3054 					       &stmts, type);
3055       gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3056       if (!gimple_seq_empty_p (stmts))
3057 	{
3058 	  gsi_insert_seq_on_edge (pred, stmts);
3059 	  insertions = true;
3060 	}
3061       if (!builtexpr)
3062 	{
3063 	  /* We cannot insert a PHI node if we failed to insert
3064 	     on one edge.  */
3065 	  nophi = true;
3066 	  continue;
3067 	}
3068       if (is_gimple_min_invariant (builtexpr))
3069 	avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3070       else
3071 	avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3072     }
3073   /* If we didn't want a phi node, and we made insertions, we still have
3074      inserted new stuff, and thus return true.  If we didn't want a phi node,
3075      and didn't make insertions, we haven't added anything new, so return
3076      false.  */
3077   if (nophi && insertions)
3078     return true;
3079   else if (nophi && !insertions)
3080     return false;
3081 
3082   /* Now build a phi for the new variable.  */
3083   temp = make_temp_ssa_name (type, NULL, "prephitmp");
3084   phi = create_phi_node (temp, block);
3085 
3086   gimple_set_plf (phi, NECESSARY, false);
3087   VN_INFO_GET (temp)->value_id = val;
3088   VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3089   if (VN_INFO (temp)->valnum == NULL_TREE)
3090     VN_INFO (temp)->valnum = temp;
3091   bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3092   FOR_EACH_EDGE (pred, ei, block->preds)
3093     {
3094       pre_expr ae = avail[pred->dest_idx];
3095       gcc_assert (get_expr_type (ae) == type
3096 		  || useless_type_conversion_p (type, get_expr_type (ae)));
3097       if (ae->kind == CONSTANT)
3098 	add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3099 		     pred, UNKNOWN_LOCATION);
3100       else
3101 	add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3102     }
3103 
3104   newphi = get_or_alloc_expr_for_name (temp);
3105   add_to_value (val, newphi);
3106 
3107   /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3108      this insertion, since we test for the existence of this value in PHI_GEN
3109      before proceeding with the partial redundancy checks in insert_aux.
3110 
3111      The value may exist in AVAIL_OUT, in particular, it could be represented
3112      by the expression we are trying to eliminate, in which case we want the
3113      replacement to occur.  If it does not exist in AVAIL_OUT, we want it
3114      inserted there.
3115 
3116      Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3117      this block, because if it did, it would have existed in our dominator's
3118      AVAIL_OUT, and would have been skipped due to the full redundancy check.
3119   */
3120 
3121   bitmap_insert_into_set (PHI_GEN (block), newphi);
3122   bitmap_value_replace_in_set (AVAIL_OUT (block),
3123 			       newphi);
3124   bitmap_insert_into_set (NEW_SETS (block),
3125 			  newphi);
3126 
3127   /* If we insert a PHI node for a conversion of another PHI node
3128      in the same basic-block try to preserve range information.
3129      This is important so that follow-up loop passes receive optimal
3130      number-of-iterations analysis results.  See PR61743.  */
3131   if (expr->kind == NARY
3132       && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3133       && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3134       && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3135       && INTEGRAL_TYPE_P (type)
3136       && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3137       && (TYPE_PRECISION (type)
3138 	  >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3139       && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3140     {
3141       wide_int min, max;
3142       if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3143 	  && !wi::neg_p (min, SIGNED)
3144 	  && !wi::neg_p (max, SIGNED))
3145 	/* Just handle extension and sign-changes of all-positive ranges.  */
3146 	set_range_info (temp,
3147 			SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3148 			wide_int_storage::from (min, TYPE_PRECISION (type),
3149 						TYPE_SIGN (type)),
3150 			wide_int_storage::from (max, TYPE_PRECISION (type),
3151 						TYPE_SIGN (type)));
3152     }
3153 
3154   if (dump_file && (dump_flags & TDF_DETAILS))
3155     {
3156       fprintf (dump_file, "Created phi ");
3157       print_gimple_stmt (dump_file, phi, 0, 0);
3158       fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3159     }
3160   pre_stats.phis++;
3161   return true;
3162 }
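
/* For illustration (names hypothetical): if bb5 has predecessors bb3,
   which computes x_5 = a_1 + b_2, and bb4, which does not, then
   a_1 + b_2 in bb5 is partially redundant.  The code above inserts
   pretmp_6 = a_1 + b_2 on the bb4->bb5 edge and creates

     prephitmp_7 = PHI <x_5(3), pretmp_6(4)>

   in bb5, making the original computation fully redundant so that
   elimination can later reuse prephitmp_7.  */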
3163 
3164 
3165 
3166 /* Perform insertion of partially redundant or hoistable values.
3167    For BLOCK, do the following:
3168    1.  Propagate the NEW_SETS of the dominator into the current block.
3169    If the block has multiple predecessors,
3170        2a. Iterate over the ANTIC expressions for the block to see if
3171 	   any of them are partially redundant.
3172        2b. If so, insert them into the necessary predecessors to make
3173 	   the expression fully redundant.
3174        2c. Insert a new PHI merging the values of the predecessors.
3175        2d. Insert the new PHI, and the new expressions, into the
3176 	   NEW_SETS set.
3177    If the block has multiple successors,
3178        3a. Iterate over the ANTIC values for the block to see if
3179 	   any of them are good candidates for hoisting.
3180        3b. If so, insert expressions computing the values in BLOCK,
3181 	   and add the new expressions into the NEW_SETS set.
3182    4. Recursively call ourselves on the dominator children of BLOCK.
3183 
3184    Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3185    do_pre_regular_insertion and do_pre_partial_partial_insertion.
3186    3a and 3b are done in do_hoist_insertion.
3187 */
3188 
3189 static bool
3190 do_pre_regular_insertion (basic_block block, basic_block dom)
3191 {
3192   bool new_stuff = false;
3193   vec<pre_expr> exprs;
3194   pre_expr expr;
3195   auto_vec<pre_expr> avail;
3196   int i;
3197 
3198   exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3199   avail.safe_grow (EDGE_COUNT (block->preds));
3200 
3201   FOR_EACH_VEC_ELT (exprs, i, expr)
3202     {
3203       if (expr->kind == NARY
3204 	  || expr->kind == REFERENCE)
3205 	{
3206 	  unsigned int val;
3207 	  bool by_some = false;
3208 	  bool cant_insert = false;
3209 	  bool all_same = true;
3210 	  pre_expr first_s = NULL;
3211 	  edge pred;
3212 	  basic_block bprime;
3213 	  pre_expr eprime = NULL;
3214 	  edge_iterator ei;
3215 	  pre_expr edoubleprime = NULL;
3216 	  bool do_insertion = false;
3217 
3218 	  val = get_expr_value_id (expr);
3219 	  if (bitmap_set_contains_value (PHI_GEN (block), val))
3220 	    continue;
3221 	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3222 	    {
3223 	      if (dump_file && (dump_flags & TDF_DETAILS))
3224 		{
3225 		  fprintf (dump_file, "Found fully redundant value: ");
3226 		  print_pre_expr (dump_file, expr);
3227 		  fprintf (dump_file, "\n");
3228 		}
3229 	      continue;
3230 	    }
3231 
3232 	  FOR_EACH_EDGE (pred, ei, block->preds)
3233 	    {
3234 	      unsigned int vprime;
3235 
3236 	      /* We should never run insertion for the exit block
3237 	         and so not come across fake pred edges.  */
3238 	      gcc_assert (!(pred->flags & EDGE_FAKE));
3239 	      bprime = pred->src;
3240 	      /* We are looking at ANTIC_OUT of bprime.  */
3241 	      eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3242 				      bprime, block);
3243 
3244 	      /* eprime will generally only be NULL if the
3245 		 value of the expression, translated
3246 		 through the PHI for this predecessor, is
3247 		 undefined.  If that is the case, we can't
3248 		 make the expression fully redundant,
3249 		 because its value is undefined along a
3250 		 predecessor path.  We can thus break out
3251 		 early because it doesn't matter what the
3252 		 rest of the results are.  */
3253 	      if (eprime == NULL)
3254 		{
3255 		  avail[pred->dest_idx] = NULL;
3256 		  cant_insert = true;
3257 		  break;
3258 		}
3259 
3260 	      vprime = get_expr_value_id (eprime);
3261 	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3262 						 vprime);
3263 	      if (edoubleprime == NULL)
3264 		{
3265 		  avail[pred->dest_idx] = eprime;
3266 		  all_same = false;
3267 		}
3268 	      else
3269 		{
3270 		  avail[pred->dest_idx] = edoubleprime;
3271 		  by_some = true;
3272 		  /* We want to perform insertions to remove a redundancy on
3273 		     a path in the CFG we want to optimize for speed.  */
3274 		  if (optimize_edge_for_speed_p (pred))
3275 		    do_insertion = true;
3276 		  if (first_s == NULL)
3277 		    first_s = edoubleprime;
3278 		  else if (!pre_expr_d::equal (first_s, edoubleprime))
3279 		    all_same = false;
3280 		}
3281 	    }
3282 	  /* If we can insert it, it's not the same value
3283 	     already available along every predecessor, and
3284 	     it's defined by some predecessor, then it is
3285 	     partially redundant.  */
3286 	  if (!cant_insert && !all_same && by_some)
3287 	    {
3288 	      if (!do_insertion)
3289 		{
3290 		  if (dump_file && (dump_flags & TDF_DETAILS))
3291 		    {
3292 		      fprintf (dump_file, "Skipping partial redundancy for "
3293 			       "expression ");
3294 		      print_pre_expr (dump_file, expr);
3295 		      fprintf (dump_file, " (%04d), no redundancy on to be "
3296 			       "optimized for speed edge\n", val);
3297 		    }
3298 		}
3299 	      else if (dbg_cnt (treepre_insert))
3300 		{
3301 		  if (dump_file && (dump_flags & TDF_DETAILS))
3302 		    {
3303 		      fprintf (dump_file, "Found partial redundancy for "
3304 			       "expression ");
3305 		      print_pre_expr (dump_file, expr);
3306 		      fprintf (dump_file, " (%04d)\n",
3307 			       get_expr_value_id (expr));
3308 		    }
3309 		  if (insert_into_preds_of_block (block,
3310 						  get_expression_id (expr),
3311 						  avail))
3312 		    new_stuff = true;
3313 		}
3314 	    }
3315 	  /* If all edges produce the same value and that value is
3316 	     an invariant, then the PHI has the same value on all
3317 	     edges.  Note this.  */
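	  /* For instance (an illustrative sketch): if every edge
	     provides the constant 5 for the value, a PHI would read
	     PHI <5, 5>; instead a single pretmp = 5 is emitted at the
	     start of BLOCK and recorded as the leader.  */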
3318 	  else if (!cant_insert && all_same)
3319 	    {
3320 	      gcc_assert (edoubleprime->kind == CONSTANT
3321 			  || edoubleprime->kind == NAME);
3322 
3323 	      tree temp = make_temp_ssa_name (get_expr_type (expr),
3324 					      NULL, "pretmp");
3325 	      gassign *assign
3326 		= gimple_build_assign (temp,
3327 				       edoubleprime->kind == CONSTANT ?
3328 				       PRE_EXPR_CONSTANT (edoubleprime) :
3329 				       PRE_EXPR_NAME (edoubleprime));
3330 	      gimple_stmt_iterator gsi = gsi_after_labels (block);
3331 	      gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3332 
3333 	      gimple_set_plf (assign, NECESSARY, false);
3334 	      VN_INFO_GET (temp)->value_id = val;
3335 	      VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3336 	      if (VN_INFO (temp)->valnum == NULL_TREE)
3337 		VN_INFO (temp)->valnum = temp;
3338 	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3339 	      pre_expr newe = get_or_alloc_expr_for_name (temp);
3340 	      add_to_value (val, newe);
3341 	      bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3342 	      bitmap_insert_into_set (NEW_SETS (block), newe);
3343 	    }
3344 	}
3345     }
3346 
3347   exprs.release ();
3348   return new_stuff;
3349 }
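
/* An illustrative source-level sketch of what the above achieves
   (hypothetical code, not taken from any testcase):

     if (cond)
       x = a + b;
     y = a + b;		<- partially redundant

   Insertion places the computation on the edge where it was missing
   and insert_into_preds_of_block creates a PHI, after which
   elimination can rewrite the later computation:

     if (cond)
       x = a + b;
     else
       pretmp = a + b;
     t = PHI <x, pretmp>;
     y = t;  */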
3350 
3351 
3352 /* Perform insertion for partially anticipatable expressions.  There
3353    is only one case we will perform insertion for these.  This case is
3354    if the expression is partially anticipatable, and fully available.
3355    In this case, we know that putting it earlier will enable us to
3356    remove the later computation.  */
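
/* A sketch of that case (hypothetical code, for illustration):

     if (c)
       x = a + b;
     else
       y = a + b;
     ...			<- join: a + b available from both preds
     if (d)
       z = a + b;		<- only partially anticipated at the join

   Regular PRE will not insert at the join because a + b is not fully
   anticipated there, but it is fully available, so creating a
   PHI <x, y> makes the guarded computation fully redundant.  */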
3357 
3358 static bool
3359 do_pre_partial_partial_insertion (basic_block block, basic_block dom)
3360 {
3361   bool new_stuff = false;
3362   vec<pre_expr> exprs;
3363   pre_expr expr;
3364   auto_vec<pre_expr> avail;
3365   int i;
3366 
3367   exprs = sorted_array_from_bitmap_set (PA_IN (block));
3368   avail.safe_grow (EDGE_COUNT (block->preds));
3369 
3370   FOR_EACH_VEC_ELT (exprs, i, expr)
3371     {
3372       if (expr->kind == NARY
3373 	  || expr->kind == REFERENCE)
3374 	{
3375 	  unsigned int val;
3376 	  bool by_all = true;
3377 	  bool cant_insert = false;
3378 	  edge pred;
3379 	  basic_block bprime;
3380 	  pre_expr eprime = NULL;
3381 	  edge_iterator ei;
3382 
3383 	  val = get_expr_value_id (expr);
3384 	  if (bitmap_set_contains_value (PHI_GEN (block), val))
3385 	    continue;
3386 	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3387 	    continue;
3388 
3389 	  FOR_EACH_EDGE (pred, ei, block->preds)
3390 	    {
3391 	      unsigned int vprime;
3392 	      pre_expr edoubleprime;
3393 
3394 	      /* We should never run insertion for the exit block
3395 	         and so not come across fake pred edges.  */
3396 	      gcc_assert (!(pred->flags & EDGE_FAKE));
3397 	      bprime = pred->src;
3398 	      eprime = phi_translate (expr, ANTIC_IN (block),
3399 				      PA_IN (block),
3400 				      bprime, block);
3401 
3402 	      /* eprime will generally only be NULL if the
3403 		 value of the expression, translated
3404 		 through the PHI for this predecessor, is
3405 		 undefined.  If that is the case, we can't
3406 		 make the expression fully redundant,
3407 		 because its value is undefined along a
3408 		 predecessor path.  We can thus break out
3409 		 early because it doesn't matter what the
3410 		 rest of the results are.  */
3411 	      if (eprime == NULL)
3412 		{
3413 		  avail[pred->dest_idx] = NULL;
3414 		  cant_insert = true;
3415 		  break;
3416 		}
3417 
3418 	      vprime = get_expr_value_id (eprime);
3419 	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3420 	      avail[pred->dest_idx] = edoubleprime;
3421 	      if (edoubleprime == NULL)
3422 		{
3423 		  by_all = false;
3424 		  break;
3425 		}
3426 	    }
3427 
3428 	  /* If we can insert it and the value is available
3429 	     along every predecessor, the expression is fully
3430 	     available and inserting it earlier will let us
3431 	     remove the later computation.  */
3432 	  if (!cant_insert && by_all)
3433 	    {
3434 	      edge succ;
3435 	      bool do_insertion = false;
3436 
3437 	      /* Insert only if we can remove a later expression on a path
3438 		 that we want to optimize for speed.
3439 		 The phi node that we will be inserting in BLOCK is not free,
3440 		 and inserting it for the sake of a !optimize_for_speed
3441 		 successor may cause regressions on the speed path.  */
3442 	      FOR_EACH_EDGE (succ, ei, block->succs)
3443 		{
3444 		  if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3445 		      || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3446 		    {
3447 		      if (optimize_edge_for_speed_p (succ))
3448 			do_insertion = true;
3449 		    }
3450 		}
3451 
3452 	      if (!do_insertion)
3453 		{
3454 		  if (dump_file && (dump_flags & TDF_DETAILS))
3455 		    {
3456 		      fprintf (dump_file, "Skipping partial partial redundancy "
3457 			       "for expression ");
3458 		      print_pre_expr (dump_file, expr);
3459 		      fprintf (dump_file, " (%04d), not (partially) anticipated "
3460 			       "on any to be optimized for speed edges\n", val);
3461 		    }
3462 		}
3463 	      else if (dbg_cnt (treepre_insert))
3464 		{
3465 		  pre_stats.pa_insert++;
3466 		  if (dump_file && (dump_flags & TDF_DETAILS))
3467 		    {
3468 		      fprintf (dump_file, "Found partial partial redundancy "
3469 			       "for expression ");
3470 		      print_pre_expr (dump_file, expr);
3471 		      fprintf (dump_file, " (%04d)\n",
3472 			       get_expr_value_id (expr));
3473 		    }
3474 		  if (insert_into_preds_of_block (block,
3475 						  get_expression_id (expr),
3476 						  avail))
3477 		    new_stuff = true;
3478 		}
3479 	    }
3480 	}
3481     }
3482 
3483   exprs.release ();
3484   return new_stuff;
3485 }
3486 
3487 /* Insert expressions in BLOCK to compute hoistable values up.
3488    Return TRUE if something was inserted, otherwise return FALSE.
3489    The caller has to make sure that BLOCK has at least two successors.  */
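
/* A source-level sketch of the transformation (hypothetical code,
   for illustration):

     if (cond)
       x = a + b;
     else
       y = a + b;

   Both successors compute the value, so a + b is in ANTIC_IN of the
   branch block but not in its AVAIL_OUT; inserting it before the
   branch makes both computations fully redundant:

     t = a + b;
     if (cond)
       x = t;
     else
       y = t;  */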
3490 
3491 static bool
3492 do_hoist_insertion (basic_block block)
3493 {
3494   edge e;
3495   edge_iterator ei;
3496   bool new_stuff = false;
3497   unsigned i;
3498   gimple_stmt_iterator last;
3499 
3500   /* At least two successors, or else...  */
3501   gcc_assert (EDGE_COUNT (block->succs) >= 2);
3502 
3503   /* Check that all successors of BLOCK are dominated by BLOCK.
3504      We could use dominated_by_p() for this, but actually there is a much
3505      quicker check: any successor that is dominated by BLOCK can't have
3506      more than one predecessor edge.  */
3507   FOR_EACH_EDGE (e, ei, block->succs)
3508     if (! single_pred_p (e->dest))
3509       return false;
3510 
3511   /* Determine the insertion point.  If we would have to insert
3512      before the last stmt and cannot do so safely, bail out.  */
3513   last = gsi_last_bb (block);
3514   if (!gsi_end_p (last)
3515       && !is_ctrl_stmt (gsi_stmt (last))
3516       && stmt_ends_bb_p (gsi_stmt (last)))
3517     return false;
3518 
3519   /* Compute the set of hoistable expressions from ANTIC_IN.  First compute
3520      hoistable values.  */
3521   bitmap_set hoistable_set;
3522 
3523   /* A hoistable value must be in ANTIC_IN (BLOCK)
3524      but not in AVAIL_OUT (BLOCK).  */
3525   bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3526   bitmap_and_compl (&hoistable_set.values,
3527 		    &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3528 
3529   /* Short-cut for a common case: hoistable_set is empty.  */
3530   if (bitmap_empty_p (&hoistable_set.values))
3531     return false;
3532 
3533   /* Compute which of the hoistable values is in AVAIL_OUT of
3534      at least one of the successors of BLOCK.  */
3535   bitmap_head availout_in_some;
3536   bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3537   FOR_EACH_EDGE (e, ei, block->succs)
3538     /* Do not consider expressions solely because of their availability
3539        on loop exits.  They'd be ANTIC-IN throughout the whole loop
3540        and thus effectively hoisted across loops by combination of
3541        PRE and hoisting.  */
3542     if (! loop_exit_edge_p (block->loop_father, e))
3543       bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3544 			   &AVAIL_OUT (e->dest)->values);
3545   bitmap_clear (&hoistable_set.values);
3546 
3547   /* Short-cut for a common case: availout_in_some is empty.  */
3548   if (bitmap_empty_p (&availout_in_some))
3549     return false;
3550 
3551   /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set.  */
3552   hoistable_set.values = availout_in_some;
3553   hoistable_set.expressions = ANTIC_IN (block)->expressions;
3554 
3555   /* Now finally construct the topologically-ordered expression set.  */
3556   vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3557 
3558   bitmap_clear (&hoistable_set.values);
3559 
3560   /* If there are candidate values for hoisting, insert expressions
3561      strategically to make the hoistable expressions fully redundant.  */
3562   pre_expr expr;
3563   FOR_EACH_VEC_ELT (exprs, i, expr)
3564     {
3565       /* While we try to sort expressions topologically above, the
3566          sorting doesn't work out perfectly.  Catch expressions we
3567 	 already inserted.  */
3568       unsigned int value_id = get_expr_value_id (expr);
3569       if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3570 	{
3571 	  if (dump_file && (dump_flags & TDF_DETAILS))
3572 	    {
3573 	      fprintf (dump_file,
3574 		       "Already inserted expression for ");
3575 	      print_pre_expr (dump_file, expr);
3576 	      fprintf (dump_file, " (%04d)\n", value_id);
3577 	    }
3578 	  continue;
3579 	}
3580 
3581       /* OK, we should hoist this value.  Perform the transformation.  */
3582       pre_stats.hoist_insert++;
3583       if (dump_file && (dump_flags & TDF_DETAILS))
3584 	{
3585 	  fprintf (dump_file,
3586 		   "Inserting expression in block %d for code hoisting: ",
3587 		   block->index);
3588 	  print_pre_expr (dump_file, expr);
3589 	  fprintf (dump_file, " (%04d)\n", value_id);
3590 	}
3591 
3592       gimple_seq stmts = NULL;
3593       tree res = create_expression_by_pieces (block, expr, &stmts,
3594 					      get_expr_type (expr));
3595 
3596       /* Do not return true if expression creation ultimately
3597          did not insert any statements.  */
3598       if (gimple_seq_empty_p (stmts))
3599 	res = NULL_TREE;
3600       else
3601 	{
3602 	  if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3603 	    gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3604 	  else
3605 	    gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3606 	}
3607 
3608       /* Make sure to not return true if expression creation ultimately
3609          failed but also make sure to insert any stmts produced as they
3610 	 are tracked in inserted_exprs.  */
3611       if (! res)
3612 	continue;
3613 
3614       new_stuff = true;
3615     }
3616 
3617   exprs.release ();
3618 
3619   return new_stuff;
3620 }
3621 
3622 /* Do a dominator walk on the control flow graph, and insert computations
3623    of values as necessary for PRE and hoisting.  */
3624 
3625 static bool
3626 insert_aux (basic_block block, bool do_pre, bool do_hoist)
3627 {
3628   basic_block son;
3629   bool new_stuff = false;
3630 
3631   if (block)
3632     {
3633       basic_block dom;
3634       dom = get_immediate_dominator (CDI_DOMINATORS, block);
3635       if (dom)
3636 	{
3637 	  unsigned i;
3638 	  bitmap_iterator bi;
3639 	  bitmap_set_t newset;
3640 
3641 	  /* First, update the AVAIL_OUT set with anything we may have
3642 	     inserted higher up in the dominator tree.  */
3643 	  newset = NEW_SETS (dom);
3644 	  if (newset)
3645 	    {
3646 	      /* Note that we need to value_replace both NEW_SETS and
3647 		 AVAIL_OUT.  In both sets the value may currently be
3648 		 represented by some non-simple expression that we want
3649 		 to replace with the new one.  */
3650 	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3651 		{
3652 		  pre_expr expr = expression_for_id (i);
3653 		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
3654 		  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3655 		}
3656 	    }
3657 
3658 	  /* Insert expressions for partial redundancies.  */
3659 	  if (do_pre && !single_pred_p (block))
3660 	    {
3661 	      new_stuff |= do_pre_regular_insertion (block, dom);
3662 	      if (do_partial_partial)
3663 		new_stuff |= do_pre_partial_partial_insertion (block, dom);
3664 	    }
3665 
3666 	  /* Insert expressions for hoisting.  */
3667 	  if (do_hoist && EDGE_COUNT (block->succs) >= 2)
3668 	    new_stuff |= do_hoist_insertion (block);
3669 	}
3670     }
3671   for (son = first_dom_son (CDI_DOMINATORS, block);
3672        son;
3673        son = next_dom_son (CDI_DOMINATORS, son))
3674     {
3675       new_stuff |= insert_aux (son, do_pre, do_hoist);
3676     }
3677 
3678   return new_stuff;
3679 }
3680 
3681 /* Perform insertion of partially redundant and hoistable values.  */
3682 
3683 static void
3684 insert (void)
3685 {
3686   bool new_stuff = true;
3687   basic_block bb;
3688   int num_iterations = 0;
3689 
3690   FOR_ALL_BB_FN (bb, cfun)
3691     NEW_SETS (bb) = bitmap_set_new ();
3692 
3693   while (new_stuff)
3694     {
3695       num_iterations++;
3696       if (dump_file && dump_flags & TDF_DETAILS)
3697 	fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3698       new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
3699 			      flag_code_hoisting);
3700 
3701       /* Clear the NEW sets before the next iteration.  We have already
3702          fully propagated its contents.  */
3703       if (new_stuff)
3704 	FOR_ALL_BB_FN (bb, cfun)
3705 	  bitmap_set_free (NEW_SETS (bb));
3706     }
3707   statistics_histogram_event (cfun, "insert iterations", num_iterations);
3708 }
3709 
3710 
3711 /* Compute the AVAIL set for all basic blocks.
3712 
3713    This function performs value numbering of the statements in each basic
3714    block.  The AVAIL sets are built from information we glean while doing
3715    this value numbering, since the AVAIL sets contain only one entry per
3716    value.
3717 
3718    AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3719    AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
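
/* For example (an illustrative sketch): if BLOCK defines
   t_1 = a_2 + b_3 (in TMP_GEN) and a PHI result p_4 (in PHI_GEN),
   and D is its immediate dominator, this computes

     AVAIL_OUT[BLOCK] = AVAIL_OUT[D] U { p_4 } U { t_1 }

   keeping only one leader per value.  */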
3720 
3721 static void
3722 compute_avail (void)
3723 {
3724 
3725   basic_block block, son;
3726   basic_block *worklist;
3727   size_t sp = 0;
3728   unsigned i;
3729   tree name;
3730 
3731   /* We pretend that default definitions are defined in the entry block.
3732      This includes function arguments and the static chain decl.  */
3733   FOR_EACH_SSA_NAME (i, name, cfun)
3734     {
3735       pre_expr e;
3736       if (!SSA_NAME_IS_DEFAULT_DEF (name)
3737 	  || has_zero_uses (name)
3738 	  || virtual_operand_p (name))
3739 	continue;
3740 
3741       e = get_or_alloc_expr_for_name (name);
3742       add_to_value (get_expr_value_id (e), e);
3743       bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3744       bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3745 				    e);
3746     }
3747 
3748   if (dump_file && (dump_flags & TDF_DETAILS))
3749     {
3750       print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3751 			"tmp_gen", ENTRY_BLOCK);
3752       print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3753 			"avail_out", ENTRY_BLOCK);
3754     }
3755 
3756   /* Allocate the worklist.  */
3757   worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3758 
3759   /* Seed the algorithm by putting the dominator children of the entry
3760      block on the worklist.  */
3761   for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3762        son;
3763        son = next_dom_son (CDI_DOMINATORS, son))
3764     worklist[sp++] = son;
3765 
3766   BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3767     = ssa_default_def (cfun, gimple_vop (cfun));
3768 
3769   /* Loop until the worklist is empty.  */
3770   while (sp)
3771     {
3772       gimple *stmt;
3773       basic_block dom;
3774 
3775       /* Pick a block from the worklist.  */
3776       block = worklist[--sp];
3777 
3778       /* Initially, the set of available values in BLOCK is that of
3779 	 its immediate dominator.  */
3780       dom = get_immediate_dominator (CDI_DOMINATORS, block);
3781       if (dom)
3782 	{
3783 	  bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3784 	  BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3785 	}
3786 
3787       /* Generate values for PHI nodes.  */
3788       for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3789 	   gsi_next (&gsi))
3790 	{
3791 	  tree result = gimple_phi_result (gsi.phi ());
3792 
3793 	  /* We have no need for virtual phis, as they don't represent
3794 	     actual computations.  */
3795 	  if (virtual_operand_p (result))
3796 	    {
3797 	      BB_LIVE_VOP_ON_EXIT (block) = result;
3798 	      continue;
3799 	    }
3800 
3801 	  pre_expr e = get_or_alloc_expr_for_name (result);
3802 	  add_to_value (get_expr_value_id (e), e);
3803 	  bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3804 	  bitmap_insert_into_set (PHI_GEN (block), e);
3805 	}
3806 
3807       BB_MAY_NOTRETURN (block) = 0;
3808 
3809       /* Now compute value numbers and populate value sets with all
3810 	 the expressions computed in BLOCK.  */
3811       for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3812 	   gsi_next (&gsi))
3813 	{
3814 	  ssa_op_iter iter;
3815 	  tree op;
3816 
3817 	  stmt = gsi_stmt (gsi);
3818 
3819 	  /* Cache whether the basic-block has any non-visible side-effect
3820 	     or control flow.
3821 	     If this isn't a call or it is the last stmt in the
3822 	     basic-block then the CFG represents things correctly.  */
3823 	  if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3824 	    {
3825 	      /* Non-looping const functions always return normally.
3826 		 Otherwise the call might not return or have side-effects
3827 		 that forbid hoisting possibly trapping expressions
3828 		 before it.  */
3829 	      int flags = gimple_call_flags (stmt);
3830 	      if (!(flags & ECF_CONST)
3831 		  || (flags & ECF_LOOPING_CONST_OR_PURE))
3832 		BB_MAY_NOTRETURN (block) = 1;
3833 	    }
3834 
3835 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3836 	    {
3837 	      pre_expr e = get_or_alloc_expr_for_name (op);
3838 
3839 	      add_to_value (get_expr_value_id (e), e);
3840 	      bitmap_insert_into_set (TMP_GEN (block), e);
3841 	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3842 	    }
3843 
3844 	  if (gimple_vdef (stmt))
3845 	    BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3846 
3847 	  if (gimple_has_side_effects (stmt)
3848 	      || stmt_could_throw_p (stmt)
3849 	      || is_gimple_debug (stmt))
3850 	    continue;
3851 
3852 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3853 	    {
3854 	      if (ssa_undefined_value_p (op))
3855 		continue;
3856 	      pre_expr e = get_or_alloc_expr_for_name (op);
3857 	      bitmap_value_insert_into_set (EXP_GEN (block), e);
3858 	    }
3859 
3860 	  switch (gimple_code (stmt))
3861 	    {
3862 	    case GIMPLE_RETURN:
3863 	      continue;
3864 
3865 	    case GIMPLE_CALL:
3866 	      {
3867 		vn_reference_t ref;
3868 		vn_reference_s ref1;
3869 		pre_expr result = NULL;
3870 
3871 		/* We can value number only calls to real functions.  */
3872 		if (gimple_call_internal_p (stmt))
3873 		  continue;
3874 
3875 		vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3876 		if (!ref)
3877 		  continue;
3878 
3879 		/* If the value of the call is not invalidated in
3880 		   this block until it is computed, add the expression
3881 		   to EXP_GEN.  */
3882 		if (!gimple_vuse (stmt)
3883 		    || gimple_code
3884 		         (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3885 		    || gimple_bb (SSA_NAME_DEF_STMT
3886 				    (gimple_vuse (stmt))) != block)
3887 		  {
3888 		    result = pre_expr_pool.allocate ();
3889 		    result->kind = REFERENCE;
3890 		    result->id = 0;
3891 		    PRE_EXPR_REFERENCE (result) = ref;
3892 
3893 		    get_or_alloc_expression_id (result);
3894 		    add_to_value (get_expr_value_id (result), result);
3895 		    bitmap_value_insert_into_set (EXP_GEN (block), result);
3896 		  }
3897 		continue;
3898 	      }
3899 
3900 	    case GIMPLE_ASSIGN:
3901 	      {
3902 		pre_expr result = NULL;
3903 		switch (vn_get_stmt_kind (stmt))
3904 		  {
3905 		  case VN_NARY:
3906 		    {
3907 		      enum tree_code code = gimple_assign_rhs_code (stmt);
3908 		      vn_nary_op_t nary;
3909 
3910 		      /* COND_EXPR and VEC_COND_EXPR are awkward in
3911 			 that they contain an embedded complex expression.
3912 			 Don't even try to shove those through PRE.  */
3913 		      if (code == COND_EXPR
3914 			  || code == VEC_COND_EXPR)
3915 			continue;
3916 
3917 		      vn_nary_op_lookup_stmt (stmt, &nary);
3918 		      if (!nary)
3919 			continue;
3920 
3921 		      /* If the NARY traps and there was a preceding
3922 		         point in the block that might not return, avoid
3923 			 adding the nary to EXP_GEN.  */
3924 		      if (BB_MAY_NOTRETURN (block)
3925 			  && vn_nary_may_trap (nary))
3926 			continue;
3927 
3928 		      result = pre_expr_pool.allocate ();
3929 		      result->kind = NARY;
3930 		      result->id = 0;
3931 		      PRE_EXPR_NARY (result) = nary;
3932 		      break;
3933 		    }
3934 
3935 		  case VN_REFERENCE:
3936 		    {
3937 		      tree rhs1 = gimple_assign_rhs1 (stmt);
3938 		      alias_set_type set = get_alias_set (rhs1);
3939 		      vec<vn_reference_op_s> operands
3940 			= vn_reference_operands_for_lookup (rhs1);
3941 		      vn_reference_t ref;
3942 		      vn_reference_lookup_pieces (gimple_vuse (stmt), set,
3943 						  TREE_TYPE (rhs1),
3944 						  operands, &ref, VN_WALK);
3945 		      if (!ref)
3946 			{
3947 			  operands.release ();
3948 			  continue;
3949 			}
3950 
3951 		      /* If the value of the reference is not invalidated in
3952 			 this block until it is computed, add the expression
3953 			 to EXP_GEN.  */
3954 		      if (gimple_vuse (stmt))
3955 			{
3956 			  gimple *def_stmt;
3957 			  bool ok = true;
3958 			  def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3959 			  while (!gimple_nop_p (def_stmt)
3960 				 && gimple_code (def_stmt) != GIMPLE_PHI
3961 				 && gimple_bb (def_stmt) == block)
3962 			    {
3963 			      if (stmt_may_clobber_ref_p
3964 				    (def_stmt, gimple_assign_rhs1 (stmt)))
3965 				{
3966 				  ok = false;
3967 				  break;
3968 				}
3969 			      def_stmt
3970 				= SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3971 			    }
3972 			  if (!ok)
3973 			    {
3974 			      operands.release ();
3975 			      continue;
3976 			    }
3977 			}
3978 
3979 		      /* If the load was value-numbered to another
3980 			 load make sure we do not use its expression
3981 			 for insertion if it wouldn't be a valid
3982 			 replacement.  */
3983 	      /* At the moment we have a testcase
3984 			 for hoist insertion of aligned vs. misaligned
3985 			 variants in gcc.dg/torture/pr65270-1.c thus
3986 			 with just alignment to be considered we can
3987 			 simply replace the expression in the hashtable
3988 			 with the most conservative one.  */
3989 		      vn_reference_op_t ref1 = &ref->operands.last ();
3990 		      while (ref1->opcode != TARGET_MEM_REF
3991 			     && ref1->opcode != MEM_REF
3992 			     && ref1 != &ref->operands[0])
3993 			--ref1;
3994 		      vn_reference_op_t ref2 = &operands.last ();
3995 		      while (ref2->opcode != TARGET_MEM_REF
3996 			     && ref2->opcode != MEM_REF
3997 			     && ref2 != &operands[0])
3998 			--ref2;
3999 		      if ((ref1->opcode == TARGET_MEM_REF
4000 			   || ref1->opcode == MEM_REF)
4001 			  && (TYPE_ALIGN (ref1->type)
4002 			      > TYPE_ALIGN (ref2->type)))
4003 			ref1->type
4004 			  = build_aligned_type (ref1->type,
4005 						TYPE_ALIGN (ref2->type));
4006 		      /* TBAA behavior is part of the value as well, so
4007 		         make sure that the hashtable expression covers it
4008 			 by adjusting the ref alias set and its base.  */
4009 		      if (ref->set == set
4010 			  || alias_set_subset_of (set, ref->set))
4011 			;
4012 		      else if (alias_set_subset_of (ref->set, set))
4013 			{
4014 			  ref->set = set;
4015 			  if (ref1->opcode == MEM_REF)
4016 			    ref1->op0 = wide_int_to_tree (TREE_TYPE (ref2->op0),
4017 							  ref1->op0);
4018 			  else
4019 			    ref1->op2 = wide_int_to_tree (TREE_TYPE (ref2->op2),
4020 							  ref1->op2);
4021 			}
4022 		      else
4023 			{
4024 			  ref->set = 0;
4025 			  if (ref1->opcode == MEM_REF)
4026 			    ref1->op0 = wide_int_to_tree (ptr_type_node,
4027 							  ref1->op0);
4028 			  else
4029 			    ref1->op2 = wide_int_to_tree (ptr_type_node,
4030 							  ref1->op2);
4031 			}
4032 		      operands.release ();
4033 
4034 		      result = pre_expr_pool.allocate ();
4035 		      result->kind = REFERENCE;
4036 		      result->id = 0;
4037 		      PRE_EXPR_REFERENCE (result) = ref;
4038 		      break;
4039 		    }
4040 
4041 		  default:
4042 		    continue;
4043 		  }
4044 
4045 		get_or_alloc_expression_id (result);
4046 		add_to_value (get_expr_value_id (result), result);
4047 		bitmap_value_insert_into_set (EXP_GEN (block), result);
4048 		continue;
4049 	      }
4050 	    default:
4051 	      break;
4052 	    }
4053 	}
4054 
4055       if (dump_file && (dump_flags & TDF_DETAILS))
4056 	{
4057 	  print_bitmap_set (dump_file, EXP_GEN (block),
4058 			    "exp_gen", block->index);
4059 	  print_bitmap_set (dump_file, PHI_GEN (block),
4060 			    "phi_gen", block->index);
4061 	  print_bitmap_set (dump_file, TMP_GEN (block),
4062 			    "tmp_gen", block->index);
4063 	  print_bitmap_set (dump_file, AVAIL_OUT (block),
4064 			    "avail_out", block->index);
4065 	}
4066 
4067       /* Put the dominator children of BLOCK on the worklist of blocks
4068 	 to compute available sets for.  */
4069       for (son = first_dom_son (CDI_DOMINATORS, block);
4070 	   son;
4071 	   son = next_dom_son (CDI_DOMINATORS, son))
4072 	worklist[sp++] = son;
4073     }
4074 
4075   free (worklist);
4076 }
4077 
4078 
4079 /* Local state for the eliminate domwalk.  */
4080 static vec<gimple *> el_to_remove;
4081 static vec<gimple *> el_to_fixup;
4082 static unsigned int el_todo;
4083 static vec<tree> el_avail;
4084 static vec<tree> el_avail_stack;
4085 
4086 /* Return a leader for OP that is available at the current point of the
4087    eliminate domwalk.  */
4088 
4089 static tree
4090 eliminate_avail (tree op)
4091 {
4092   tree valnum = VN_INFO (op)->valnum;
4093   if (TREE_CODE (valnum) == SSA_NAME)
4094     {
4095       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
4096 	return valnum;
4097       if (el_avail.length () > SSA_NAME_VERSION (valnum))
4098 	return el_avail[SSA_NAME_VERSION (valnum)];
4099     }
4100   else if (is_gimple_min_invariant (valnum))
4101     return valnum;
4102   return NULL_TREE;
4103 }
4104 
4105 /* At the current point of the eliminate domwalk make OP available.  */
4106 
4107 static void
4108 eliminate_push_avail (tree op)
4109 {
4110   tree valnum = VN_INFO (op)->valnum;
4111   if (TREE_CODE (valnum) == SSA_NAME)
4112     {
4113       if (el_avail.length () <= SSA_NAME_VERSION (valnum))
4114 	el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4115       tree pushop = op;
4116       if (el_avail[SSA_NAME_VERSION (valnum)])
4117 	pushop = el_avail[SSA_NAME_VERSION (valnum)];
4118       el_avail_stack.safe_push (pushop);
4119       el_avail[SSA_NAME_VERSION (valnum)] = op;
4120     }
4121 }
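
/* Together eliminate_avail and eliminate_push_avail implement a
   scoped value -> leader map over the dominator walk: a definition
   pushes itself as the current leader for its value number and
   after_dom_children below pops el_avail_stack to restore the
   previous leader.  A minimal usage sketch (hypothetical names):

     eliminate_push_avail (lhs_1);		at a definition
     tree leader = eliminate_avail (use_2);	at a later use

   Both map through VN_INFO (op)->valnum, so all SSA names of one
   value share the slot el_avail[SSA_NAME_VERSION (valnum)].  */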
4122 
4123 /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
4124    the leader for the expression if insertion was successful.  */
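
/* For example (a hypothetical sketch): if SCCVN recorded for VAL the
   single expression

     val_1 = (int) x_2;

   and x_2 has an available leader l_3, this inserts l_4 = (int) l_3
   before *GSI and returns l_4 as the new leader.  */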
4125 
4126 static tree
4127 eliminate_insert (gimple_stmt_iterator *gsi, tree val)
4128 {
4129   /* We can insert a sequence with a single assignment only.  */
4130   gimple_seq stmts = VN_INFO (val)->expr;
4131   if (!gimple_seq_singleton_p (stmts))
4132     return NULL_TREE;
4133   gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
4134   if (!stmt
4135       || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
4136 	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
4137 	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
4138 	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
4139 	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
4140     return NULL_TREE;
4141 
4142   tree op = gimple_assign_rhs1 (stmt);
4143   if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
4144       || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4145     op = TREE_OPERAND (op, 0);
4146   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
4147   if (!leader)
4148     return NULL_TREE;
4149 
4150   tree res;
4151   stmts = NULL;
4152   if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4153     res = gimple_build (&stmts, BIT_FIELD_REF,
4154 			TREE_TYPE (val), leader,
4155 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
4156 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
4157   else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
4158     res = gimple_build (&stmts, BIT_AND_EXPR,
4159 			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
4160   else
4161     res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
4162 			TREE_TYPE (val), leader);
4163   if (TREE_CODE (res) != SSA_NAME
4164       || SSA_NAME_IS_DEFAULT_DEF (res)
4165       || gimple_bb (SSA_NAME_DEF_STMT (res)))
4166     {
4167       gimple_seq_discard (stmts);
4168 
4169       /* During propagation we have to treat SSA info conservatively
4170          and thus we can end up simplifying the inserted expression
4171 	 at elimination time to something not defined in stmts.  */
4172       /* But then this is a redundancy we failed to detect.  Which means
4173          res now has two values.  That doesn't play well with how
4174 	 we track availability here, so give up.  */
4175       if (dump_file && (dump_flags & TDF_DETAILS))
4176 	{
4177 	  if (TREE_CODE (res) == SSA_NAME)
4178 	    res = eliminate_avail (res);
4179 	  if (res)
4180 	    {
4181 	      fprintf (dump_file, "Failed to insert expression for value ");
4182 	      print_generic_expr (dump_file, val, 0);
4183 	      fprintf (dump_file, " which is really fully redundant to ");
4184 	      print_generic_expr (dump_file, res, 0);
4185 	      fprintf (dump_file, "\n");
4186 	    }
4187 	}
4188 
4189       return NULL_TREE;
4190     }
4191   else
4192     {
4193       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4194       VN_INFO_GET (res)->valnum = val;
4195 
4196       if (TREE_CODE (leader) == SSA_NAME)
4197 	gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);
4198     }
4199 
4200   pre_stats.insertions++;
4201   if (dump_file && (dump_flags & TDF_DETAILS))
4202     {
4203       fprintf (dump_file, "Inserted ");
4204       print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0, 0);
4205     }
4206 
4207   return res;
4208 }
4209 
4210 class eliminate_dom_walker : public dom_walker
4211 {
4212 public:
4213   eliminate_dom_walker (cdi_direction direction, bool do_pre_)
4214       : dom_walker (direction), do_pre (do_pre_) {}
4215 
4216   virtual edge before_dom_children (basic_block);
4217   virtual void after_dom_children (basic_block);
4218 
4219   bool do_pre;
4220 };
4221 
4222 /* Perform elimination for the basic-block B during the domwalk.  */
4223 
4224 edge
4225 eliminate_dom_walker::before_dom_children (basic_block b)
4226 {
4227   /* Mark new bb.  */
4228   el_avail_stack.safe_push (NULL_TREE);
4229 
4230   /* ???  If we do nothing for unreachable blocks then this will confuse
4231      tailmerging.  Eventually we can reduce its reliance on SCCVN now
4232      that we fully copy/constant-propagate (most) things.  */
4233 
4234   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4235     {
4236       gphi *phi = gsi.phi ();
4237       tree res = PHI_RESULT (phi);
4238 
4239       if (virtual_operand_p (res))
4240 	{
4241 	  gsi_next (&gsi);
4242 	  continue;
4243 	}
4244 
4245       tree sprime = eliminate_avail (res);
4246       if (sprime
4247 	  && sprime != res)
4248 	{
4249 	  if (dump_file && (dump_flags & TDF_DETAILS))
4250 	    {
4251 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
4252 	      print_generic_expr (dump_file, res, 0);
4253 	      fprintf (dump_file, " with ");
4254 	      print_generic_expr (dump_file, sprime, 0);
4255 	      fprintf (dump_file, "\n");
4256 	    }
4257 
4258 	  /* If we inserted this PHI node ourself, it's not an elimination.  */
4259 	  if (inserted_exprs
4260 	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4261 	    pre_stats.phis--;
4262 	  else
4263 	    pre_stats.eliminations++;
4264 
4265 	  /* If we will propagate into all uses don't bother to do
4266 	     anything.  */
4267 	  if (may_propagate_copy (res, sprime))
4268 	    {
4269 	      /* Mark the PHI for removal.  */
4270 	      el_to_remove.safe_push (phi);
4271 	      gsi_next (&gsi);
4272 	      continue;
4273 	    }
4274 
4275 	  remove_phi_node (&gsi, false);
4276 
4277 	  if (inserted_exprs
4278 	      && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4279 	      && TREE_CODE (sprime) == SSA_NAME)
4280 	    gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4281 
4282 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4283 	    sprime = fold_convert (TREE_TYPE (res), sprime);
4284 	  gimple *stmt = gimple_build_assign (res, sprime);
4285 	  /* ???  It cannot yet be necessary (DOM walk).  */
4286 	  gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4287 
4288 	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
4289 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4290 	  continue;
4291 	}
4292 
4293       eliminate_push_avail (res);
4294       gsi_next (&gsi);
4295     }
4296 
4297   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
4298        !gsi_end_p (gsi);
4299        gsi_next (&gsi))
4300     {
4301       tree sprime = NULL_TREE;
4302       gimple *stmt = gsi_stmt (gsi);
4303       tree lhs = gimple_get_lhs (stmt);
4304       if (lhs && TREE_CODE (lhs) == SSA_NAME
4305 	  && !gimple_has_volatile_ops (stmt)
4306 	  /* See PR43491.  Do not replace a global register variable when
4307 	     it is the RHS of an assignment.  Do replace local register
4308 	     variables since gcc does not guarantee a local variable will
4309 	     be allocated in register.
4310 	     ???  The fix isn't effective here.  This should instead
4311 	     be ensured by not value-numbering them the same but treating
4312 	     them like volatiles?  */
4313 	  && !(gimple_assign_single_p (stmt)
4314 	       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4315 		   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4316 		   && is_global_var (gimple_assign_rhs1 (stmt)))))
4317 	{
4318 	  sprime = eliminate_avail (lhs);
4319 	  if (!sprime)
4320 	    {
4321 	      /* If there is no existing usable leader but SCCVN thinks
4322 		 it has an expression it wants to use as replacement,
4323 		 insert that.  */
4324 	      tree val = VN_INFO (lhs)->valnum;
4325 	      if (val != VN_TOP
4326 		  && TREE_CODE (val) == SSA_NAME
4327 		  && VN_INFO (val)->needs_insertion
4328 		  && VN_INFO (val)->expr != NULL
4329 		  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
4330 		eliminate_push_avail (sprime);
4331 	    }
4332 
4333 	  /* If this now constitutes a copy duplicate points-to
4334 	     and range info appropriately.  This is especially
4335 	     important for inserted code.  See tree-ssa-copy.c
4336 	     for similar code.  */
4337 	  if (sprime
4338 	      && TREE_CODE (sprime) == SSA_NAME)
4339 	    {
4340 	      basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4341 	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
4342 		  && VN_INFO_PTR_INFO (lhs)
4343 		  && ! VN_INFO_PTR_INFO (sprime))
4344 		{
4345 		  duplicate_ssa_name_ptr_info (sprime,
4346 					       VN_INFO_PTR_INFO (lhs));
4347 		  if (b != sprime_b)
4348 		    mark_ptr_info_alignment_unknown
4349 			(SSA_NAME_PTR_INFO (sprime));
4350 		}
4351 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
4352 		       && VN_INFO_RANGE_INFO (lhs)
4353 		       && ! VN_INFO_RANGE_INFO (sprime)
4354 		       && b == sprime_b)
4355 		duplicate_ssa_name_range_info (sprime,
4356 					       VN_INFO_RANGE_TYPE (lhs),
4357 					       VN_INFO_RANGE_INFO (lhs));
4358 	    }
4359 
4360 	  /* Inhibit the use of an inserted PHI on a loop header when
4361 	     the address of the memory reference is a simple induction
4362 	     variable.  In other cases the vectorizer won't do anything
4363 	     anyway (either it's loop invariant or a complicated
4364 	     expression).  */
4365 	  if (sprime
4366 	      && TREE_CODE (sprime) == SSA_NAME
4367 	      && do_pre
4368 	      && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
4369 	      && loop_outer (b->loop_father)
4370 	      && has_zero_uses (sprime)
4371 	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
4372 	      && gimple_assign_load_p (stmt))
4373 	    {
4374 	      gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
4375 	      basic_block def_bb = gimple_bb (def_stmt);
4376 	      if (gimple_code (def_stmt) == GIMPLE_PHI
4377 		  && def_bb->loop_father->header == def_bb)
4378 		{
4379 		  loop_p loop = def_bb->loop_father;
4380 		  ssa_op_iter iter;
4381 		  tree op;
4382 		  bool found = false;
4383 		  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4384 		    {
4385 		      affine_iv iv;
4386 		      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
4387 		      if (def_bb
4388 			  && flow_bb_inside_loop_p (loop, def_bb)
4389 			  && simple_iv (loop, loop, op, &iv, true))
4390 			{
4391 			  found = true;
4392 			  break;
4393 			}
4394 		    }
4395 		  if (found)
4396 		    {
4397 		      if (dump_file && (dump_flags & TDF_DETAILS))
4398 			{
4399 			  fprintf (dump_file, "Not replacing ");
4400 			  print_gimple_expr (dump_file, stmt, 0, 0);
4401 			  fprintf (dump_file, " with ");
4402 			  print_generic_expr (dump_file, sprime, 0);
4403 			  fprintf (dump_file, " which would add a loop"
4404 				   " carried dependence to loop %d\n",
4405 				   loop->num);
4406 			}
4407 		      /* Don't keep sprime available.  */
4408 		      sprime = NULL_TREE;
4409 		    }
4410 		}
4411 	    }
4412 
4413 	  if (sprime)
4414 	    {
4415 	      /* If we can propagate the value computed for LHS into
4416 		 all uses don't bother doing anything with this stmt.  */
4417 	      if (may_propagate_copy (lhs, sprime))
4418 		{
4419 		  /* Mark it for removal.  */
4420 		  el_to_remove.safe_push (stmt);
4421 
4422 		  /* ???  Don't count copy/constant propagations.  */
4423 		  if (gimple_assign_single_p (stmt)
4424 		      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4425 			  || gimple_assign_rhs1 (stmt) == sprime))
4426 		    continue;
4427 
4428 		  if (dump_file && (dump_flags & TDF_DETAILS))
4429 		    {
4430 		      fprintf (dump_file, "Replaced ");
4431 		      print_gimple_expr (dump_file, stmt, 0, 0);
4432 		      fprintf (dump_file, " with ");
4433 		      print_generic_expr (dump_file, sprime, 0);
4434 		      fprintf (dump_file, " in all uses of ");
4435 		      print_gimple_stmt (dump_file, stmt, 0, 0);
4436 		    }
4437 
4438 		  pre_stats.eliminations++;
4439 		  continue;
4440 		}
4441 
4442 	      /* If this is an assignment from our leader (which
4443 	         happens when the value-number is a constant)
4444 		 then there is nothing to do.  */
4445 	      if (gimple_assign_single_p (stmt)
4446 		  && sprime == gimple_assign_rhs1 (stmt))
4447 		continue;
4448 
4449 	      /* Else replace its RHS.  */
4450 	      bool can_make_abnormal_goto
4451 		  = is_gimple_call (stmt)
4452 		  && stmt_can_make_abnormal_goto (stmt);
4453 
4454 	      if (dump_file && (dump_flags & TDF_DETAILS))
4455 		{
4456 		  fprintf (dump_file, "Replaced ");
4457 		  print_gimple_expr (dump_file, stmt, 0, 0);
4458 		  fprintf (dump_file, " with ");
4459 		  print_generic_expr (dump_file, sprime, 0);
4460 		  fprintf (dump_file, " in ");
4461 		  print_gimple_stmt (dump_file, stmt, 0, 0);
4462 		}
4463 
4464 	      if (TREE_CODE (sprime) == SSA_NAME)
4465 		gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4466 				NECESSARY, true);
4467 
4468 	      pre_stats.eliminations++;
4469 	      gimple *orig_stmt = stmt;
4470 	      if (!useless_type_conversion_p (TREE_TYPE (lhs),
4471 					      TREE_TYPE (sprime)))
4472 		sprime = fold_convert (TREE_TYPE (lhs), sprime);
4473 	      tree vdef = gimple_vdef (stmt);
4474 	      tree vuse = gimple_vuse (stmt);
4475 	      propagate_tree_value_into_stmt (&gsi, sprime);
4476 	      stmt = gsi_stmt (gsi);
4477 	      update_stmt (stmt);
4478 	      if (vdef != gimple_vdef (stmt))
4479 		VN_INFO (vdef)->valnum = vuse;
4480 
4481 	      /* If we removed EH side-effects from the statement, clean
4482 		 its EH information.  */
4483 	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4484 		{
4485 		  bitmap_set_bit (need_eh_cleanup,
4486 				  gimple_bb (stmt)->index);
4487 		  if (dump_file && (dump_flags & TDF_DETAILS))
4488 		    fprintf (dump_file, "  Removed EH side-effects.\n");
4489 		}
4490 
4491 	      /* Likewise for AB side-effects.  */
4492 	      if (can_make_abnormal_goto
4493 		  && !stmt_can_make_abnormal_goto (stmt))
4494 		{
4495 		  bitmap_set_bit (need_ab_cleanup,
4496 				  gimple_bb (stmt)->index);
4497 		  if (dump_file && (dump_flags & TDF_DETAILS))
4498 		    fprintf (dump_file, "  Removed AB side-effects.\n");
4499 		}
4500 
4501 	      continue;
4502 	    }
4503 	}
4504 
4505       /* If the statement is a scalar store, see if the expression
4506          has the same value number as its rhs.  If so, the store is
4507          dead.  */
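      /* For example (an illustrative sketch), in

	   *p_1 = x_2;
	   ...
	   *p_1 = x_2;

	 the second store is dead provided nothing in between clobbers
	 *p_1 and the alias-set check below succeeds.  */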
4508       if (gimple_assign_single_p (stmt)
4509 	  && !gimple_has_volatile_ops (stmt)
4510 	  && !is_gimple_reg (gimple_assign_lhs (stmt))
4511 	  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4512 	      || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4513 	{
4514 	  tree val;
4515 	  tree rhs = gimple_assign_rhs1 (stmt);
4516 	  vn_reference_t vnresult;
4517 	  val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
4518 				     &vnresult, false);
4519 	  if (TREE_CODE (rhs) == SSA_NAME)
4520 	    rhs = VN_INFO (rhs)->valnum;
4521 	  if (val
4522 	      && operand_equal_p (val, rhs, 0))
4523 	    {
4524 	      /* We can only remove the later store if the former aliases
4525 		 at least all accesses the later one does or if the store
4526 		 was to readonly memory storing the same value.  */
4527 	      alias_set_type set = get_alias_set (lhs);
4528 	      if (! vnresult
4529 		  || vnresult->set == set
4530 		  || alias_set_subset_of (set, vnresult->set))
4531 		{
4532 		  if (dump_file && (dump_flags & TDF_DETAILS))
4533 		    {
4534 		      fprintf (dump_file, "Deleted redundant store ");
4535 		      print_gimple_stmt (dump_file, stmt, 0, 0);
4536 		    }
4537 
4538 		  /* Queue stmt for removal.  */
4539 		  el_to_remove.safe_push (stmt);
4540 		  continue;
4541 		}
4542 	    }
4543 	}
4544 
4545       /* If this is a control statement for which value numbering
4546 	 left edges unexecuted, force the condition in a way
4547 	 consistent with that.  */
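      /* E.g. (sketch): if value numbering marked only the true edge of
	 "if (x_1 != 0)" as executable, the condition is rewritten to a
	 constant-true one so CFG cleanup can remove the dead edge.  */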
4548       if (gcond *cond = dyn_cast <gcond *> (stmt))
4549 	{
4550 	  if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
4551 	      ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
4552 	    {
4553               if (dump_file && (dump_flags & TDF_DETAILS))
4554                 {
4555                   fprintf (dump_file, "Removing unexecutable edge from ");
4556                   print_gimple_stmt (dump_file, stmt, 0, 0);
4557                 }
4558 	      if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
4559 		  == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
4560 		gimple_cond_make_true (cond);
4561 	      else
4562 		gimple_cond_make_false (cond);
4563 	      update_stmt (cond);
4564 	      el_todo |= TODO_cleanup_cfg;
4565 	      continue;
4566 	    }
4567 	}
4568 
4569       bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
4570       bool was_noreturn = (is_gimple_call (stmt)
4571 			   && gimple_call_noreturn_p (stmt));
4572       tree vdef = gimple_vdef (stmt);
4573       tree vuse = gimple_vuse (stmt);
4574 
4575       /* If we didn't replace the whole stmt (or propagate the result
4576          into all uses), replace all uses on this stmt with their
4577 	 leaders.  */
4578       bool modified = false;
4579       use_operand_p use_p;
4580       ssa_op_iter iter;
4581       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4582 	{
4583 	  tree use = USE_FROM_PTR (use_p);
4584 	  /* ???  The call code above leaves stmt operands un-updated.  */
4585 	  if (TREE_CODE (use) != SSA_NAME)
4586 	    continue;
4587 	  tree sprime = eliminate_avail (use);
4588 	  if (sprime && sprime != use
4589 	      && may_propagate_copy (use, sprime)
4590 	      /* We substitute into debug stmts to avoid excessive
4591 	         debug temporaries created by removed stmts, but we need
4592 		 to avoid doing so for inserted sprimes as we never want
4593 		 to create debug temporaries for them.  */
4594 	      && (!inserted_exprs
4595 		  || TREE_CODE (sprime) != SSA_NAME
4596 		  || !is_gimple_debug (stmt)
4597 		  || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
4598 	    {
4599 	      propagate_value (use_p, sprime);
4600 	      modified = true;
4601 	      if (TREE_CODE (sprime) == SSA_NAME
4602 		  && !is_gimple_debug (stmt))
4603 		gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4604 				NECESSARY, true);
4605 	    }
4606 	}
4607 
4608       /* Fold the stmt if modified; this canonicalizes MEM_REFs we
4609          propagated into, which is a requirement for the IPA devirt machinery.  */
4610       gimple *old_stmt = stmt;
4611       if (modified)
4612 	{
4613 	  /* If a formerly non-invariant ADDR_EXPR is turned into an
4614 	     invariant one it was on a separate stmt.  */
4615 	  if (gimple_assign_single_p (stmt)
4616 	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
4617 	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
4618 	  if (is_gimple_call (stmt))
4619 	    {
4620 	      /* ???  Only fold calls inplace for now, this may create new
4621 		 SSA names which in turn will confuse free_scc_vn SSA name
4622 		 release code.  */
4623 	      fold_stmt_inplace (&gsi);
4624 	    }
4625 	  else
4626 	    {
4627 	      fold_stmt (&gsi);
4628 	      stmt = gsi_stmt (gsi);
4629 	    }
4630 	}
4631 
4632       /* Visit indirect calls and turn them into direct calls if
4633 	 possible using the devirtualization machinery.  Do this before
4634 	 checking for required EH/abnormal/noreturn cleanup as devirt
4635 	 may expose more of those.  */
4636       if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4637 	{
4638 	  tree fn = gimple_call_fn (call_stmt);
4639 	  if (fn
4640 	      && flag_devirtualize
4641 	      && virtual_method_call_p (fn))
4642 	    {
4643 	      tree otr_type = obj_type_ref_class (fn);
4644 	      unsigned HOST_WIDE_INT otr_tok
4645 		= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
4646 	      tree instance;
4647 	      ipa_polymorphic_call_context context (current_function_decl,
4648 						    fn, stmt, &instance);
4649 	      context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
4650 					otr_type, stmt);
4651 	      bool final;
4652 	      vec <cgraph_node *> targets
4653 		= possible_polymorphic_call_targets (obj_type_ref_class (fn),
4654 						     otr_tok, context, &final);
4655 	      if (dump_file)
4656 		dump_possible_polymorphic_call_targets (dump_file,
4657 							obj_type_ref_class (fn),
4658 							otr_tok, context);
4659 	      if (final && targets.length () <= 1 && dbg_cnt (devirt))
4660 		{
4661 		  tree fn;
4662 		  if (targets.length () == 1)
4663 		    fn = targets[0]->decl;
4664 		  else
4665 		    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4666 		  if (dump_enabled_p ())
4667 		    {
4668 		      location_t loc = gimple_location (stmt);
4669 		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4670 				       "converting indirect call to "
4671 				       "function %s\n",
4672 				       lang_hooks.decl_printable_name (fn, 2));
4673 		    }
4674 		  gimple_call_set_fndecl (call_stmt, fn);
4675 		  /* If changing the call to __builtin_unreachable
4676 		     or similar noreturn function, adjust gimple_call_fntype
4677 		     too.  */
4678 		  if (gimple_call_noreturn_p (call_stmt)
4679 		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
4680 		      && TYPE_ARG_TYPES (TREE_TYPE (fn))
4681 		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
4682 			  == void_type_node))
4683 		    gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
4684 		  maybe_remove_unused_call_args (cfun, call_stmt);
4685 		  modified = true;
4686 		}
4687 	    }
4688 	}
4689 
4690       if (modified)
4691 	{
4692 	  /* When changing a call into a noreturn call, cfg cleanup
4693 	     is needed to fix up the noreturn call.  */
4694 	  if (!was_noreturn
4695 	      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
4696 	    el_to_fixup.safe_push (stmt);
4697 	  /* When changing a condition or switch into one we know what
4698 	     edge will be executed, schedule a cfg cleanup.  */
4699 	  if ((gimple_code (stmt) == GIMPLE_COND
4700 	       && (gimple_cond_true_p (as_a <gcond *> (stmt))
4701 		   || gimple_cond_false_p (as_a <gcond *> (stmt))))
4702 	      || (gimple_code (stmt) == GIMPLE_SWITCH
4703 		  && TREE_CODE (gimple_switch_index
4704 				  (as_a <gswitch *> (stmt))) == INTEGER_CST))
4705 	    el_todo |= TODO_cleanup_cfg;
4706 	  /* If we removed EH side-effects from the statement, clean
4707 	     its EH information.  */
4708 	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
4709 	    {
4710 	      bitmap_set_bit (need_eh_cleanup,
4711 			      gimple_bb (stmt)->index);
4712 	      if (dump_file && (dump_flags & TDF_DETAILS))
4713 		fprintf (dump_file, "  Removed EH side-effects.\n");
4714 	    }
4715 	  /* Likewise for AB side-effects.  */
4716 	  if (can_make_abnormal_goto
4717 	      && !stmt_can_make_abnormal_goto (stmt))
4718 	    {
4719 	      bitmap_set_bit (need_ab_cleanup,
4720 			      gimple_bb (stmt)->index);
4721 	      if (dump_file && (dump_flags & TDF_DETAILS))
4722 		fprintf (dump_file, "  Removed AB side-effects.\n");
4723 	    }
4724 	  update_stmt (stmt);
4725 	  if (vdef != gimple_vdef (stmt))
4726 	    VN_INFO (vdef)->valnum = vuse;
4727 	}
4728 
4729       /* Make new values available - for fully redundant LHS we
4730          continue with the next stmt above and skip this.  */
4731       def_operand_p defp;
4732       FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4733 	eliminate_push_avail (DEF_FROM_PTR (defp));
4734     }
4735 
4736   /* Replace destination PHI arguments.  */
4737   edge_iterator ei;
4738   edge e;
4739   FOR_EACH_EDGE (e, ei, b->succs)
4740     {
4741       for (gphi_iterator gsi = gsi_start_phis (e->dest);
4742 	   !gsi_end_p (gsi);
4743 	   gsi_next (&gsi))
4744 	{
4745 	  gphi *phi = gsi.phi ();
4746 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
4747 	  tree arg = USE_FROM_PTR (use_p);
4748 	  if (TREE_CODE (arg) != SSA_NAME
4749 	      || virtual_operand_p (arg))
4750 	    continue;
4751 	  tree sprime = eliminate_avail (arg);
4752 	  if (sprime && may_propagate_copy (arg, sprime))
4753 	    {
4754 	      propagate_value (use_p, sprime);
4755 	      if (TREE_CODE (sprime) == SSA_NAME)
4756 		gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4757 	    }
4758 	}
4759     }
4760   return NULL;
4761 }
4762 
4763 /* Make no longer available leaders no longer available.  */
4764 
4765 void
4766 eliminate_dom_walker::after_dom_children (basic_block)
4767 {
4768   tree entry;
4769   while ((entry = el_avail_stack.pop ()) != NULL_TREE)
4770     {
4771       tree valnum = VN_INFO (entry)->valnum;
4772       tree old = el_avail[SSA_NAME_VERSION (valnum)];
4773       if (old == entry)
4774 	el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
4775       else
4776 	el_avail[SSA_NAME_VERSION (valnum)] = entry;
4777     }
4778 }
4779 
4780 /* Eliminate fully redundant computations.  */
4781 
4782 static unsigned int
4783 eliminate (bool do_pre)
4784 {
4785   gimple_stmt_iterator gsi;
4786   gimple *stmt;
4787 
4788   need_eh_cleanup = BITMAP_ALLOC (NULL);
4789   need_ab_cleanup = BITMAP_ALLOC (NULL);
4790 
4791   el_to_remove.create (0);
4792   el_to_fixup.create (0);
4793   el_todo = 0;
4794   el_avail.create (num_ssa_names);
4795   el_avail_stack.create (0);
4796 
4797   eliminate_dom_walker (CDI_DOMINATORS,
4798 			do_pre).walk (cfun->cfg->x_entry_block_ptr);
4799 
4800   el_avail.release ();
4801   el_avail_stack.release ();
4802 
4803   /* We cannot remove stmts during BB walk, especially not release SSA
4804      names there as this confuses the VN machinery.  The stmts ending
4805      up in el_to_remove are either stores or simple copies.
4806      Remove stmts in reverse order to make debug stmt creation possible.  */
4807   while (!el_to_remove.is_empty ())
4808     {
4809       stmt = el_to_remove.pop ();
4810 
4811       if (dump_file && (dump_flags & TDF_DETAILS))
4812 	{
4813 	  fprintf (dump_file, "Removing dead stmt ");
4814 	  print_gimple_stmt (dump_file, stmt, 0, 0);
4815 	}
4816 
4817       tree lhs;
4818       if (gimple_code (stmt) == GIMPLE_PHI)
4819 	lhs = gimple_phi_result (stmt);
4820       else
4821 	lhs = gimple_get_lhs (stmt);
4822 
4823       if (lhs
4824 	  && inserted_exprs
4825 	  && TREE_CODE (lhs) == SSA_NAME)
4826 	bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4827 
4828       gsi = gsi_for_stmt (stmt);
4829       if (gimple_code (stmt) == GIMPLE_PHI)
4830 	remove_phi_node (&gsi, true);
4831       else
4832 	{
4833 	  basic_block bb = gimple_bb (stmt);
4834 	  unlink_stmt_vdef (stmt);
4835 	  if (gsi_remove (&gsi, true))
4836 	    bitmap_set_bit (need_eh_cleanup, bb->index);
4837 	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
4838 	    bitmap_set_bit (need_ab_cleanup, bb->index);
4839 	  release_defs (stmt);
4840 	}
4841 
4842       /* Removing a stmt may expose a forwarder block.  */
4843       el_todo |= TODO_cleanup_cfg;
4844     }
4845   el_to_remove.release ();
4846 
4847   /* Fixup stmts that became noreturn calls.  This may require splitting
4848      blocks and thus isn't possible during the dominator walk.  Do this
4849      in reverse order so we don't inadvertedly remove a stmt we want to
4850      fixup by visiting a dominating now noreturn call first.  */
4851   while (!el_to_fixup.is_empty ())
4852     {
4853       stmt = el_to_fixup.pop ();
4854 
4855       if (dump_file && (dump_flags & TDF_DETAILS))
4856 	{
4857 	  fprintf (dump_file, "Fixing up noreturn call ");
4858 	  print_gimple_stmt (dump_file, stmt, 0, 0);
4859 	}
4860 
4861       if (fixup_noreturn_call (stmt))
4862 	el_todo |= TODO_cleanup_cfg;
4863     }
4864   el_to_fixup.release ();
4865 
4866   return el_todo;
4867 }
4868 
4869 /* Perform CFG cleanups made necessary by elimination.  */
4870 
4871 static unsigned
4872 fini_eliminate (void)
4873 {
4874   bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4875   bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4876 
4877   if (do_eh_cleanup)
4878     gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4879 
4880   if (do_ab_cleanup)
4881     gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4882 
4883   BITMAP_FREE (need_eh_cleanup);
4884   BITMAP_FREE (need_ab_cleanup);
4885 
4886   if (do_eh_cleanup || do_ab_cleanup)
4887     return TODO_cleanup_cfg;
4888   return 0;
4889 }
4890 
4891 /* Borrow a bit of tree-ssa-dce.c for the moment.
4892    XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4893    this may be a bit faster, and we may want critical edges kept split.  */
4894 
4895 /* If OP's defining statement has not already been determined to be necessary,
4896    mark that statement necessary.  Return the stmt if it is newly
4897    necessary.  */
4898 
4899 static inline gimple *
4900 mark_operand_necessary (tree op)
4901 {
4902   gimple *stmt;
4903 
4904   gcc_assert (op);
4905 
4906   if (TREE_CODE (op) != SSA_NAME)
4907     return NULL;
4908 
4909   stmt = SSA_NAME_DEF_STMT (op);
4910   gcc_assert (stmt);
4911 
4912   if (gimple_plf (stmt, NECESSARY)
4913       || gimple_nop_p (stmt))
4914     return NULL;
4915 
4916   gimple_set_plf (stmt, NECESSARY, true);
4917   return stmt;
4918 }
4919 
4920 /* Because we don't follow the standard PRE algorithm exactly, sometimes
4921    decide not to insert PHI nodes, and because value numbering of casts isn't
4922    perfect, we sometimes end up inserting dead code.  This simple DCE-like
4923    pass removes any insertions we made that weren't actually used.  */
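
/* As a made-up example, insertion might create

     pretmp_5 = a_1 + b_2;

   on an edge to make a partially redundant computation fully redundant;
   if elimination then never uses pretmp_5, the statement is dead and
   the mark-and-sweep below removes it.  */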
4924 
4925 static void
4926 remove_dead_inserted_code (void)
4927 {
4928   bitmap worklist;
4929   unsigned i;
4930   bitmap_iterator bi;
4931   gimple *t;
4932 
4933   worklist = BITMAP_ALLOC (NULL);
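  /* Seed the worklist with the inserted names whose defining statements
     were already marked NECESSARY during elimination.  */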
4934   EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4935     {
4936       t = SSA_NAME_DEF_STMT (ssa_name (i));
4937       if (gimple_plf (t, NECESSARY))
4938 	bitmap_set_bit (worklist, i);
4939     }
4940   while (!bitmap_empty_p (worklist))
4941     {
4942       i = bitmap_first_set_bit (worklist);
4943       bitmap_clear_bit (worklist, i);
4944       t = SSA_NAME_DEF_STMT (ssa_name (i));
4945 
4946       /* PHI nodes are somewhat special in that each PHI alternative has
4947 	 data and control dependencies.  All the statements feeding the
4948 	 PHI node's arguments are always necessary. */
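      /* E.g. (with illustrative names), for x_3 = PHI <x_1(2), x_2(4)>,
	 marking x_3 necessary also requires keeping the definitions of
	 x_1 and x_2.  */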
4949       if (gimple_code (t) == GIMPLE_PHI)
4950 	{
4951 	  unsigned k;
4952 
4953 	  for (k = 0; k < gimple_phi_num_args (t); k++)
4954 	    {
4955 	      tree arg = PHI_ARG_DEF (t, k);
4956 	      if (TREE_CODE (arg) == SSA_NAME)
4957 		{
4958 		  gimple *n = mark_operand_necessary (arg);
4959 		  if (n)
4960 		    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4961 		}
4962 	    }
4963 	}
4964       else
4965 	{
4966 	  /* Propagate through the operands.  Examine all the USE, VUSE and
4967 	     VDEF operands in this statement.  Mark all the statements
4968 	     which feed this statement's uses as necessary.  */
4969 	  ssa_op_iter iter;
4970 	  tree use;
4971 
4972 	  /* The operands of VDEF expressions are also needed as they
4973 	     represent potential definitions that may reach this
4974 	     statement (VDEF operands allow us to follow def-def
4975 	     links).  */
4976 
4977 	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4978 	    {
4979 	      gimple *n = mark_operand_necessary (use);
4980 	      if (n)
4981 		bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4982 	    }
4983 	}
4984     }
4985 
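  /* Sweep: remove every insertion that was never marked necessary.  */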
4986   EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4987     {
4988       t = SSA_NAME_DEF_STMT (ssa_name (i));
4989       if (!gimple_plf (t, NECESSARY))
4990 	{
4991 	  gimple_stmt_iterator gsi;
4992 
4993 	  if (dump_file && (dump_flags & TDF_DETAILS))
4994 	    {
4995 	      fprintf (dump_file, "Removing unnecessary insertion:");
4996 	      print_gimple_stmt (dump_file, t, 0, 0);
4997 	    }
4998 
4999 	  gsi = gsi_for_stmt (t);
5000 	  if (gimple_code (t) == GIMPLE_PHI)
5001 	    remove_phi_node (&gsi, true);
5002 	  else
5003 	    {
5004 	      gsi_remove (&gsi, true);
5005 	      release_defs (t);
5006 	    }
5007 	}
5008     }
5009   BITMAP_FREE (worklist);
5010 }
5011 
5012 
5013 /* Initialize data structures used by PRE.  */
5014 
5015 static void
5016 init_pre (void)
5017 {
5018   basic_block bb;
5019 
5020   next_expression_id = 1;
5021   expressions.create (0);
5022   expressions.safe_push (NULL);
5023   value_expressions.create (get_max_value_id () + 1);
5024   value_expressions.safe_grow_cleared (get_max_value_id () + 1);
5025   name_to_id.create (0);
5026 
5027   inserted_exprs = BITMAP_ALLOC (NULL);
5028 
5029   connect_infinite_loops_to_exit ();
5030   memset (&pre_stats, 0, sizeof (pre_stats));
5031 
5032   alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
5033 
5034   calculate_dominance_info (CDI_DOMINATORS);
5035 
5036   bitmap_obstack_initialize (&grand_bitmap_obstack);
5037   phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
5038   expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
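  /* Allocate the per-basic-block sets used by the PRE dataflow
     computation.  */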
5039   FOR_ALL_BB_FN (bb, cfun)
5040     {
5041       EXP_GEN (bb) = bitmap_set_new ();
5042       PHI_GEN (bb) = bitmap_set_new ();
5043       TMP_GEN (bb) = bitmap_set_new ();
5044       AVAIL_OUT (bb) = bitmap_set_new ();
5045     }
5046 }
5047 
5048 
5049 /* Deallocate data structures used by PRE.  */
5050 
5051 static void
5052 fini_pre ()
5053 {
5054   value_expressions.release ();
5055   BITMAP_FREE (inserted_exprs);
5056   bitmap_obstack_release (&grand_bitmap_obstack);
5057   bitmap_set_pool.release ();
5058   pre_expr_pool.release ();
5059   delete phi_translate_table;
5060   phi_translate_table = NULL;
5061   delete expression_to_id;
5062   expression_to_id = NULL;
5063   name_to_id.release ();
5064 
5065   free_aux_for_blocks ();
5066 }
5067 
5068 namespace {
5069 
5070 const pass_data pass_data_pre =
5071 {
5072   GIMPLE_PASS, /* type */
5073   "pre", /* name */
5074   OPTGROUP_NONE, /* optinfo_flags */
5075   TV_TREE_PRE, /* tv_id */
5076   /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
5077      pass_pre.  */
5078   ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
5079   0, /* properties_provided */
5080   PROP_no_crit_edges, /* properties_destroyed */
5081   TODO_rebuild_alias, /* todo_flags_start */
5082   0, /* todo_flags_finish */
5083 };
5084 
5085 class pass_pre : public gimple_opt_pass
5086 {
5087 public:
5088   pass_pre (gcc::context *ctxt)
5089     : gimple_opt_pass (pass_data_pre, ctxt)
5090   {}
5091 
5092   /* opt_pass methods: */
5093   virtual bool gate (function *)
5094     { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
5095   virtual unsigned int execute (function *);
5096 
5097 }; // class pass_pre
5098 
5099 unsigned int
5100 pass_pre::execute (function *fun)
5101 {
5102   unsigned int todo = 0;
5103 
5104   do_partial_partial =
5105     flag_tree_partial_pre && optimize_function_for_speed_p (fun);
5106 
5107   /* This has to happen before SCCVN runs because
5108      loop_optimizer_init may create new phis, etc.  */
5109   loop_optimizer_init (LOOPS_NORMAL);
5110 
5111   if (!run_scc_vn (VN_WALK))
5112     {
5113       loop_optimizer_finalize ();
5114       return 0;
5115     }
5116 
5117   init_pre ();
5118   scev_initialize ();
5119 
5120   /* Collect and value number expressions computed in each basic block.  */
5121   compute_avail ();
5122 
5123   /* Insert can get quite slow on an incredibly large number of basic
5124      blocks due to some quadratic behavior.  Until this behavior is
5125      fixed, don't run it when we have an incredibly large number of
5126      bb's.  If we aren't going to run insert, there is no point in
5127      computing ANTIC, either, even though it's plenty fast.  */
5128   if (n_basic_blocks_for_fn (fun) < 4000)
5129     {
5130       compute_antic ();
5131       insert ();
5132     }
5133 
5134   /* Make sure to remove fake edges before committing our inserts.
5135      That way we don't end up with extra critical edges that
5136      we would need to split.  */
5137   remove_fake_exit_edges ();
5138   gsi_commit_edge_inserts ();
5139 
5140   /* eliminate () folds statements, which might (though it should
5141      not...) end up not keeping virtual operands up-to-date.  */
5142   gcc_assert (!need_ssa_update_p (fun));
5143 
5144   /* Remove all the redundant expressions.  */
5145   todo |= eliminate (true);
5146 
5147   statistics_counter_event (fun, "Insertions", pre_stats.insertions);
5148   statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
5149   statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
5150   statistics_counter_event (fun, "New PHIs", pre_stats.phis);
5151   statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
5152 
5153   clear_expression_ids ();
5154   remove_dead_inserted_code ();
5155 
5156   scev_finalize ();
5157   fini_pre ();
5158   todo |= fini_eliminate ();
5159   loop_optimizer_finalize ();
5160 
5161   /* Restore SSA info before tail-merging as that resets it as well.  */
5162   scc_vn_restore_ssa_info ();
5163 
5164   /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
5165      case we can merge the block with the remaining predecessor of the block.
5166      It should either:
5167      - call merge_blocks after each tail merge iteration
5168      - call merge_blocks after all tail merge iterations
5169      - mark TODO_cleanup_cfg when necessary
5170      - share the cfg cleanup with fini_pre.  */
5171   todo |= tail_merge_optimize (todo);
5172 
5173   free_scc_vn ();
5174 
5175   /* Tail merging invalidates the virtual SSA web; together with
5176      cfg-cleanup opportunities exposed by PRE this will wreck the
5177      SSA updating machinery.  So make sure to run update-ssa
5178      manually, before possibly scheduling cfg-cleanup as part of
5179      the todo.  */
5180   update_ssa (TODO_update_ssa_only_virtuals);
5181 
5182   return todo;
5183 }
5184 
5185 } // anon namespace
5186 
5187 gimple_opt_pass *
5188 make_pass_pre (gcc::context *ctxt)
5189 {
5190   return new pass_pre (ctxt);
5191 }
5192 
5193 namespace {
5194 
5195 const pass_data pass_data_fre =
5196 {
5197   GIMPLE_PASS, /* type */
5198   "fre", /* name */
5199   OPTGROUP_NONE, /* optinfo_flags */
5200   TV_TREE_FRE, /* tv_id */
5201   ( PROP_cfg | PROP_ssa ), /* properties_required */
5202   0, /* properties_provided */
5203   0, /* properties_destroyed */
5204   0, /* todo_flags_start */
5205   0, /* todo_flags_finish */
5206 };
5207 
5208 class pass_fre : public gimple_opt_pass
5209 {
5210 public:
5211   pass_fre (gcc::context *ctxt)
5212     : gimple_opt_pass (pass_data_fre, ctxt)
5213   {}
5214 
5215   /* opt_pass methods: */
5216   opt_pass * clone () { return new pass_fre (m_ctxt); }
5217   virtual bool gate (function *) { return flag_tree_fre != 0; }
5218   virtual unsigned int execute (function *);
5219 
5220 }; // class pass_fre
5221 
5222 unsigned int
5223 pass_fre::execute (function *fun)
5224 {
5225   unsigned int todo = 0;
5226 
5227   if (!run_scc_vn (VN_WALKREWRITE))
5228     return 0;
5229 
5230   memset (&pre_stats, 0, sizeof (pre_stats));
5231 
5232   /* Remove all the redundant expressions.  */
5233   todo |= eliminate (false);
5234 
5235   todo |= fini_eliminate ();
5236 
5237   scc_vn_restore_ssa_info ();
5238   free_scc_vn ();
5239 
5240   statistics_counter_event (fun, "Insertions", pre_stats.insertions);
5241   statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
5242 
5243   return todo;
5244 }
5245 
5246 } // anon namespace
5247 
5248 gimple_opt_pass *
5249 make_pass_fre (gcc::context *ctxt)
5250 {
5251   return new pass_fre (ctxt);
5252 }
5253