1 /* SSA-PRE for trees.
2    Copyright (C) 2001-2015 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4    <stevenb@suse.de>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12 
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "predict.h"
38 #include "hard-reg-set.h"
39 #include "function.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "cfganal.h"
43 #include "basic-block.h"
44 #include "gimple-pretty-print.h"
45 #include "tree-inline.h"
46 #include "hash-table.h"
47 #include "tree-ssa-alias.h"
48 #include "internal-fn.h"
49 #include "gimple-fold.h"
50 #include "tree-eh.h"
51 #include "gimple-expr.h"
52 #include "is-a.h"
53 #include "gimple.h"
54 #include "gimplify.h"
55 #include "gimple-iterator.h"
56 #include "gimplify-me.h"
57 #include "gimple-ssa.h"
58 #include "tree-cfg.h"
59 #include "tree-phinodes.h"
60 #include "ssa-iterators.h"
61 #include "stringpool.h"
62 #include "tree-ssanames.h"
63 #include "tree-ssa-loop.h"
64 #include "tree-into-ssa.h"
65 #include "hashtab.h"
66 #include "rtl.h"
67 #include "flags.h"
68 #include "statistics.h"
69 #include "real.h"
70 #include "fixed-value.h"
71 #include "insn-config.h"
72 #include "expmed.h"
73 #include "dojump.h"
74 #include "explow.h"
75 #include "calls.h"
76 #include "emit-rtl.h"
77 #include "varasm.h"
78 #include "stmt.h"
79 #include "expr.h"
80 #include "tree-dfa.h"
81 #include "tree-ssa.h"
82 #include "tree-iterator.h"
83 #include "alloc-pool.h"
84 #include "obstack.h"
85 #include "tree-pass.h"
86 #include "langhooks.h"
87 #include "cfgloop.h"
88 #include "tree-ssa-sccvn.h"
89 #include "tree-scalar-evolution.h"
90 #include "params.h"
91 #include "dbgcnt.h"
92 #include "domwalk.h"
93 #include "hash-map.h"
94 #include "plugin-api.h"
95 #include "ipa-ref.h"
96 #include "cgraph.h"
97 #include "symbol-summary.h"
98 #include "ipa-prop.h"
99 #include "tree-ssa-propagate.h"
100 #include "ipa-utils.h"
101 #include "tree-cfgcleanup.h"
102 
103 /* TODO:
104 
105    1. Avail sets can be shared by making an avail_find_leader that
106       walks up the dominator tree and looks in those avail sets.
107       This might affect code optimality, it's unclear right now.
108    2. Strength reduction can be performed by anticipating expressions
109       we can repair later on.
110    3. We can do back-substitution or smarter value numbering to catch
111       commutative expressions split up over multiple statements.
112 */
113 
114 /* For ease of terminology, "expression node" in the below refers to
115    every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
116    represent the actual statement containing the expressions we care about,
117    and we cache the value number by putting it in the expression.  */
118 
119 /* Basic algorithm
120 
121    First we walk the statements to generate the AVAIL sets, the
122    EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
123    generation of values/expressions by a given block.  We use them
124    when computing the ANTIC sets.  The AVAIL sets consist of
125    SSA_NAME's that represent values, so we know what values are
126    available in what blocks.  AVAIL is a forward dataflow problem.  In
127    SSA, values are never killed, so we don't need a kill set, or a
128    fixpoint iteration, in order to calculate the AVAIL sets.  In
129    traditional parlance, AVAIL sets tell us the up-safety of the
130    expressions/values (the ANTIC sets below correspond to down-safety).
131 
132    Next, we generate the ANTIC sets.  These sets represent the
133    anticipatable expressions.  ANTIC is a backwards dataflow
134    problem.  An expression is anticipatable in a given block if it could
135    be generated in that block.  This means that if we had to insert
136    the value of that expression in that block, we could do so.
137    Calculating the ANTIC sets requires phi translation of
138    expressions, because the flow goes backwards through phis.  We must
139    iterate to a fixpoint of the ANTIC sets, because we have a kill
140    set.  Even in SSA form, values are not live over the entire
141    function, only from their definition point onwards.  So we have to
142    remove values from the ANTIC set once we go past the definition
143    point of the leaders that make them up.
144    compute_antic/compute_antic_aux performs this computation.
145 
146    Third, we perform insertions to make partially redundant
147    expressions fully redundant.
148 
149    An expression is partially redundant (excluding partial
150    anticipation) if:
151 
152    1. It is AVAIL in some, but not all, of the predecessors of a
153       given block.
154    2. It is ANTIC in all the predecessors.
155 
156    In order to make it fully redundant, we insert the expression into
157    the predecessors where it is not available, but is ANTIC.
158 
159    For the partial anticipation case, we only perform insertion if it
160    is partially anticipated in some block, and fully available in all
161    of the predecessors.
162 
163    insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
164    performs these steps.
165 
166    Fourth, we eliminate fully redundant expressions.
167    This is a simple statement walk that replaces redundant
168    calculations with the now available values.  */
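
/* As a small sketch (all SSA names and version numbers here are
   invented for illustration), consider a diamond where a_2 + b_3 is
   computed on only one side:

       if (cond)
         x_1 = a_2 + b_3;
       ...
       y_4 = a_2 + b_3;   <- partially redundant

   The value of a_2 + b_3 is AVAIL on one incoming edge and ANTIC at
   the join, so insertion computes it on the other edge and merges the
   two with a PHI, after which elimination rewrites y_4's statement:

       if (cond)
         x_1 = a_2 + b_3;
       else
         pretmp_5 = a_2 + b_3;   <- inserted
       ...
       prephitmp_6 = PHI <x_1, pretmp_5>
       y_4 = prephitmp_6;        <- now fully redundant

   The "pretmp" prefix matches the temporaries created by this pass
   (see make_temp_ssa_name uses below).  */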
169 
170 /* Representations of value numbers:
171 
172    Value numbers are represented by a representative SSA_NAME.  We
173    will create fake SSA_NAME's in situations where we need a
174    representative but do not have one (because it is a complex
175    expression).  In order to facilitate storing the value numbers in
176    bitmaps, and keep the number of wasted SSA_NAME's down, we also
177    associate a value_id with each value number, and create full blown
178    ssa_name's only where we actually need them (IE in operands of
179    existing expressions).
180 
181    Theoretically you could replace all the value_id's with
182    SSA_NAME_VERSION, but this would allocate a large number of
183    SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
184    It would also require an additional indirection at each point we
185    use the value id.  */
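
/* For example (with made-up numbers): given

       x_1 = a_2 + b_3;
       y_4 = a_2 + b_3;

   SCCVN assigns both right-hand sides the same value_id, say 7; x_1
   and y_4 are then representative SSA_NAMEs for value 7, and the
   small integer 7 is what ends up in the bitmap sets rather than an
   SSA_NAME_VERSION.  */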
186 
187 /* Representation of expressions on value numbers:
188 
189    Expressions consisting of value numbers are represented the same
190    way as our VN internally represents them, with an additional
191    "pre_expr" wrapping around them in order to facilitate storing all
192    of the expressions in the same sets.  */
193 
194 /* Representation of sets:
195 
196    The dataflow sets do not need to be sorted in any particular order
197    for the majority of their lifetime, and are simply represented as two
198    bitmaps, one that keeps track of values present in the set, and one
199    that keeps track of expressions present in the set.
200 
201    When we need them in topological order, we produce it on demand by
202    transforming the bitmap into an array and sorting it into topo
203    order.  */
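
/* Continuing the sketch above: a set holding x_1 (value 7) and c_5
   (value 9) is conceptually

       values:      { 7, 9 }
       expressions: { expr-id of x_1, expr-id of c_5 }

   so value membership and expression membership can each be tested
   with plain bitmap operations.  */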
204 
205 /* Type of expression, used to know which member of the PRE_EXPR union
206    is valid.  */
207 
208 enum pre_expr_kind
209 {
210     NAME,
211     NARY,
212     REFERENCE,
213     CONSTANT
214 };
215 
216 typedef union pre_expr_union_d
217 {
218   tree name;
219   tree constant;
220   vn_nary_op_t nary;
221   vn_reference_t reference;
222 } pre_expr_union;
223 
224 typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
225 {
226   enum pre_expr_kind kind;
227   unsigned int id;
228   pre_expr_union u;
229 
230   /* hash_table support.  */
231   typedef pre_expr_d value_type;
232   typedef pre_expr_d compare_type;
233   static inline hashval_t hash (const pre_expr_d *);
234   static inline int equal (const pre_expr_d *, const pre_expr_d *);
235 } *pre_expr;
236 
237 #define PRE_EXPR_NAME(e) (e)->u.name
238 #define PRE_EXPR_NARY(e) (e)->u.nary
239 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
240 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
241 
242 /* Compare E1 and E2 for equality.  */
243 
244 inline int
245 pre_expr_d::equal (const value_type *e1, const compare_type *e2)
246 {
247   if (e1->kind != e2->kind)
248     return false;
249 
250   switch (e1->kind)
251     {
252     case CONSTANT:
253       return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
254 				       PRE_EXPR_CONSTANT (e2));
255     case NAME:
256       return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
257     case NARY:
258       return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
259     case REFERENCE:
260       return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
261 			      PRE_EXPR_REFERENCE (e2));
262     default:
263       gcc_unreachable ();
264     }
265 }
266 
267 /* Hash E.  */
268 
269 inline hashval_t
270 pre_expr_d::hash (const value_type *e)
271 {
272   switch (e->kind)
273     {
274     case CONSTANT:
275       return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
276     case NAME:
277       return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
278     case NARY:
279       return PRE_EXPR_NARY (e)->hashcode;
280     case REFERENCE:
281       return PRE_EXPR_REFERENCE (e)->hashcode;
282     default:
283       gcc_unreachable ();
284     }
285 }
286 
287 /* Next global expression id number.  */
288 static unsigned int next_expression_id;
289 
290 /* Mapping from expression to id number we can use in bitmap sets.  */
291 static vec<pre_expr> expressions;
292 static hash_table<pre_expr_d> *expression_to_id;
293 static vec<unsigned> name_to_id;
294 
295 /* Allocate an expression id for EXPR.  */
296 
297 static inline unsigned int
298 alloc_expression_id (pre_expr expr)
299 {
300   struct pre_expr_d **slot;
301   /* Make sure we won't overflow. */
302   gcc_assert (next_expression_id + 1 > next_expression_id);
303   expr->id = next_expression_id++;
304   expressions.safe_push (expr);
305   if (expr->kind == NAME)
306     {
307       unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
308       /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
309 	 re-allocations by using vec::reserve upfront.  */
310       unsigned old_len = name_to_id.length ();
311       name_to_id.reserve (num_ssa_names - old_len);
312       name_to_id.quick_grow_cleared (num_ssa_names);
313       gcc_assert (name_to_id[version] == 0);
314       name_to_id[version] = expr->id;
315     }
316   else
317     {
318       slot = expression_to_id->find_slot (expr, INSERT);
319       gcc_assert (!*slot);
320       *slot = expr;
321     }
322   return next_expression_id - 1;
323 }
324 
325 /* Return the expression id assigned to EXPR.  */
326 
327 static inline unsigned int
328 get_expression_id (const pre_expr expr)
329 {
330   return expr->id;
331 }
332 
/* Return the expression id for EXPR if one has been assigned,
   or 0 if not.  */

333 static inline unsigned int
334 lookup_expression_id (const pre_expr expr)
335 {
336   struct pre_expr_d **slot;
337 
338   if (expr->kind == NAME)
339     {
340       unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
341       if (name_to_id.length () <= version)
342 	return 0;
343       return name_to_id[version];
344     }
345   else
346     {
347       slot = expression_to_id->find_slot (expr, NO_INSERT);
348       if (!slot)
349 	return 0;
350       return ((pre_expr)*slot)->id;
351     }
352 }
353 
354 /* Return the existing expression id for EXPR, or create one if one
355    does not exist yet.  */
356 
357 static inline unsigned int
358 get_or_alloc_expression_id (pre_expr expr)
359 {
360   unsigned int id = lookup_expression_id (expr);
361   if (id == 0)
362     return alloc_expression_id (expr);
363   return expr->id = id;
364 }
365 
366 /* Return the expression that has expression id ID.  */
367 
368 static inline pre_expr
369 expression_for_id (unsigned int id)
370 {
371   return expressions[id];
372 }
373 
374 /* Destroy the expressions array.  */
376 
377 static void
378 clear_expression_ids (void)
379 {
380   expressions.release ();
381 }
382 
383 static alloc_pool pre_expr_pool;
384 
385 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */
386 
387 static pre_expr
388 get_or_alloc_expr_for_name (tree name)
389 {
390   struct pre_expr_d expr;
391   pre_expr result;
392   unsigned int result_id;
393 
394   expr.kind = NAME;
395   expr.id = 0;
396   PRE_EXPR_NAME (&expr) = name;
397   result_id = lookup_expression_id (&expr);
398   if (result_id != 0)
399     return expression_for_id (result_id);
400 
401   result = (pre_expr) pool_alloc (pre_expr_pool);
402   result->kind = NAME;
403   PRE_EXPR_NAME (result) = name;
404   alloc_expression_id (result);
405   return result;
406 }
407 
408 /* An unordered bitmap set.  One bitmap tracks values, the other,
409    expressions.  */
410 typedef struct bitmap_set
411 {
412   bitmap_head expressions;
413   bitmap_head values;
414 } *bitmap_set_t;
415 
416 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
417   EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))
418 
419 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)		\
420   EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
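
/* A typical use of these iterators, mirroring the loops used
   elsewhere in this file (e.g. print_bitmap_set below):

       unsigned i;
       bitmap_iterator bi;
       FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	 {
	   pre_expr expr = expression_for_id (i);
	   ...
	 }
*/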
421 
422 /* Mapping from value id to expressions with that value_id.  */
423 static vec<bitmap> value_expressions;
424 
425 /* Sets that we need to keep track of.  */
426 typedef struct bb_bitmap_sets
427 {
428   /* The EXP_GEN set, which represents expressions/values generated in
429      a basic block.  */
430   bitmap_set_t exp_gen;
431 
432   /* The PHI_GEN set, which represents PHI results generated in a
433      basic block.  */
434   bitmap_set_t phi_gen;
435 
436   /* The TMP_GEN set, which represents results/temporaries generated
437      in a basic block. IE the LHS of an expression.  */
438   bitmap_set_t tmp_gen;
439 
440   /* The AVAIL_OUT set, which represents which values are available in
441      a given basic block.  */
442   bitmap_set_t avail_out;
443 
444   /* The ANTIC_IN set, which represents which values are anticipatable
445      in a given basic block.  */
446   bitmap_set_t antic_in;
447 
448   /* The PA_IN set, which represents which values are
449      partially anticipatable in a given basic block.  */
450   bitmap_set_t pa_in;
451 
452   /* The NEW_SETS set, which is used during insertion to augment the
453      AVAIL_OUT set of blocks with the new insertions performed during
454      the current iteration.  */
455   bitmap_set_t new_sets;
456 
457   /* A cache for value_dies_in_block_x.  */
458   bitmap expr_dies;
459 
460   /* The live virtual operand on successor edges.  */
461   tree vop_on_exit;
462 
463   /* True if we have visited this block during ANTIC calculation.  */
464   unsigned int visited : 1;
465 
466   /* True when the block contains a call that might not return.  */
467   unsigned int contains_may_not_return_call : 1;
468 } *bb_value_sets_t;
469 
470 #define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
471 #define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
472 #define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
473 #define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
474 #define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
475 #define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
476 #define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
477 #define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
478 #define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
479 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
480 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
481 
482 
483 /* Basic block list in postorder.  */
484 static int *postorder;
485 static int postorder_num;
486 
487 /* This structure is used to keep track of statistics on what
488    optimization PRE was able to perform.  */
489 static struct
490 {
491   /* The number of RHS computations eliminated by PRE.  */
492   int eliminations;
493 
494   /* The number of new expressions/temporaries generated by PRE.  */
495   int insertions;
496 
497   /* The number of inserts found due to partial anticipation  */
498   int pa_insert;
499 
500   /* The number of new PHI nodes added by PRE.  */
501   int phis;
502 } pre_stats;
503 
504 static bool do_partial_partial;
505 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
506 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
507 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
508 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
509 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
510 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
511 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
512 				      unsigned int, bool);
513 static bitmap_set_t bitmap_set_new (void);
514 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
515 					 tree);
516 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
517 static unsigned int get_expr_value_id (pre_expr);
518 
519 /* We can add and remove elements and entries to and from sets
520    and hash tables, so we use alloc pools for them.  */
521 
522 static alloc_pool bitmap_set_pool;
523 static bitmap_obstack grand_bitmap_obstack;
524 
525 /* Set of blocks with statements that have had their EH properties changed.  */
526 static bitmap need_eh_cleanup;
527 
528 /* Set of blocks with statements that have had their AB properties changed.  */
529 static bitmap need_ab_cleanup;
530 
531 /* A three tuple {e, pred, v} used to cache phi translations in the
532    phi_translate_table.  */
533 
534 typedef struct expr_pred_trans_d : typed_free_remove<expr_pred_trans_d>
535 {
536   /* The expression.  */
537   pre_expr e;
538 
539   /* The predecessor block along which we translated the expression.  */
540   basic_block pred;
541 
542   /* The value that resulted from the translation.  */
543   pre_expr v;
544 
545   /* The hashcode for the expression, pred pair. This is cached for
546      speed reasons.  */
547   hashval_t hashcode;
548 
549   /* hash_table support.  */
550   typedef expr_pred_trans_d value_type;
551   typedef expr_pred_trans_d compare_type;
552   static inline hashval_t hash (const value_type *);
553   static inline int equal (const value_type *, const compare_type *);
554 } *expr_pred_trans_t;
555 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
556 
557 inline hashval_t
558 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
559 {
560   return e->hashcode;
561 }
562 
563 inline int
564 expr_pred_trans_d::equal (const value_type *ve1,
565 			  const compare_type *ve2)
566 {
567   basic_block b1 = ve1->pred;
568   basic_block b2 = ve2->pred;
569 
570   /* If they are not translations for the same basic block, they can't
571      be equal.  */
572   if (b1 != b2)
573     return false;
574   return pre_expr_d::equal (ve1->e, ve2->e);
575 }
576 
577 /* The phi_translate_table caches phi translations for a given
578    expression and predecessor.  */
579 static hash_table<expr_pred_trans_d> *phi_translate_table;
580 
581 /* Add the tuple mapping from {expression E, basic block PRED} to
582    the phi translation table and return whether it pre-existed.  */
583 
584 static inline bool
585 phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
586 {
587   expr_pred_trans_t *slot;
588   expr_pred_trans_d tem;
589   hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
590 					     pred->index);
591   tem.e = e;
592   tem.pred = pred;
593   tem.hashcode = hash;
594   slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
595   if (*slot)
596     {
597       *entry = *slot;
598       return true;
599     }
600 
601   *entry = *slot = XNEW (struct expr_pred_trans_d);
602   (*entry)->e = e;
603   (*entry)->pred = pred;
604   (*entry)->hashcode = hash;
605   return false;
606 }
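
/* The intended calling pattern, as used by phi_translate further
   down: a true return means SLOT already holds a cached translation
   in slot->v; false means a fresh entry was inserted and the caller
   must fill slot->v in after translating.

       expr_pred_trans_t slot;
       if (phi_trans_add (&slot, expr, pred))
	 return slot->v;
       slot->v = NULL;
*/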
607 
608 
609 /* Add expression E to the expression set of value id V.  */
610 
611 static void
612 add_to_value (unsigned int v, pre_expr e)
613 {
614   bitmap set;
615 
616   gcc_checking_assert (get_expr_value_id (e) == v);
617 
618   if (v >= value_expressions.length ())
619     {
620       value_expressions.safe_grow_cleared (v + 1);
621     }
622 
623   set = value_expressions[v];
624   if (!set)
625     {
626       set = BITMAP_ALLOC (&grand_bitmap_obstack);
627       value_expressions[v] = set;
628     }
629 
630   bitmap_set_bit (set, get_or_alloc_expression_id (e));
631 }
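
/* Illustration (made-up ids): after adding both the NAME x_1 and the
   NARY a_2 + b_3 to value 7, value_expressions[7] contains the two
   expression ids.  This reverse mapping is what
   sorted_array_from_bitmap_set and bitmap_set_replace_value below
   walk to find all expressions with a given value.  */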
632 
633 /* Create a new bitmap set and return it.  */
634 
635 static bitmap_set_t
636 bitmap_set_new (void)
637 {
638   bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
639   bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
640   bitmap_initialize (&ret->values, &grand_bitmap_obstack);
641   return ret;
642 }
643 
644 /* Return the value id for a PRE expression EXPR.  */
645 
646 static unsigned int
647 get_expr_value_id (pre_expr expr)
648 {
649   unsigned int id;
650   switch (expr->kind)
651     {
652     case CONSTANT:
653       id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
654       break;
655     case NAME:
656       id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
657       break;
658     case NARY:
659       id = PRE_EXPR_NARY (expr)->value_id;
660       break;
661     case REFERENCE:
662       id = PRE_EXPR_REFERENCE (expr)->value_id;
663       break;
664     default:
665       gcc_unreachable ();
666     }
667   /* ???  We cannot assert that expr has a value-id (it can be 0), because
668      we assign value-ids only to expressions that have a result
669      in set_hashtable_value_ids.  */
670   return id;
671 }
672 
673 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */
674 
675 static tree
676 sccvn_valnum_from_value_id (unsigned int val)
677 {
678   bitmap_iterator bi;
679   unsigned int i;
680   bitmap exprset = value_expressions[val];
681   EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
682     {
683       pre_expr vexpr = expression_for_id (i);
684       if (vexpr->kind == NAME)
685 	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
686       else if (vexpr->kind == CONSTANT)
687 	return PRE_EXPR_CONSTANT (vexpr);
688     }
689   return NULL_TREE;
690 }
691 
692 /* Remove an expression EXPR from a bitmapped set.  */
693 
694 static void
695 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
696 {
697   unsigned int val  = get_expr_value_id (expr);
698   if (!value_id_constant_p (val))
699     {
700       bitmap_clear_bit (&set->values, val);
701       bitmap_clear_bit (&set->expressions, get_expression_id (expr));
702     }
703 }
704 
705 static void
706 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
707 			  unsigned int val, bool allow_constants)
708 {
709   if (allow_constants || !value_id_constant_p (val))
710     {
711       /* We specifically expect this and only this function to be able to
712 	 insert constants into a set.  */
713       bitmap_set_bit (&set->values, val);
714       bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
715     }
716 }
717 
718 /* Insert an expression EXPR into a bitmapped set.  */
719 
720 static void
721 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
722 {
723   bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
724 }
725 
726 /* Copy a bitmapped set ORIG, into bitmapped set DEST.  */
727 
728 static void
729 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
730 {
731   bitmap_copy (&dest->expressions, &orig->expressions);
732   bitmap_copy (&dest->values, &orig->values);
733 }
734 
735 
736 /* Free memory used up by SET.  */
737 static void
738 bitmap_set_free (bitmap_set_t set)
739 {
740   bitmap_clear (&set->expressions);
741   bitmap_clear (&set->values);
742 }
743 
744 
745 /* Generate a topologically-ordered array of bitmap set SET.  */
746 
747 static vec<pre_expr>
748 sorted_array_from_bitmap_set (bitmap_set_t set)
749 {
750   unsigned int i, j;
751   bitmap_iterator bi, bj;
752   vec<pre_expr> result;
753 
754   /* Pre-allocate enough space for the array.  */
755   result.create (bitmap_count_bits (&set->expressions));
756 
757   FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
758     {
759       /* The number of expressions having a given value is usually
760 	 relatively small.  Thus, rather than making a vector of all
761 	 the expressions and sorting it by value-id, we walk the values
762 	 and check in the reverse mapping that tells us what expressions
763 	 have a given value, to filter those in our set.  As a result,
764 	 the expressions are inserted in value-id order, which means
765 	 topological order.
766 
767 	 If this is somehow a significant loss for some cases, we can
768 	 choose which set to walk based on the set size.  */
769       bitmap exprset = value_expressions[i];
770       EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
771 	{
772 	  if (bitmap_bit_p (&set->expressions, j))
773 	    result.quick_push (expression_for_id (j));
774         }
775     }
776 
777   return result;
778 }
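
/* Sketch of why value-id order is topological (numbers invented):
   if value 3 is the value of a_2 and value 7 is the value of
   a_2 + b_3, the operand's value-id is the smaller one, so walking
   FOR_EACH_VALUE_ID_IN_SET in increasing order emits leaders before
   the expressions that use them.  */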
779 
780 /* Perform bitmapped set operation DEST &= ORIG.  */
781 
782 static void
783 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
784 {
785   bitmap_iterator bi;
786   unsigned int i;
787 
788   if (dest != orig)
789     {
790       bitmap_head temp;
791       bitmap_initialize (&temp, &grand_bitmap_obstack);
792 
793       bitmap_and_into (&dest->values, &orig->values);
794       bitmap_copy (&temp, &dest->expressions);
795       EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
796 	{
797 	  pre_expr expr = expression_for_id (i);
798 	  unsigned int value_id = get_expr_value_id (expr);
799 	  if (!bitmap_bit_p (&dest->values, value_id))
800 	    bitmap_clear_bit (&dest->expressions, i);
801 	}
802       bitmap_clear (&temp);
803     }
804 }
805 
806 /* Subtract all values and expressions contained in ORIG from DEST.  */
807 
808 static bitmap_set_t
809 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
810 {
811   bitmap_set_t result = bitmap_set_new ();
812   bitmap_iterator bi;
813   unsigned int i;
814 
815   bitmap_and_compl (&result->expressions, &dest->expressions,
816 		    &orig->expressions);
817 
818   FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
819     {
820       pre_expr expr = expression_for_id (i);
821       unsigned int value_id = get_expr_value_id (expr);
822       bitmap_set_bit (&result->values, value_id);
823     }
824 
825   return result;
826 }
827 
828 /* Subtract all the values in bitmap set B from bitmap set A.  */
829 
830 static void
831 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
832 {
833   unsigned int i;
834   bitmap_iterator bi;
835   bitmap_head temp;
836 
837   bitmap_initialize (&temp, &grand_bitmap_obstack);
838 
839   bitmap_copy (&temp, &a->expressions);
840   EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
841     {
842       pre_expr expr = expression_for_id (i);
843       if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
844 	bitmap_remove_from_set (a, expr);
845     }
846   bitmap_clear (&temp);
847 }
848 
849 
850 /* Return true if bitmapped set SET contains the value VALUE_ID.  */
851 
852 static bool
853 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
854 {
855   if (value_id_constant_p (value_id))
856     return true;
857 
858   if (!set || bitmap_empty_p (&set->expressions))
859     return false;
860 
861   return bitmap_bit_p (&set->values, value_id);
862 }
863 
/* Return true if bitmapped set SET contains the expression EXPR.  */

864 static inline bool
865 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
866 {
867   return bitmap_bit_p (&set->expressions, get_expression_id (expr));
868 }
869 
870 /* Replace an instance of value LOOKFOR with expression EXPR in SET.  */
871 
872 static void
873 bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
874 			  const pre_expr expr)
875 {
876   bitmap exprset;
877   unsigned int i;
878   bitmap_iterator bi;
879 
880   if (value_id_constant_p (lookfor))
881     return;
882 
883   if (!bitmap_set_contains_value (set, lookfor))
884     return;
885 
886   /* The number of expressions having a given value is usually
887      significantly less than the total number of expressions in SET.
888      Thus, rather than check, for each expression in SET, whether it
889      has the value LOOKFOR, we walk the reverse mapping that tells us
890      what expressions have a given value, and see if any of those
891      expressions are in our set.  For large testcases, this is about
892      5-10x faster than walking the bitmap.  If this is somehow a
893      significant loss for some cases, we can choose which set to walk
894      based on the set size.  */
895   exprset = value_expressions[lookfor];
896   EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
897     {
898       if (bitmap_clear_bit (&set->expressions, i))
899 	{
900 	  bitmap_set_bit (&set->expressions, get_expression_id (expr));
901 	  return;
902 	}
903     }
904 
905   gcc_unreachable ();
906 }
907 
908 /* Return true if two bitmap sets are equal.  */
909 
910 static bool
911 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
912 {
913   return bitmap_equal_p (&a->values, &b->values);
914 }
915 
916 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
917    and add it otherwise.  */
918 
919 static void
920 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
921 {
922   unsigned int val = get_expr_value_id (expr);
923 
924   if (bitmap_set_contains_value (set, val))
925     bitmap_set_replace_value (set, val, expr);
926   else
927     bitmap_insert_into_set (set, expr);
928 }
929 
930 /* Insert EXPR into SET if EXPR's value is not already present in
931    SET.  */
932 
933 static void
934 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
935 {
936   unsigned int val = get_expr_value_id (expr);
937 
938   gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
939 
940   /* Constant values are always considered to be part of the set.  */
941   if (value_id_constant_p (val))
942     return;
943 
944   /* If the value membership changed, add the expression.  */
945   if (bitmap_set_bit (&set->values, val))
946     bitmap_set_bit (&set->expressions, expr->id);
947 }
948 
949 /* Print out EXPR to outfile.  */
950 
951 static void
952 print_pre_expr (FILE *outfile, const pre_expr expr)
953 {
954   switch (expr->kind)
955     {
956     case CONSTANT:
957       print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
958       break;
959     case NAME:
960       print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
961       break;
962     case NARY:
963       {
964 	unsigned int i;
965 	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
966 	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
967 	for (i = 0; i < nary->length; i++)
968 	  {
969 	    print_generic_expr (outfile, nary->op[i], 0);
970 	    if (i != (unsigned) nary->length - 1)
971 	      fprintf (outfile, ",");
972 	  }
973 	fprintf (outfile, "}");
974       }
975       break;
976 
977     case REFERENCE:
978       {
979 	vn_reference_op_t vro;
980 	unsigned int i;
981 	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
982 	fprintf (outfile, "{");
983 	for (i = 0;
984 	     ref->operands.iterate (i, &vro);
985 	     i++)
986 	  {
987 	    bool closebrace = false;
988 	    if (vro->opcode != SSA_NAME
989 		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
990 	      {
991 		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
992 		if (vro->op0)
993 		  {
994 		    fprintf (outfile, "<");
995 		    closebrace = true;
996 		  }
997 	      }
998 	    if (vro->op0)
999 	      {
1000 		print_generic_expr (outfile, vro->op0, 0);
1001 		if (vro->op1)
1002 		  {
1003 		    fprintf (outfile, ",");
1004 		    print_generic_expr (outfile, vro->op1, 0);
1005 		  }
1006 		if (vro->op2)
1007 		  {
1008 		    fprintf (outfile, ",");
1009 		    print_generic_expr (outfile, vro->op2, 0);
1010 		  }
1011 	      }
1012 	    if (closebrace)
1013 		fprintf (outfile, ">");
1014 	    if (i != ref->operands.length () - 1)
1015 	      fprintf (outfile, ",");
1016 	  }
1017 	fprintf (outfile, "}");
1018 	if (ref->vuse)
1019 	  {
1020 	    fprintf (outfile, "@");
1021 	    print_generic_expr (outfile, ref->vuse, 0);
1022 	  }
1023       }
1024       break;
1025     }
1026 }
1027 void debug_pre_expr (pre_expr);
1028 
1029 /* Like print_pre_expr but always prints to stderr.  */
1030 DEBUG_FUNCTION void
1031 debug_pre_expr (pre_expr e)
1032 {
1033   print_pre_expr (stderr, e);
1034   fprintf (stderr, "\n");
1035 }
1036 
1037 /* Print out SET to OUTFILE.  */
1038 
1039 static void
1040 print_bitmap_set (FILE *outfile, bitmap_set_t set,
1041 		  const char *setname, int blockindex)
1042 {
1043   fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1044   if (set)
1045     {
1046       bool first = true;
1047       unsigned i;
1048       bitmap_iterator bi;
1049 
1050       FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1051 	{
1052 	  const pre_expr expr = expression_for_id (i);
1053 
1054 	  if (!first)
1055 	    fprintf (outfile, ", ");
1056 	  first = false;
1057 	  print_pre_expr (outfile, expr);
1058 
1059 	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1060 	}
1061     }
1062   fprintf (outfile, " }\n");
1063 }
1064 
1065 void debug_bitmap_set (bitmap_set_t);
1066 
1067 DEBUG_FUNCTION void
1068 debug_bitmap_set (bitmap_set_t set)
1069 {
1070   print_bitmap_set (stderr, set, "debug", 0);
1071 }
1072 
1073 void debug_bitmap_sets_for (basic_block);
1074 
1075 DEBUG_FUNCTION void
1076 debug_bitmap_sets_for (basic_block bb)
1077 {
1078   print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
1079   print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
1080   print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
1081   print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
1082   print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
1083   if (do_partial_partial)
1084     print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
1085   print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
1086 }
1087 
1088 /* Print out the expressions that have VAL to OUTFILE.  */
1089 
1090 static void
1091 print_value_expressions (FILE *outfile, unsigned int val)
1092 {
1093   bitmap set = value_expressions[val];
1094   if (set)
1095     {
1096       bitmap_set x;
1097       char s[10];
1098       sprintf (s, "%04d", val);
1099       x.expressions = *set;
1100       print_bitmap_set (outfile, &x, s, 0);
1101     }
1102 }
1103 
1104 
1105 DEBUG_FUNCTION void
1106 debug_value_expressions (unsigned int val)
1107 {
1108   print_value_expressions (stderr, val);
1109 }
1110 
1111 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1112    represent it.  */
1113 
1114 static pre_expr
1115 get_or_alloc_expr_for_constant (tree constant)
1116 {
1117   unsigned int result_id;
1118   unsigned int value_id;
1119   struct pre_expr_d expr;
1120   pre_expr newexpr;
1121 
1122   expr.kind = CONSTANT;
1123   PRE_EXPR_CONSTANT (&expr) = constant;
1124   result_id = lookup_expression_id (&expr);
1125   if (result_id != 0)
1126     return expression_for_id (result_id);
1127 
1128   newexpr = (pre_expr) pool_alloc (pre_expr_pool);
1129   newexpr->kind = CONSTANT;
1130   PRE_EXPR_CONSTANT (newexpr) = constant;
1131   alloc_expression_id (newexpr);
1132   value_id = get_or_alloc_constant_value_id (constant);
1133   add_to_value (value_id, newexpr);
1134   return newexpr;
1135 }
1136 
1137 /* Given a value id V, find the actual tree representing the constant
1138    value if there is one, and return it. Return NULL if we can't find
1139    a constant.  */
1140 
1141 static tree
1142 get_constant_for_value_id (unsigned int v)
1143 {
1144   if (value_id_constant_p (v))
1145     {
1146       unsigned int i;
1147       bitmap_iterator bi;
1148       bitmap exprset = value_expressions[v];
1149 
1150       EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1151 	{
1152 	  pre_expr expr = expression_for_id (i);
1153 	  if (expr->kind == CONSTANT)
1154 	    return PRE_EXPR_CONSTANT (expr);
1155 	}
1156     }
1157   return NULL;
1158 }
1159 
1160 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1161    Currently only supports constants, SSA_NAMEs and NARYs already known to SCCVN.  */
1162 static pre_expr
1163 get_or_alloc_expr_for (tree t)
1164 {
1165   if (TREE_CODE (t) == SSA_NAME)
1166     return get_or_alloc_expr_for_name (t);
1167   else if (is_gimple_min_invariant (t))
1168     return get_or_alloc_expr_for_constant (t);
1169   else
1170     {
1171       /* More complex expressions can result from SCCVN expression
1172 	 simplification that inserts values for them.  As none of them
1173 	 have VOPs, they get handled by the nary ops struct.  */
1174       vn_nary_op_t result;
1175       unsigned int result_id;
1176       vn_nary_op_lookup (t, &result);
1177       if (result != NULL)
1178 	{
1179 	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
1180 	  e->kind = NARY;
1181 	  PRE_EXPR_NARY (e) = result;
1182 	  result_id = lookup_expression_id (e);
1183 	  if (result_id != 0)
1184 	    {
1185 	      pool_free (pre_expr_pool, e);
1186 	      e = expression_for_id (result_id);
1187 	      return e;
1188 	    }
1189 	  alloc_expression_id (e);
1190 	  return e;
1191 	}
1192     }
1193   return NULL;
1194 }
1195 
1196 /* Return the folded version of E if E, when folded, is a gimple
1197    min_invariant.  Otherwise, return E.  */
1198 
1199 static pre_expr
1200 fully_constant_expression (pre_expr e)
1201 {
1202   switch (e->kind)
1203     {
1204     case CONSTANT:
1205       return e;
1206     case NARY:
1207       {
1208 	vn_nary_op_t nary = PRE_EXPR_NARY (e);
1209 	switch (TREE_CODE_CLASS (nary->opcode))
1210 	  {
1211 	  case tcc_binary:
1212 	  case tcc_comparison:
1213 	    {
1214 	      /* We have to go from trees to pre exprs to value ids to
1215 		 constants.  */
1216 	      tree naryop0 = nary->op[0];
1217 	      tree naryop1 = nary->op[1];
1218 	      tree result;
1219 	      if (!is_gimple_min_invariant (naryop0))
1220 		{
1221 		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1222 		  unsigned int vrep0 = get_expr_value_id (rep0);
1223 		  tree const0 = get_constant_for_value_id (vrep0);
1224 		  if (const0)
1225 		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
1226 		}
1227 	      if (!is_gimple_min_invariant (naryop1))
1228 		{
1229 		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
1230 		  unsigned int vrep1 = get_expr_value_id (rep1);
1231 		  tree const1 = get_constant_for_value_id (vrep1);
1232 		  if (const1)
1233 		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
1234 		}
1235 	      result = fold_binary (nary->opcode, nary->type,
1236 				    naryop0, naryop1);
1237 	      if (result && is_gimple_min_invariant (result))
1238 		return get_or_alloc_expr_for_constant (result);
1239 	      /* We might have simplified the expression to a
1240 		 SSA_NAME for example from x_1 * 1.  But we cannot
1241 		 insert a PHI for x_1 unconditionally as x_1 might
1242 		 not be available readily.  */
1243 	      return e;
1244 	    }
1245 	  case tcc_reference:
1246 	    if (nary->opcode != REALPART_EXPR
1247 		&& nary->opcode != IMAGPART_EXPR
1248 		&& nary->opcode != VIEW_CONVERT_EXPR)
1249 	      return e;
1250 	    /* Fallthrough.  */
1251 	  case tcc_unary:
1252 	    {
1253 	      /* We have to go from trees to pre exprs to value ids to
1254 		 constants.  */
1255 	      tree naryop0 = nary->op[0];
1256 	      tree const0, result;
1257 	      if (is_gimple_min_invariant (naryop0))
1258 		const0 = naryop0;
1259 	      else
1260 		{
1261 		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1262 		  unsigned int vrep0 = get_expr_value_id (rep0);
1263 		  const0 = get_constant_for_value_id (vrep0);
1264 		}
1265 	      result = NULL;
1266 	      if (const0)
1267 		{
1268 		  tree type1 = TREE_TYPE (nary->op[0]);
1269 		  const0 = fold_convert (type1, const0);
1270 		  result = fold_unary (nary->opcode, nary->type, const0);
1271 		}
1272 	      if (result && is_gimple_min_invariant (result))
1273 		return get_or_alloc_expr_for_constant (result);
1274 	      return e;
1275 	    }
1276 	  default:
1277 	    return e;
1278 	  }
1279       }
1280     case REFERENCE:
1281       {
1282 	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1283 	tree folded;
1284 	if ((folded = fully_constant_vn_reference_p (ref)))
1285 	  return get_or_alloc_expr_for_constant (folded);
1286 	return e;
1287       }
1288     default:
1289       return e;
1290     }
1291   return e;
1292 }
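
/* Sketch of the tcc_binary path above (constants invented): for the
   NARY {plus_expr, a_1, b_2}, if get_constant_for_value_id finds 3
   for a_1's value and 4 for b_2's value, fold_binary yields 7 and a
   CONSTANT pre_expr is returned.  If folding instead produces a
   non-invariant, e.g. x_1 from x_1 * 1, the original expression is
   kept, since x_1 might not be readily available for insertion.  */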
1293 
1294 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1295    it has the value it would have in BLOCK.  Set *SAME_VALID to true
1296    in case the new vuse doesn't change the value id of the OPERANDS.  */
1297 
1298 static tree
1299 translate_vuse_through_block (vec<vn_reference_op_s> operands,
1300 			      alias_set_type set, tree type, tree vuse,
1301 			      basic_block phiblock,
1302 			      basic_block block, bool *same_valid)
1303 {
1304   gimple phi = SSA_NAME_DEF_STMT (vuse);
1305   ao_ref ref;
1306   edge e = NULL;
1307   bool use_oracle;
1308 
1309   *same_valid = true;
1310 
1311   if (gimple_bb (phi) != phiblock)
1312     return vuse;
1313 
1314   use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1315 
1316   /* Use the alias-oracle to find either the PHI node in this block,
1317      the first VUSE used in this block that is equivalent to vuse or
1318      the first VUSE whose definition in this block kills the value.  */
1319   if (gimple_code (phi) == GIMPLE_PHI)
1320     e = find_edge (block, phiblock);
1321   else if (use_oracle)
1322     while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1323       {
1324 	vuse = gimple_vuse (phi);
1325 	phi = SSA_NAME_DEF_STMT (vuse);
1326 	if (gimple_bb (phi) != phiblock)
1327 	  return vuse;
1328 	if (gimple_code (phi) == GIMPLE_PHI)
1329 	  {
1330 	    e = find_edge (block, phiblock);
1331 	    break;
1332 	  }
1333       }
1334   else
1335     return NULL_TREE;
1336 
1337   if (e)
1338     {
1339       if (use_oracle)
1340 	{
1341 	  bitmap visited = NULL;
1342 	  unsigned int cnt;
1343 	  /* Try to find a vuse that dominates this phi node by skipping
1344 	     non-clobbering statements.  */
1345 	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
1346 					   NULL, NULL);
1347 	  if (visited)
1348 	    BITMAP_FREE (visited);
1349 	}
1350       else
1351 	vuse = NULL_TREE;
1352       if (!vuse)
1353 	{
1354 	  /* If we didn't find any, the value ID can't stay the same,
1355 	     but return the translated vuse.  */
1356 	  *same_valid = false;
1357 	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
1358 	}
1359       /* ??? We would like to return vuse here as this is the canonical
1360          upmost vdef that this reference is associated with.  But during
1361 	 insertion of the references into the hash tables we only ever
1362 	 directly insert with their direct gimple_vuse, hence returning
1363 	 something else would make us not find the other expression.  */
1364       return PHI_ARG_DEF (phi, e->dest_idx);
1365     }
1366 
1367   return NULL_TREE;
1368 }
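
/* Sketch (virtual operand names invented): for a reference with VUSE
   .MEM_4 where PHIBLOCK contains .MEM_4 = PHI <.MEM_7 (BLOCK), ...>,
   translating into predecessor BLOCK yields .MEM_7, the PHI argument
   on the BLOCK edge.  When the alias-oracle walk cannot show that
   the reference's value is unchanged along the way, *SAME_VALID is
   cleared so the caller knows to assign a fresh value-id.  */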
1369 
1370 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1371    SET2.  This is used to avoid making a set consisting of the union
1372    of PA_IN and ANTIC_IN during insert.  */
1373 
1374 static inline pre_expr
1375 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
1376 {
1377   pre_expr result;
1378 
1379   result = bitmap_find_leader (set1, val);
1380   if (!result && set2)
1381     result = bitmap_find_leader (set2, val);
1382   return result;
1383 }
1384 
1385 /* Get the tree type for our PRE expression e.  */
1386 
1387 static tree
1388 get_expr_type (const pre_expr e)
1389 {
1390   switch (e->kind)
1391     {
1392     case NAME:
1393       return TREE_TYPE (PRE_EXPR_NAME (e));
1394     case CONSTANT:
1395       return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1396     case REFERENCE:
1397       return PRE_EXPR_REFERENCE (e)->type;
1398     case NARY:
1399       return PRE_EXPR_NARY (e)->type;
1400     }
1401   gcc_unreachable ();
1402 }
1403 
1404 /* Get a representative SSA_NAME for a given expression.
1405    Since all of our sub-expressions are treated as values, we require
1406    them to be SSA_NAME's for simplicity.
1407    Prior versions of GVNPRE used to use "value handles" here, so that
1408    an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
1409    either case, the operands are really values (IE we do not expect
1410    them to be usable without finding leaders).  */
1411 
1412 static tree
1413 get_representative_for (const pre_expr e)
1414 {
1415   tree name;
1416   unsigned int value_id = get_expr_value_id (e);
1417 
1418   switch (e->kind)
1419     {
1420     case NAME:
1421       return PRE_EXPR_NAME (e);
1422     case CONSTANT:
1423       return PRE_EXPR_CONSTANT (e);
1424     case NARY:
1425     case REFERENCE:
1426       {
1427 	/* Go through all of the expressions representing this value
1428 	   and pick out an SSA_NAME.  */
1429 	unsigned int i;
1430 	bitmap_iterator bi;
1431 	bitmap exprs = value_expressions[value_id];
1432 	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
1433 	  {
1434 	    pre_expr rep = expression_for_id (i);
1435 	    if (rep->kind == NAME)
1436 	      return PRE_EXPR_NAME (rep);
1437 	    else if (rep->kind == CONSTANT)
1438 	      return PRE_EXPR_CONSTANT (rep);
1439 	  }
1440       }
1441       break;
1442     }
1443 
1444   /* If we reached here we couldn't find an SSA_NAME.  This can
1445      happen when we've discovered a value that has never appeared in
1446      the program as set to an SSA_NAME, as the result of phi translation.
1447      Create one here.
1448      ???  We should be able to re-use this when we insert the statement
1449      to compute it.  */
1450   name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
1451   VN_INFO_GET (name)->value_id = value_id;
1452   VN_INFO (name)->valnum = name;
1453   /* ???  For now mark this SSA name for release by SCCVN.  */
1454   VN_INFO (name)->needs_insertion = true;
1455   add_to_value (value_id, get_or_alloc_expr_for_name (name));
1456   if (dump_file && (dump_flags & TDF_DETAILS))
1457     {
1458       fprintf (dump_file, "Created SSA_NAME representative ");
1459       print_generic_expr (dump_file, name, 0);
1460       fprintf (dump_file, " for expression:");
1461       print_pre_expr (dump_file, e);
1462       fprintf (dump_file, " (%04d)\n", value_id);
1463     }
1464 
1465   return name;
1466 }
1467 
1468 
1469 
1470 static pre_expr
1471 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1472 	       basic_block pred, basic_block phiblock);
1473 
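/* Illustration of phi translation (names invented): given in PHIBLOCK

       a_1 = PHI <a_5 (pred1), a_6 (pred2)>

   translating the NARY a_1 + b_2 into pred1 replaces the PHI-defined
   operand with its argument on that edge, yielding a_5 + b_2, which
   is then value-numbered anew (or found in the VN tables).  */
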
1474 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1475    the phis in PRED.  Return NULL if we can't find a leader for each part
1476    of the translated expression.  */
1477 
1478 static pre_expr
1479 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1480 		 basic_block pred, basic_block phiblock)
1481 {
1482   switch (expr->kind)
1483     {
1484     case NARY:
1485       {
1486 	unsigned int i;
1487 	bool changed = false;
1488 	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1489 	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1490 					   sizeof_vn_nary_op (nary->length));
1491 	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1492 
1493 	for (i = 0; i < newnary->length; i++)
1494 	  {
1495 	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1496 	      continue;
1497 	    else
1498 	      {
1499                 pre_expr leader, result;
1500 		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1501 		leader = find_leader_in_sets (op_val_id, set1, set2);
1502                 result = phi_translate (leader, set1, set2, pred, phiblock);
1503 		if (result && result != leader)
1504 		  {
1505 		    tree name = get_representative_for (result);
1506 		    if (!name)
1507 		      return NULL;
1508 		    newnary->op[i] = name;
1509 		  }
1510 		else if (!result)
1511 		  return NULL;
1512 
1513 		changed |= newnary->op[i] != nary->op[i];
1514 	      }
1515 	  }
1516 	if (changed)
1517 	  {
1518 	    pre_expr constant;
1519 	    unsigned int new_val_id;
1520 
1521 	    tree result = vn_nary_op_lookup_pieces (newnary->length,
1522 						    newnary->opcode,
1523 						    newnary->type,
1524 						    &newnary->op[0],
1525 						    &nary);
1526 	    if (result && is_gimple_min_invariant (result))
1527 	      return get_or_alloc_expr_for_constant (result);
1528 
1529 	    expr = (pre_expr) pool_alloc (pre_expr_pool);
1530 	    expr->kind = NARY;
1531 	    expr->id = 0;
1532 	    if (nary)
1533 	      {
1534 		PRE_EXPR_NARY (expr) = nary;
1535 		constant = fully_constant_expression (expr);
1536 		if (constant != expr)
1537 		  return constant;
1538 
1539 		new_val_id = nary->value_id;
1540 		get_or_alloc_expression_id (expr);
1541 	      }
1542 	    else
1543 	      {
1544 		new_val_id = get_next_value_id ();
1545 		value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1546 		nary = vn_nary_op_insert_pieces (newnary->length,
1547 						 newnary->opcode,
1548 						 newnary->type,
1549 						 &newnary->op[0],
1550 						 result, new_val_id);
1551 		PRE_EXPR_NARY (expr) = nary;
1552 		constant = fully_constant_expression (expr);
1553 		if (constant != expr)
1554 		  return constant;
1555 		get_or_alloc_expression_id (expr);
1556 	      }
1557 	    add_to_value (new_val_id, expr);
1558 	  }
1559 	return expr;
1560       }
1561       break;
1562 
1563     case REFERENCE:
1564       {
1565 	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1566 	vec<vn_reference_op_s> operands = ref->operands;
1567 	tree vuse = ref->vuse;
1568 	tree newvuse = vuse;
1569 	vec<vn_reference_op_s> newoperands = vNULL;
1570 	bool changed = false, same_valid = true;
1571 	unsigned int i, n;
1572 	vn_reference_op_t operand;
1573 	vn_reference_t newref;
1574 
1575 	for (i = 0; operands.iterate (i, &operand); i++)
1576 	  {
1577 	    pre_expr opresult;
1578 	    pre_expr leader;
1579 	    tree op[3];
1580 	    tree type = operand->type;
1581 	    vn_reference_op_s newop = *operand;
1582 	    op[0] = operand->op0;
1583 	    op[1] = operand->op1;
1584 	    op[2] = operand->op2;
1585 	    for (n = 0; n < 3; ++n)
1586 	      {
1587 		unsigned int op_val_id;
1588 		if (!op[n])
1589 		  continue;
1590 		if (TREE_CODE (op[n]) != SSA_NAME)
1591 		  {
1592 		    /* We can't possibly insert these.  */
1593 		    if (n != 0
1594 			&& !is_gimple_min_invariant (op[n]))
1595 		      break;
1596 		    continue;
1597 		  }
1598 		op_val_id = VN_INFO (op[n])->value_id;
1599 		leader = find_leader_in_sets (op_val_id, set1, set2);
1600 		if (!leader)
1601 		  break;
1602 		opresult = phi_translate (leader, set1, set2, pred, phiblock);
1603 		if (!opresult)
1604 		  break;
1605 		if (opresult != leader)
1606 		  {
1607 		    tree name = get_representative_for (opresult);
1608 		    if (!name)
1609 		      break;
1610 		    changed |= name != op[n];
1611 		    op[n] = name;
1612 		  }
1613 	      }
1614 	    if (n != 3)
1615 	      {
1616 		newoperands.release ();
1617 		return NULL;
1618 	      }
1619 	    if (!changed)
1620 	      continue;
1621 	    if (!newoperands.exists ())
1622 	      newoperands = operands.copy ();
1623 	    /* We may have changed from an SSA_NAME to a constant.  */
1624 	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1625 	      newop.opcode = TREE_CODE (op[0]);
1626 	    newop.type = type;
1627 	    newop.op0 = op[0];
1628 	    newop.op1 = op[1];
1629 	    newop.op2 = op[2];
1630 	    newoperands[i] = newop;
1631 	  }
1632 	gcc_checking_assert (i == operands.length ());
1633 
1634 	if (vuse)
1635 	  {
1636 	    newvuse = translate_vuse_through_block (newoperands.exists ()
1637 						    ? newoperands : operands,
1638 						    ref->set, ref->type,
1639 						    vuse, phiblock, pred,
1640 						    &same_valid);
1641 	    if (newvuse == NULL_TREE)
1642 	      {
1643 		newoperands.release ();
1644 		return NULL;
1645 	      }
1646 	  }
1647 
1648 	if (changed || newvuse != vuse)
1649 	  {
1650 	    unsigned int new_val_id;
1651 	    pre_expr constant;
1652 
1653 	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1654 						      ref->type,
1655 						      newoperands.exists ()
1656 						      ? newoperands : operands,
1657 						      &newref, VN_WALK);
1658 	    if (result)
1659 	      newoperands.release ();
1660 
1661 	    /* We can always insert constants, so if we have a partial
1662 	       redundant constant load of another type try to translate it
1663 	       to a constant of appropriate type.  */
1664 	    if (result && is_gimple_min_invariant (result))
1665 	      {
1666 		tree tem = result;
1667 		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1668 		  {
1669 		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1670 		    if (tem && !is_gimple_min_invariant (tem))
1671 		      tem = NULL_TREE;
1672 		  }
1673 		if (tem)
1674 		  return get_or_alloc_expr_for_constant (tem);
1675 	      }
1676 
1677 	    /* If we'd have to convert things we would need to validate
1678 	       if we can insert the translated expression.  So fail
1679 	       here for now - we cannot insert an alias with a different
1680 	       type in the VN tables either, as that would assert.  */
1681 	    if (result
1682 		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1683 	      return NULL;
1684 	    else if (!result && newref
1685 		     && !useless_type_conversion_p (ref->type, newref->type))
1686 	      {
1687 		newoperands.release ();
1688 		return NULL;
1689 	      }
1690 
1691 	    expr = (pre_expr) pool_alloc (pre_expr_pool);
1692 	    expr->kind = REFERENCE;
1693 	    expr->id = 0;
1694 
1695 	    if (newref)
1696 	      {
1697 		PRE_EXPR_REFERENCE (expr) = newref;
1698 		constant = fully_constant_expression (expr);
1699 		if (constant != expr)
1700 		  return constant;
1701 
1702 		new_val_id = newref->value_id;
1703 		get_or_alloc_expression_id (expr);
1704 	      }
1705 	    else
1706 	      {
1707 		if (changed || !same_valid)
1708 		  {
1709 		    new_val_id = get_next_value_id ();
1710 		    value_expressions.safe_grow_cleared
1711 		      (get_max_value_id () + 1);
1712 		  }
1713 		else
1714 		  new_val_id = ref->value_id;
1715 		if (!newoperands.exists ())
1716 		  newoperands = operands.copy ();
1717 		newref = vn_reference_insert_pieces (newvuse, ref->set,
1718 						     ref->type,
1719 						     newoperands,
1720 						     result, new_val_id);
1721 		newoperands = vNULL;
1722 		PRE_EXPR_REFERENCE (expr) = newref;
1723 		constant = fully_constant_expression (expr);
1724 		if (constant != expr)
1725 		  return constant;
1726 		get_or_alloc_expression_id (expr);
1727 	      }
1728 	    add_to_value (new_val_id, expr);
1729 	  }
1730 	newoperands.release ();
1731 	return expr;
1732       }
1733       break;
1734 
1735     case NAME:
1736       {
1737 	tree name = PRE_EXPR_NAME (expr);
1738 	gimple def_stmt = SSA_NAME_DEF_STMT (name);
1739 	/* If the SSA name is defined by a PHI node in this block,
1740 	   translate it.  */
1741 	if (gimple_code (def_stmt) == GIMPLE_PHI
1742 	    && gimple_bb (def_stmt) == phiblock)
1743 	  {
1744 	    edge e = find_edge (pred, gimple_bb (def_stmt));
1745 	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1746 
1747 	    /* Handle constant. */
1748 	    if (is_gimple_min_invariant (def))
1749 	      return get_or_alloc_expr_for_constant (def);
1750 
1751 	    return get_or_alloc_expr_for_name (def);
1752 	  }
1753 	/* Otherwise return it unchanged - it will get removed if its
1754 	   value is not available in PRED's AVAIL_OUT set of expressions
1755 	   by the subtraction of TMP_GEN.  */
1756 	return expr;
1757       }
1758 
1759     default:
1760       gcc_unreachable ();
1761     }
1762 }
1763 
1764 /* Wrapper around phi_translate_1 providing caching functionality.  */
1765 
1766 static pre_expr
1767 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1768 	       basic_block pred, basic_block phiblock)
1769 {
1770   expr_pred_trans_t slot = NULL;
1771   pre_expr phitrans;
1772 
1773   if (!expr)
1774     return NULL;
1775 
1776   /* Constants contain no values that need translation.  */
1777   if (expr->kind == CONSTANT)
1778     return expr;
1779 
1780   if (value_id_constant_p (get_expr_value_id (expr)))
1781     return expr;
1782 
1783   /* Don't add translations of NAMEs as those are cheap to translate.  */
1784   if (expr->kind != NAME)
1785     {
1786       if (phi_trans_add (&slot, expr, pred))
1787 	return slot->v;
1788       /* Store NULL for the value we want to return in case we
1789 	 recurse.  */
1790       slot->v = NULL;
1791     }
1792 
1793   /* Translate.  */
1794   phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1795 
1796   if (slot)
1797     {
1798       if (phitrans)
1799 	slot->v = phitrans;
1800       else
1801 	/* Remove failed translations again, they cause insert
1802 	   iteration to not pick up new opportunities reliably.  */
1803 	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
1804     }
1805 
1806   return phitrans;
1807 }
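
/* Editorial note (added commentary, not from the original source):
   the translation cache above is keyed on (expression, predecessor
   block).  A slot pre-set to NULL marks an in-flight translation, so
   a recursive query for the same pair terminates by returning NULL,
   and failed translations are removed again so that later insert
   iterations may retry them.  */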
1808 
1809 
1810 /* For each expression in SET, translate the values through phi nodes
1811    in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1812    expressions in DEST.  */
1813 
1814 static void
1815 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1816 		   basic_block phiblock)
1817 {
1818   vec<pre_expr> exprs;
1819   pre_expr expr;
1820   int i;
1821 
1822   if (gimple_seq_empty_p (phi_nodes (phiblock)))
1823     {
1824       bitmap_set_copy (dest, set);
1825       return;
1826     }
1827 
1828   exprs = sorted_array_from_bitmap_set (set);
1829   FOR_EACH_VEC_ELT (exprs, i, expr)
1830     {
1831       pre_expr translated;
1832       translated = phi_translate (expr, set, NULL, pred, phiblock);
1833       if (!translated)
1834 	continue;
1835 
1836       /* We might end up with multiple expressions from SET being
1837 	 translated to the same value.  In this case we do not want
1838 	 to retain the NARY or REFERENCE expression but prefer a NAME
1839 	 which would be the leader.  */
1840       if (translated->kind == NAME)
1841 	bitmap_value_replace_in_set (dest, translated);
1842       else
1843 	bitmap_value_insert_into_set (dest, translated);
1844     }
1845   exprs.release ();
1846 }
1847 
1848 /* Find the leader for a value (i.e., the name representing that
1849    value) in a given set, and return it.  Return NULL if no leader
1850    is found.  */
1851 
1852 static pre_expr
1853 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1854 {
1855   if (value_id_constant_p (val))
1856     {
1857       unsigned int i;
1858       bitmap_iterator bi;
1859       bitmap exprset = value_expressions[val];
1860 
1861       EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1862 	{
1863 	  pre_expr expr = expression_for_id (i);
1864 	  if (expr->kind == CONSTANT)
1865 	    return expr;
1866 	}
1867     }
1868   if (bitmap_set_contains_value (set, val))
1869     {
1870       /* Rather than walk the entire bitmap of expressions, and see
1871 	 whether any of them has the value we are looking for, we look
1872 	 at the reverse mapping, which tells us the set of expressions
1873 	 that have a given value (IE value->expressions with that
1874 	 value) and see if any of those expressions are in our set.
1875 	 The number of expressions per value is usually significantly
1876 	 less than the number of expressions in the set.  In fact, for
1877 	 large testcases, doing it this way is roughly 5-10x faster
1878 	 than walking the bitmap.
1879 	 If this is somehow a significant loss for some cases, we can
1880 	 choose which set to walk based on which set is smaller.  */
1881       unsigned int i;
1882       bitmap_iterator bi;
1883       bitmap exprset = value_expressions[val];
1884 
1885       EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1886 	return expression_for_id (i);
1887     }
1888   return NULL;
1889 }
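
/* Editorial sketch (hypothetical numbers, not from the original
   source): if value-id 7 maps to the expression-id set {5, 12} and
   SET's expression bitmap contains {1, 5, 9}, the intersection taken
   by bitmap_find_leader yields expression 5, so the leader is found
   without scanning every expression in SET.  */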
1890 
1891 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1892    BLOCK by seeing if it is not killed in the block.  Note that we are
1893    only determining whether there is a store that kills it.  Because
1894    of the order in which clean iterates over values, we are guaranteed
1895    that altered operands will have caused us to be eliminated from the
1896    ANTIC_IN set already.  */
1897 
1898 static bool
1899 value_dies_in_block_x (pre_expr expr, basic_block block)
1900 {
1901   tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1902   vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1903   gimple def;
1904   gimple_stmt_iterator gsi;
1905   unsigned id = get_expression_id (expr);
1906   bool res = false;
1907   ao_ref ref;
1908 
1909   if (!vuse)
1910     return false;
1911 
1912   /* Lookup a previously calculated result.  */
1913   if (EXPR_DIES (block)
1914       && bitmap_bit_p (EXPR_DIES (block), id * 2))
1915     return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1916 
1917   /* A memory expression {e, VUSE} dies in the block if there is a
1918      statement that may clobber e.  If, starting statement walk from the
1919 	     top of the basic block, a statement uses VUSE, there can be no kill
1920 	     in between that use and the original statement that loaded {e, VUSE},
1921      so we can stop walking.  */
1922   ref.base = NULL_TREE;
1923   for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1924     {
1925       tree def_vuse, def_vdef;
1926       def = gsi_stmt (gsi);
1927       def_vuse = gimple_vuse (def);
1928       def_vdef = gimple_vdef (def);
1929 
1930       /* Not a memory statement.  */
1931       if (!def_vuse)
1932 	continue;
1933 
1934       /* Not a may-def.  */
1935       if (!def_vdef)
1936 	{
1937 	  /* A load with the same VUSE, we're done.  */
1938 	  if (def_vuse == vuse)
1939 	    break;
1940 
1941 	  continue;
1942 	}
1943 
1944       /* Init ref only if we really need it.  */
1945       if (ref.base == NULL_TREE
1946 	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1947 					     refx->operands))
1948 	{
1949 	  res = true;
1950 	  break;
1951 	}
1952       /* If the statement may clobber expr, it dies.  */
1953       if (stmt_may_clobber_ref_p_1 (def, &ref))
1954 	{
1955 	  res = true;
1956 	  break;
1957 	}
1958     }
1959 
1960   /* Remember the result.  */
1961   if (!EXPR_DIES (block))
1962     EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1963   bitmap_set_bit (EXPR_DIES (block), id * 2);
1964   if (res)
1965     bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1966 
1967   return res;
1968 }
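
/* Editorial note (added commentary): EXPR_DIES encodes two bits per
   expression id - bit 2*id records that the answer for this block
   has been computed, bit 2*id + 1 records the answer itself.  For a
   hypothetical expression with id 3, bit 6 set and bit 7 clear reads
   as "computed, and the value does not die in this block".  */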
1969 
1970 
1971 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1972    contains its value-id.  */
1973 
1974 static bool
1975 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1976 {
1977   if (op && TREE_CODE (op) == SSA_NAME)
1978     {
1979       unsigned int value_id = VN_INFO (op)->value_id;
1980       if (!(bitmap_set_contains_value (set1, value_id)
1981 	    || (set2 && bitmap_set_contains_value (set2, value_id))))
1982 	return false;
1983     }
1984   return true;
1985 }
1986 
1987 /* Determine if the expression EXPR is valid in SET1 U SET2.
1988    ONLY SET2 CAN BE NULL.
1989    This means that we have a leader for each part of the expression
1990    (if it consists of values), or the expression is an SSA_NAME.
1991    For loads/calls, we also see if the vuse is killed in this block.  */
1992 
1993 static bool
1994 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1995 {
1996   switch (expr->kind)
1997     {
1998     case NAME:
1999       /* By construction all NAMEs are available.  Non-available
2000 	 NAMEs are removed by subtracting TMP_GEN from the sets.  */
2001       return true;
2002     case NARY:
2003       {
2004 	unsigned int i;
2005 	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2006 	for (i = 0; i < nary->length; i++)
2007 	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
2008 	    return false;
2009 	return true;
2010       }
2011       break;
2012     case REFERENCE:
2013       {
2014 	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2015 	vn_reference_op_t vro;
2016 	unsigned int i;
2017 
2018 	FOR_EACH_VEC_ELT (ref->operands, i, vro)
2019 	  {
2020 	    if (!op_valid_in_sets (set1, set2, vro->op0)
2021 		|| !op_valid_in_sets (set1, set2, vro->op1)
2022 		|| !op_valid_in_sets (set1, set2, vro->op2))
2023 	      return false;
2024 	  }
2025 	return true;
2026       }
2027     default:
2028       gcc_unreachable ();
2029     }
2030 }
2031 
2032 /* Clean the set of expressions that are no longer valid in SET1 or
2033    SET2.  This means expressions that are made up of values we have no
2034    leaders for in SET1 or SET2.  This version is used for partial
2035    anticipation, where an expression is removed when some operand value
2036    has no leader in either ANTIC_IN or PA_IN.  */
2037 
2038 static void
2039 dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
2040 {
2041   vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
2042   pre_expr expr;
2043   int i;
2044 
2045   FOR_EACH_VEC_ELT (exprs, i, expr)
2046     {
2047       if (!valid_in_sets (set1, set2, expr))
2048 	bitmap_remove_from_set (set1, expr);
2049     }
2050   exprs.release ();
2051 }
2052 
2053 /* Clean the set of expressions that are no longer valid in SET.  This
2054    means expressions that are made up of values we have no leaders for
2055    in SET.  */
2056 
2057 static void
2058 clean (bitmap_set_t set)
2059 {
2060   vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
2061   pre_expr expr;
2062   int i;
2063 
2064   FOR_EACH_VEC_ELT (exprs, i, expr)
2065     {
2066       if (!valid_in_sets (set, NULL, expr))
2067 	bitmap_remove_from_set (set, expr);
2068     }
2069   exprs.release ();
2070 }
2071 
2072 /* Clean the set of expressions that are no longer valid in SET because
2073    they are clobbered in BLOCK or because they trap and may not be executed.  */
2074 
2075 static void
2076 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2077 {
2078   bitmap_iterator bi;
2079   unsigned i;
2080   pre_expr to_remove = NULL;
2081 
2082   FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2083     {
2084       /* Remove queued expr.  */
2085       if (to_remove)
2086 	{
2087 	  bitmap_remove_from_set (set, to_remove);
2088 	  to_remove = NULL;
2089 	}
2090 
2091       pre_expr expr = expression_for_id (i);
2092       if (expr->kind == REFERENCE)
2093 	{
2094 	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2095 	  if (ref->vuse)
2096 	    {
2097 	      gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2098 	      if (!gimple_nop_p (def_stmt)
2099 		  && ((gimple_bb (def_stmt) != block
2100 		       && !dominated_by_p (CDI_DOMINATORS,
2101 					   block, gimple_bb (def_stmt)))
2102 		      || (gimple_bb (def_stmt) == block
2103 			  && value_dies_in_block_x (expr, block))))
2104 		to_remove = expr;
2105 	    }
2106 	}
2107       else if (expr->kind == NARY)
2108 	{
2109 	  vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2110 	  /* If the NARY may trap, make sure the block does not contain
2111 	     a possible exit point.
2112 	     ???  This is overly conservative if we translate AVAIL_OUT
2113 	     as the available expression might be after the exit point.  */
2114 	  if (BB_MAY_NOTRETURN (block)
2115 	      && vn_nary_may_trap (nary))
2116 	    to_remove = expr;
2117 	}
2118     }
2119 
2120   /* Remove queued expr.  */
2121   if (to_remove)
2122     bitmap_remove_from_set (set, to_remove);
2123 }
2124 
2125 static sbitmap has_abnormal_preds;
2126 
2127 /* Compute the ANTIC set for BLOCK.
2128 
2129    If succs(BLOCK) > 1 then
2130      ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2131    else if succs(BLOCK) == 1 then
2132      ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2133 
2134    ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2135 */
2136 
2137 static bool
2138 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2139 {
2140   bool changed = false;
2141   bitmap_set_t S, old, ANTIC_OUT;
2142   bitmap_iterator bi;
2143   unsigned int bii;
2144   edge e;
2145   edge_iterator ei;
2146   bool was_visited = BB_VISITED (block);
2147 
2148   old = ANTIC_OUT = S = NULL;
2149   BB_VISITED (block) = 1;
2150 
2151   /* If any edges from predecessors are abnormal, antic_in is empty,
2152      so do nothing.  */
2153   if (block_has_abnormal_pred_edge)
2154     goto maybe_dump_sets;
2155 
2156   old = ANTIC_IN (block);
2157   ANTIC_OUT = bitmap_set_new ();
2158 
2159   /* If the block has no successors, ANTIC_OUT is empty.  */
2160   if (EDGE_COUNT (block->succs) == 0)
2161     ;
2162   /* If we have one successor, we could have some phi nodes to
2163      translate through.  */
2164   else if (single_succ_p (block))
2165     {
2166       basic_block succ_bb = single_succ (block);
2167       gcc_assert (BB_VISITED (succ_bb));
2168       phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2169     }
2170   /* If we have multiple successors, we take the intersection of all of
2171      them.  Note that in the case of loop exit phi nodes, we may have
2172      phis to translate through.  */
2173   else
2174     {
2175       size_t i;
2176       basic_block bprime, first = NULL;
2177 
2178       auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2179       FOR_EACH_EDGE (e, ei, block->succs)
2180 	{
2181 	  if (!first
2182 	      && BB_VISITED (e->dest))
2183 	    first = e->dest;
2184 	  else if (BB_VISITED (e->dest))
2185 	    worklist.quick_push (e->dest);
2186 	  else
2187 	    {
2188 	      /* Unvisited successors get their ANTIC_IN replaced by the
2189 		 maximal set to arrive at a maximum ANTIC_IN solution.
2190 		 We can ignore them in the intersection operation and thus
2191 		 need not explicitly represent that maximum solution.  */
2192 	      if (dump_file && (dump_flags & TDF_DETAILS))
2193 		fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2194 			 e->src->index, e->dest->index);
2195 	    }
2196 	}
2197 
2198       /* Of multiple successors we must have visited one already,
2199          which is guaranteed by the iteration order.  */
2200       gcc_assert (first != NULL);
2201 
2202       phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2203 
2204       FOR_EACH_VEC_ELT (worklist, i, bprime)
2205 	{
2206 	  if (!gimple_seq_empty_p (phi_nodes (bprime)))
2207 	    {
2208 	      bitmap_set_t tmp = bitmap_set_new ();
2209 	      phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2210 	      bitmap_set_and (ANTIC_OUT, tmp);
2211 	      bitmap_set_free (tmp);
2212 	    }
2213 	  else
2214 	    bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2215 	}
2216     }
2217 
2218   /* Prune expressions that are clobbered in block and thus become
2219      invalid if translated from ANTIC_OUT to ANTIC_IN.  */
2220   prune_clobbered_mems (ANTIC_OUT, block);
2221 
2222   /* Generate ANTIC_OUT - TMP_GEN.  */
2223   S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2224 
2225   /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
2226   ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2227 					  TMP_GEN (block));
2228 
2229   /* Then union in the ANTIC_OUT - TMP_GEN values,
2230      to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2231   FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2232     bitmap_value_insert_into_set (ANTIC_IN (block),
2233 				  expression_for_id (bii));
2234 
2235   clean (ANTIC_IN (block));
2236 
2237   if (!was_visited || !bitmap_set_equal (old, ANTIC_IN (block)))
2238     changed = true;
2239 
2240  maybe_dump_sets:
2241   if (dump_file && (dump_flags & TDF_DETAILS))
2242     {
2243       if (ANTIC_OUT)
2244 	print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2245 
2246       if (changed)
2247 	fprintf (dump_file, "[changed] ");
2248       print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2249 			block->index);
2250 
2251       if (S)
2252 	print_bitmap_set (dump_file, S, "S", block->index);
2253     }
2254   if (old)
2255     bitmap_set_free (old);
2256   if (S)
2257     bitmap_set_free (S);
2258   if (ANTIC_OUT)
2259     bitmap_set_free (ANTIC_OUT);
2260   return changed;
2261 }
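
/* Editorial sketch (hypothetical diamond CFG, not from the original
   source): for a block B with successors S1 and S2 the above
   computes, value-wise,

     ANTIC_OUT[B] = ANTIC_IN[S1] intersect ANTIC_IN[S2]
     ANTIC_IN[B]  = clean ((EXP_GEN[B] - TMP_GEN[B])
			   U (ANTIC_OUT[B] - TMP_GEN[B]))

   so a value like a + b is anticipated at the top of B only if both
   paths compute its value and B itself does not redefine a or b.  */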
2262 
2263 /* Compute PARTIAL_ANTIC for BLOCK.
2264 
2265    If succs(BLOCK) > 1 then
2266      PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2267      in ANTIC_OUT for all succ(BLOCK)
2268    else if succs(BLOCK) == 1 then
2269      PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2270 
2271    PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2272 				  - ANTIC_IN[BLOCK])
2273 
2274 */
2275 static bool
2276 compute_partial_antic_aux (basic_block block,
2277 			   bool block_has_abnormal_pred_edge)
2278 {
2279   bool changed = false;
2280   bitmap_set_t old_PA_IN;
2281   bitmap_set_t PA_OUT;
2282   edge e;
2283   edge_iterator ei;
2284   unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2285 
2286   old_PA_IN = PA_OUT = NULL;
2287 
2288   /* If any edges from predecessors are abnormal, antic_in is empty,
2289      so do nothing.  */
2290   if (block_has_abnormal_pred_edge)
2291     goto maybe_dump_sets;
2292 
2293   /* If there are too many partially anticipatable values in the
2294      block, phi_translate_set can take exponential time: stop
2295      before the translation starts.  */
2296   if (max_pa
2297       && single_succ_p (block)
2298       && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2299     goto maybe_dump_sets;
2300 
2301   old_PA_IN = PA_IN (block);
2302   PA_OUT = bitmap_set_new ();
2303 
2304   /* If the block has no successors, ANTIC_OUT is empty.  */
2305   /* If the block has no successors, PA_OUT is empty.  */
2306     ;
2307   /* If we have one successor, we could have some phi nodes to
2308      translate through.  Note that we can't phi translate across DFS
2309      back edges in partial antic, because it uses a union operation on
2310      the successors.  For recurrences like IV's, we will end up
2311      generating a new value in the set on each go around (i + 3 (VH.1),
2312      VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever.  */
2313   else if (single_succ_p (block))
2314     {
2315       basic_block succ = single_succ (block);
2316       if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2317 	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2318     }
2319   /* If we have multiple successors, we take the union of all of
2320      them.  */
2321   else
2322     {
2323       size_t i;
2324       basic_block bprime;
2325 
2326       auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2327       FOR_EACH_EDGE (e, ei, block->succs)
2328 	{
2329 	  if (e->flags & EDGE_DFS_BACK)
2330 	    continue;
2331 	  worklist.quick_push (e->dest);
2332 	}
2333       if (worklist.length () > 0)
2334 	{
2335 	  FOR_EACH_VEC_ELT (worklist, i, bprime)
2336 	    {
2337 	      unsigned int i;
2338 	      bitmap_iterator bi;
2339 
2340 	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2341 		bitmap_value_insert_into_set (PA_OUT,
2342 					      expression_for_id (i));
2343 	      if (!gimple_seq_empty_p (phi_nodes (bprime)))
2344 		{
2345 		  bitmap_set_t pa_in = bitmap_set_new ();
2346 		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2347 		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2348 		    bitmap_value_insert_into_set (PA_OUT,
2349 						  expression_for_id (i));
2350 		  bitmap_set_free (pa_in);
2351 		}
2352 	      else
2353 		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2354 		  bitmap_value_insert_into_set (PA_OUT,
2355 						expression_for_id (i));
2356 	    }
2357 	}
2358     }
2359 
2360   /* Prune expressions that are clobbered in block and thus become
2361      invalid if translated from PA_OUT to PA_IN.  */
2362   prune_clobbered_mems (PA_OUT, block);
2363 
2364   /* PA_IN starts with PA_OUT - TMP_GEN.
2365      Then we subtract the values in ANTIC_IN.  */
2366   PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2367 
2368   /* For partial antic, we want to put back in the phi results, since
2369      we will properly avoid making them partially antic over backedges.  */
2370   bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2371   bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2372 
2373   /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2374   bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2375 
2376   dependent_clean (PA_IN (block), ANTIC_IN (block));
2377 
2378   if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2379     changed = true;
2380 
2381  maybe_dump_sets:
2382   if (dump_file && (dump_flags & TDF_DETAILS))
2383     {
2384       if (PA_OUT)
2385 	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2386 
2387       print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2388     }
2389   if (old_PA_IN)
2390     bitmap_set_free (old_PA_IN);
2391   if (PA_OUT)
2392     bitmap_set_free (PA_OUT);
2393   return changed;
2394 }
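
/* Editorial sketch (hypothetical block B with successors S1 and S2,
   DFS back edges skipped): the above computes, value-wise,

     PA_OUT[B] = union over Si of (ANTIC_IN[Si] U PA_IN[Si])
     PA_IN[B]  = dependent_clean (PA_OUT[B] - TMP_GEN[B]
				  - ANTIC_IN[B])

   so a value computed along only some paths stays partially
   anticipated, while anything already fully anticipated in B is
   subtracted back out.  */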
2395 
2396 /* Compute ANTIC and partial ANTIC sets.  */
2397 
2398 static void
2399 compute_antic (void)
2400 {
2401   bool changed = true;
2402   int num_iterations = 0;
2403   basic_block block;
2404   int i;
2405   edge_iterator ei;
2406   edge e;
2407 
2408   /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2409      We pre-build the map of blocks with incoming abnormal edges here.  */
2410   has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2411   bitmap_clear (has_abnormal_preds);
2412 
2413   FOR_ALL_BB_FN (block, cfun)
2414     {
2415       BB_VISITED (block) = 0;
2416 
2417       FOR_EACH_EDGE (e, ei, block->preds)
2418 	if (e->flags & EDGE_ABNORMAL)
2419 	  {
2420 	    bitmap_set_bit (has_abnormal_preds, block->index);
2421 
2422 	    /* We also anticipate nothing.  */
2423 	    BB_VISITED (block) = 1;
2424 	    break;
2425 	  }
2426 
2427       /* While we are here, give empty ANTIC_IN sets to each block.  */
2428       ANTIC_IN (block) = bitmap_set_new ();
2429       PA_IN (block) = bitmap_set_new ();
2430     }
2431 
2432   /* At the exit block we anticipate nothing.  */
2433   BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2434 
2435   sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
2436   bitmap_ones (worklist);
2437   while (changed)
2438     {
2439       if (dump_file && (dump_flags & TDF_DETAILS))
2440 	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2441       /* ???  We need to clear our PHI translation cache here as the
2442          ANTIC sets shrink and we restrict valid translations to
2443 	 those having operands with leaders in ANTIC.  Same below
2444 	 for PA ANTIC computation.  */
2445       num_iterations++;
2446       changed = false;
2447       for (i = postorder_num - 1; i >= 0; i--)
2448 	{
2449 	  if (bitmap_bit_p (worklist, postorder[i]))
2450 	    {
2451 	      basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2452 	      bitmap_clear_bit (worklist, block->index);
2453 	      if (compute_antic_aux (block,
2454 				     bitmap_bit_p (has_abnormal_preds,
2455 						   block->index)))
2456 		{
2457 		  FOR_EACH_EDGE (e, ei, block->preds)
2458 		    bitmap_set_bit (worklist, e->src->index);
2459 		  changed = true;
2460 		}
2461 	    }
2462 	}
2463       /* Theoretically possible, but *highly* unlikely.  */
2464       gcc_checking_assert (num_iterations < 500);
2465     }
2466 
2467   statistics_histogram_event (cfun, "compute_antic iterations",
2468 			      num_iterations);
2469 
2470   if (do_partial_partial)
2471     {
2472       bitmap_ones (worklist);
2473       mark_dfs_back_edges ();
2474       num_iterations = 0;
2475       changed = true;
2476       while (changed)
2477 	{
2478 	  if (dump_file && (dump_flags & TDF_DETAILS))
2479 	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2480 	  num_iterations++;
2481 	  changed = false;
2482 	  for (i = postorder_num - 1 ; i >= 0; i--)
2483 	    {
2484 	      if (bitmap_bit_p (worklist, postorder[i]))
2485 		{
2486 		  basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2487 		  bitmap_clear_bit (worklist, block->index);
2488 		  if (compute_partial_antic_aux (block,
2489 						 bitmap_bit_p (has_abnormal_preds,
2490 							       block->index)))
2491 		    {
2492 		      FOR_EACH_EDGE (e, ei, block->preds)
2493 			bitmap_set_bit (worklist, e->src->index);
2494 		      changed = true;
2495 		    }
2496 		}
2497 	    }
2498 	  /* Theoretically possible, but *highly* unlikely.  */
2499 	  gcc_checking_assert (num_iterations < 500);
2500 	}
2501       statistics_histogram_event (cfun, "compute_partial_antic iterations",
2502 				  num_iterations);
2503     }
2504   sbitmap_free (has_abnormal_preds);
2505   sbitmap_free (worklist);
2506 }
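
/* Editorial note (added commentary): ANTIC is a backwards dataflow
   problem, so when ANTIC_IN of a block changes only its predecessors
   can be affected.  The sbitmap worklist above re-queues exactly
   those predecessors, which keeps the number of fixpoint iterations
   small in practice (the checking asserts above expect fewer than
   500).  */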
2507 
2508 
2509 /* Inserted expressions are placed onto this worklist, which is used
2510    for performing quick dead code elimination of insertions we made
2511    that didn't turn out to be necessary.   */
2512 static bitmap inserted_exprs;
2513 
2514 /* The actual worker for create_component_ref_by_pieces.  */
2515 
2516 static tree
2517 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2518 				  unsigned int *operand, gimple_seq *stmts)
2519 {
2520   vn_reference_op_t currop = &ref->operands[*operand];
2521   tree genop;
2522   ++*operand;
2523   switch (currop->opcode)
2524     {
2525     case CALL_EXPR:
2526       {
2527 	tree folded, sc = NULL_TREE;
2528 	unsigned int nargs = 0;
2529 	tree fn, *args;
2530 	if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2531 	  fn = currop->op0;
2532 	else
2533 	  fn = find_or_generate_expression (block, currop->op0, stmts);
2534 	if (!fn)
2535 	  return NULL_TREE;
2536 	if (currop->op1)
2537 	  {
2538 	    sc = find_or_generate_expression (block, currop->op1, stmts);
2539 	    if (!sc)
2540 	      return NULL_TREE;
2541 	  }
2542 	args = XNEWVEC (tree, ref->operands.length () - 1);
2543 	while (*operand < ref->operands.length ())
2544 	  {
2545 	    args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2546 							    operand, stmts);
2547 	    if (!args[nargs])
2548 	      { free (args); return NULL_TREE; }	/* Don't leak ARGS.  */
2549 	    nargs++;
2550 	  }
2551 	folded = build_call_array (currop->type,
2552 				   (TREE_CODE (fn) == FUNCTION_DECL
2553 				    ? build_fold_addr_expr (fn) : fn),
2554 				   nargs, args);
2555 	if (currop->with_bounds)
2556 	  CALL_WITH_BOUNDS_P (folded) = true;
2557 	free (args);
2558 	if (sc)
2559 	  CALL_EXPR_STATIC_CHAIN (folded) = sc;
2560 	return folded;
2561       }
2562 
2563     case MEM_REF:
2564       {
2565 	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2566 							stmts);
2567 	if (!baseop)
2568 	  return NULL_TREE;
2569 	tree offset = currop->op0;
2570 	if (TREE_CODE (baseop) == ADDR_EXPR
2571 	    && handled_component_p (TREE_OPERAND (baseop, 0)))
2572 	  {
2573 	    HOST_WIDE_INT off;
2574 	    tree base;
2575 	    base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2576 						  &off);
2577 	    gcc_assert (base);
2578 	    offset = int_const_binop (PLUS_EXPR, offset,
2579 				      build_int_cst (TREE_TYPE (offset),
2580 						     off));
2581 	    baseop = build_fold_addr_expr (base);
2582 	  }
2583 	return fold_build2 (MEM_REF, currop->type, baseop, offset);
2584       }
2585 
2586     case TARGET_MEM_REF:
2587       {
2588 	tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2589 	vn_reference_op_t nextop = &ref->operands[++*operand];
2590 	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2591 							stmts);
2592 	if (!baseop)
2593 	  return NULL_TREE;
2594 	if (currop->op0)
2595 	  {
2596 	    genop0 = find_or_generate_expression (block, currop->op0, stmts);
2597 	    if (!genop0)
2598 	      return NULL_TREE;
2599 	  }
2600 	if (nextop->op0)
2601 	  {
2602 	    genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2603 	    if (!genop1)
2604 	      return NULL_TREE;
2605 	  }
2606 	return build5 (TARGET_MEM_REF, currop->type,
2607 		       baseop, currop->op2, genop0, currop->op1, genop1);
2608       }
2609 
2610     case ADDR_EXPR:
2611       if (currop->op0)
2612 	{
2613 	  gcc_assert (is_gimple_min_invariant (currop->op0));
2614 	  return currop->op0;
2615 	}
2616       /* Fallthrough.  */
2617     case REALPART_EXPR:
2618     case IMAGPART_EXPR:
2619     case VIEW_CONVERT_EXPR:
2620       {
2621 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2622 							stmts);
2623 	if (!genop0)
2624 	  return NULL_TREE;
2625 	return fold_build1 (currop->opcode, currop->type, genop0);
2626       }
2627 
2628     case WITH_SIZE_EXPR:
2629       {
2630 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2631 							stmts);
2632 	if (!genop0)
2633 	  return NULL_TREE;
2634 	tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2635 	if (!genop1)
2636 	  return NULL_TREE;
2637 	return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2638       }
2639 
2640     case BIT_FIELD_REF:
2641       {
2642 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2643 							stmts);
2644 	if (!genop0)
2645 	  return NULL_TREE;
2646 	tree op1 = currop->op0;
2647 	tree op2 = currop->op1;
2648 	return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2649       }
2650 
2651       /* For array ref vn_reference_op's, operand 1 of the array ref
2652 	 is op0 of the reference op and operand 3 of the array ref is
2653 	 op1.  */
2654     case ARRAY_RANGE_REF:
2655     case ARRAY_REF:
2656       {
2657 	tree genop0;
2658 	tree genop1 = currop->op0;
2659 	tree genop2 = currop->op1;
2660 	tree genop3 = currop->op2;
2661 	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2662 						   stmts);
2663 	if (!genop0)
2664 	  return NULL_TREE;
2665 	genop1 = find_or_generate_expression (block, genop1, stmts);
2666 	if (!genop1)
2667 	  return NULL_TREE;
2668 	if (genop2)
2669 	  {
2670 	    tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2671 	    /* Drop zero minimum index if redundant.  */
2672 	    if (integer_zerop (genop2)
2673 		&& (!domain_type
2674 		    || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2675 	      genop2 = NULL_TREE;
2676 	    else
2677 	      {
2678 		genop2 = find_or_generate_expression (block, genop2, stmts);
2679 		if (!genop2)
2680 		  return NULL_TREE;
2681 	      }
2682 	  }
2683 	if (genop3)
2684 	  {
2685 	    tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2686 	    /* We can't always put a size in units of the element alignment
2687 	       here as the element alignment may not be visible.  See
2688 	       PR43783.  Simply drop the element size for constant
2689 	       sizes.  */
2690 	    if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2691 	      genop3 = NULL_TREE;
2692 	    else
2693 	      {
2694 		genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2695 				     size_int (TYPE_ALIGN_UNIT (elmt_type)));
2696 		genop3 = find_or_generate_expression (block, genop3, stmts);
2697 		if (!genop3)
2698 		  return NULL_TREE;
2699 	      }
2700 	  }
2701 	return build4 (currop->opcode, currop->type, genop0, genop1,
2702 		       genop2, genop3);
2703       }
2704     case COMPONENT_REF:
2705       {
2706 	tree op0;
2707 	tree op1;
2708 	tree genop2 = currop->op1;
2709 	op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2710 	if (!op0)
2711 	  return NULL_TREE;
2712 	/* op1 should be a FIELD_DECL; those are represented by themselves.  */
2713 	op1 = currop->op0;
2714 	if (genop2)
2715 	  {
2716 	    genop2 = find_or_generate_expression (block, genop2, stmts);
2717 	    if (!genop2)
2718 	      return NULL_TREE;
2719 	  }
2720 	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2721       }
2722 
2723     case SSA_NAME:
2724       {
2725 	genop = find_or_generate_expression (block, currop->op0, stmts);
2726 	return genop;
2727       }
2728     case STRING_CST:
2729     case INTEGER_CST:
2730     case COMPLEX_CST:
2731     case VECTOR_CST:
2732     case REAL_CST:
2733     case CONSTRUCTOR:
2734     case VAR_DECL:
2735     case PARM_DECL:
2736     case CONST_DECL:
2737     case RESULT_DECL:
2738     case FUNCTION_DECL:
2739       return currop->op0;
2740 
2741     default:
2742       gcc_unreachable ();
2743     }
2744 }
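
/* Editorial sketch for the ARRAY_REF case above (hypothetical
   numbers): an element size of 8 bytes with a 4-byte element
   alignment would be rewritten as genop3 = 8 / 4 = 2 alignment
   units via EXACT_DIV_EXPR before being looked up or generated as
   an expression.  */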
2745 
2746 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2747    COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2748    trying to rename aggregates into SSA form directly, which is a no-no.
2749 
2750    Thus, this routine doesn't create temporaries, it just builds a
2751    single access expression for the array, calling
2752    find_or_generate_expression to build the innermost pieces.
2753 
2754    This function is a subroutine of create_expression_by_pieces, and
2755    should not be called on its own unless you really know what you
2756    are doing.  */
2757 
2758 static tree
2759 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2760 				gimple_seq *stmts)
2761 {
2762   unsigned int op = 0;
2763   return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2764 }
2765 
2766 /* Find a simple leader for an expression, or generate one using
2767    create_expression_by_pieces from a NARY expression for the value.
2768    BLOCK is the basic_block we are looking for leaders in.
2769    OP is the tree expression to find a leader for or generate.
2770    Returns the leader or NULL_TREE on failure.  */
2771 
2772 static tree
2773 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2774 {
2775   pre_expr expr = get_or_alloc_expr_for (op);
2776   unsigned int lookfor = get_expr_value_id (expr);
2777   pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2778   if (leader)
2779     {
2780       if (leader->kind == NAME)
2781 	return PRE_EXPR_NAME (leader);
2782       else if (leader->kind == CONSTANT)
2783 	return PRE_EXPR_CONSTANT (leader);
2784 
2785       /* Defer.  */
2786       return NULL_TREE;
2787     }
2788 
2789   /* It must be a complex expression, so generate it recursively.  Note
2790      that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2791      where the insert algorithm fails to insert a required expression.  */
2792   bitmap exprset = value_expressions[lookfor];
2793   bitmap_iterator bi;
2794   unsigned int i;
2795   EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2796     {
2797       pre_expr temp = expression_for_id (i);
2798       /* We cannot insert random REFERENCE expressions at arbitrary
2799 	 places.  We can insert NARYs, which eventually re-materialize
2800 	 their operand values.  */
2801       if (temp->kind == NARY)
2802 	return create_expression_by_pieces (block, temp, stmts,
2803 					    get_expr_type (expr));
2804     }
2805 
2806   /* Defer.  */
2807   return NULL_TREE;
2808 }
2809 
2810 #define NECESSARY GF_PLF_1
2811 
2812 /* Create an expression in pieces, so that we can handle very complex
2813    expressions that may be ANTIC, but not necessarily GIMPLE.
2814    BLOCK is the basic block the expression will be inserted into,
2815    EXPR is the expression to insert (in value form),
2816    STMTS is a statement list to append the necessary insertions into.
2817 
2818    This function will die if we hit some value that shouldn't be
2819    ANTIC but is (IE there is no leader for it, or its components).
2820    The function returns NULL_TREE in case a different antic expression
2821    has to be inserted first.
2822    This function may also generate expressions that are themselves
2823    partially or fully redundant.  Those that are will be either made
2824    fully redundant during the next iteration of insert (for partially
2825    redundant ones), or eliminated by eliminate (for fully redundant
2826    ones).  */
2827 
2828 static tree
2829 create_expression_by_pieces (basic_block block, pre_expr expr,
2830 			     gimple_seq *stmts, tree type)
2831 {
2832   tree name;
2833   tree folded;
2834   gimple_seq forced_stmts = NULL;
2835   unsigned int value_id;
2836   gimple_stmt_iterator gsi;
2837   tree exprtype = type ? type : get_expr_type (expr);
2838   pre_expr nameexpr;
2839   gassign *newstmt;
2840 
2841   switch (expr->kind)
2842     {
2843       /* We may hit the NAME/CONSTANT case if we have to convert types
2844 	 that value numbering saw through.  */
2845     case NAME:
2846       folded = PRE_EXPR_NAME (expr);
2847       break;
2848     case CONSTANT:
2849       folded = PRE_EXPR_CONSTANT (expr);
2850       break;
2851     case REFERENCE:
2852       {
2853 	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2854 	folded = create_component_ref_by_pieces (block, ref, stmts);
2855 	if (!folded)
2856 	  return NULL_TREE;
2857       }
2858       break;
2859     case NARY:
2860       {
2861 	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2862 	tree *genop = XALLOCAVEC (tree, nary->length);
2863 	unsigned i;
2864 	for (i = 0; i < nary->length; ++i)
2865 	  {
2866 	    genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2867 	    if (!genop[i])
2868 	      return NULL_TREE;
2869 	    /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
2870 	       may have conversions stripped.  */
2871 	    if (nary->opcode == POINTER_PLUS_EXPR)
2872 	      {
2873 		if (i == 0)
2874 		  genop[i] = gimple_convert (&forced_stmts,
2875 					     nary->type, genop[i]);
2876 		else if (i == 1)
2877 		  genop[i] = gimple_convert (&forced_stmts,
2878 					     sizetype, genop[i]);
2879 	      }
2880 	    else
2881 	      genop[i] = gimple_convert (&forced_stmts,
2882 					 TREE_TYPE (nary->op[i]), genop[i]);
2883 	  }
2884 	if (nary->opcode == CONSTRUCTOR)
2885 	  {
2886 	    vec<constructor_elt, va_gc> *elts = NULL;
2887 	    for (i = 0; i < nary->length; ++i)
2888 	      CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2889 	    folded = build_constructor (nary->type, elts);
2890 	  }
2891 	else
2892 	  {
2893 	    switch (nary->length)
2894 	      {
2895 	      case 1:
2896 		folded = fold_build1 (nary->opcode, nary->type,
2897 				      genop[0]);
2898 		break;
2899 	      case 2:
2900 		folded = fold_build2 (nary->opcode, nary->type,
2901 				      genop[0], genop[1]);
2902 		break;
2903 	      case 3:
2904 		folded = fold_build3 (nary->opcode, nary->type,
2905 				      genop[0], genop[1], genop[2]);
2906 		break;
2907 	      default:
2908 		gcc_unreachable ();
2909 	      }
2910 	  }
2911       }
2912       break;
2913     default:
2914       gcc_unreachable ();
2915     }
2916 
2917   if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2918     folded = fold_convert (exprtype, folded);
2919 
2920   /* Force the generated expression to be a sequence of GIMPLE
2921      statements.
2922      We have to call unshare_expr because force_gimple_operand may
2923      modify the tree we pass to it.  */
2924   gimple_seq tem = NULL;
2925   folded = force_gimple_operand (unshare_expr (folded), &tem,
2926 				 false, NULL);
2927   gimple_seq_add_seq_without_update (&forced_stmts, tem);
2928 
2929   /* If we have any intermediate expressions, add them to the value
2930      sets and chain them into the instruction stream.  */
2931   if (forced_stmts)
2932     {
2933       gsi = gsi_start (forced_stmts);
2934       for (; !gsi_end_p (gsi); gsi_next (&gsi))
2935 	{
2936 	  gimple stmt = gsi_stmt (gsi);
2937 	  tree forcedname = gimple_get_lhs (stmt);
2938 	  pre_expr nameexpr;
2939 
2940 	  if (TREE_CODE (forcedname) == SSA_NAME)
2941 	    {
2942 	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2943 	      VN_INFO_GET (forcedname)->valnum = forcedname;
2944 	      VN_INFO (forcedname)->value_id = get_next_value_id ();
2945 	      nameexpr = get_or_alloc_expr_for_name (forcedname);
2946 	      add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2947 	      bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2948 	      bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2949 	    }
2950 
2951 	  gimple_set_vuse (stmt, BB_LIVE_VOP_ON_EXIT (block));
2952 	  gimple_set_modified (stmt, true);
2953 	}
2954       gimple_seq_add_seq (stmts, forced_stmts);
2955     }
2956 
2957   name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2958   newstmt = gimple_build_assign (name, folded);
2959   gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2960   gimple_set_modified (newstmt, true);
2961   gimple_set_plf (newstmt, NECESSARY, false);
2962 
2963   gimple_seq_add_stmt (stmts, newstmt);
2964   bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
2965 
2966   /* Fold the last statement.  */
2967   gsi = gsi_last (*stmts);
2968   if (fold_stmt_inplace (&gsi))
2969     update_stmt (gsi_stmt (gsi));
2970 
2971   /* Add a value number to the temporary.
2972      The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2973      we are creating the expression by pieces, and this particular piece of
2974      the expression may have been represented.  There is no harm in replacing
2975      here.  */
2976   value_id = get_expr_value_id (expr);
2977   VN_INFO_GET (name)->value_id = value_id;
2978   VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2979   if (VN_INFO (name)->valnum == NULL_TREE)
2980     VN_INFO (name)->valnum = name;
2981   gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2982   nameexpr = get_or_alloc_expr_for_name (name);
2983   add_to_value (value_id, nameexpr);
2984   if (NEW_SETS (block))
2985     bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2986   bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2987 
2988   pre_stats.insertions++;
2989   if (dump_file && (dump_flags & TDF_DETAILS))
2990     {
2991       fprintf (dump_file, "Inserted ");
2992       print_gimple_stmt (dump_file, newstmt, 0, 0);
2993       fprintf (dump_file, " in predecessor %d (%04d)\n",
2994 	       block->index, value_id);
2995     }
2996 
2997   return name;
2998 }
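
/* Editorial sketch (hypothetical SSA names): for a NARY
   {PLUS_EXPR, int, a_1, b_2} the function above appends

     pretmp_3 = a_1 + b_2;

   to STMTS, gives pretmp_3 the value-id of EXPR and registers it as
   the leader in NEW_SETS and AVAIL_OUT of BLOCK, so that later
   insert iterations and the eliminate phase can reuse it.  */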
2999 
3000 
3001 /* Insert the to-be-made-available values of expression EXPRNUM for each
3002    predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3003    merge the result with a phi node, giving it the same value number as
3004    the expression.  Return true if we have inserted new stuff.  */
3005 
3006 static bool
3007 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3008 			    vec<pre_expr> avail)
3009 {
3010   pre_expr expr = expression_for_id (exprnum);
3011   pre_expr newphi;
3012   unsigned int val = get_expr_value_id (expr);
3013   edge pred;
3014   bool insertions = false;
3015   bool nophi = false;
3016   basic_block bprime;
3017   pre_expr eprime;
3018   edge_iterator ei;
3019   tree type = get_expr_type (expr);
3020   tree temp;
3021   gphi *phi;
3022 
3023   /* Make sure we aren't creating an induction variable.  */
3024   if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3025     {
3026       bool firstinsideloop = false;
3027       bool secondinsideloop = false;
3028       firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3029 					       EDGE_PRED (block, 0)->src);
3030       secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3031 						EDGE_PRED (block, 1)->src);
3032       /* Induction variables only have one edge inside the loop.  */
3033       if ((firstinsideloop ^ secondinsideloop)
3034 	  && expr->kind != REFERENCE)
3035 	{
3036 	  if (dump_file && (dump_flags & TDF_DETAILS))
3037 	    fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3038 	  nophi = true;
3039 	}
3040     }
3041 
3042   /* Make the necessary insertions.  */
3043   FOR_EACH_EDGE (pred, ei, block->preds)
3044     {
3045       gimple_seq stmts = NULL;
3046       tree builtexpr;
3047       bprime = pred->src;
3048       eprime = avail[pred->dest_idx];
3049 
3050       if (eprime->kind != NAME && eprime->kind != CONSTANT)
3051 	{
3052 	  builtexpr = create_expression_by_pieces (bprime, eprime,
3053 						   &stmts, type);
3054 	  gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3055 	  gsi_insert_seq_on_edge (pred, stmts);
3056 	  if (!builtexpr)
3057 	    {
3058 	      /* We cannot insert a PHI node if we failed to insert
3059 		 on one edge.  */
3060 	      nophi = true;
3061 	      continue;
3062 	    }
3063 	  avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3064 	  insertions = true;
3065 	}
3066       else if (eprime->kind == CONSTANT)
3067 	{
3068 	  /* Constants may not have the right type; fold_convert
3069 	     should give us back a constant with the right type.  */
3070 	  tree constant = PRE_EXPR_CONSTANT (eprime);
3071 	  if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3072 	    {
3073 	      tree builtexpr = fold_convert (type, constant);
3074 	      if (!is_gimple_min_invariant (builtexpr))
3075 		{
3076 		  tree forcedexpr = force_gimple_operand (builtexpr,
3077 							  &stmts, true,
3078 							  NULL);
3079 		  if (!is_gimple_min_invariant (forcedexpr))
3080 		    {
3081 		      if (forcedexpr != builtexpr)
3082 			{
3083 			  VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3084 			  VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3085 			}
3086 		      if (stmts)
3087 			{
3088 			  gimple_stmt_iterator gsi;
3089 			  gsi = gsi_start (stmts);
3090 			  for (; !gsi_end_p (gsi); gsi_next (&gsi))
3091 			    {
3092 			      gimple stmt = gsi_stmt (gsi);
3093 			      tree lhs = gimple_get_lhs (stmt);
3094 			      if (TREE_CODE (lhs) == SSA_NAME)
3095 				bitmap_set_bit (inserted_exprs,
3096 						SSA_NAME_VERSION (lhs));
3097 			      gimple_set_plf (stmt, NECESSARY, false);
3098 			    }
3099 			  gsi_insert_seq_on_edge (pred, stmts);
3100 			}
3101 		      avail[pred->dest_idx]
3102 			= get_or_alloc_expr_for_name (forcedexpr);
3103 		    }
3104 		}
3105 	      else
3106 		avail[pred->dest_idx]
3107 		    = get_or_alloc_expr_for_constant (builtexpr);
3108 	    }
3109 	}
3110       else if (eprime->kind == NAME)
3111 	{
3112 	  /* We may have to do a conversion because our value
3113 	     numbering can look through types in certain cases, but
3114 	     our IL requires that all operands of a phi node have the same
3115 	     type.  */
3116 	  tree name = PRE_EXPR_NAME (eprime);
3117 	  if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3118 	    {
3119 	      tree builtexpr;
3120 	      tree forcedexpr;
3121 	      builtexpr = fold_convert (type, name);
3122 	      forcedexpr = force_gimple_operand (builtexpr,
3123 						 &stmts, true,
3124 						 NULL);
3125 
3126 	      if (forcedexpr != name)
3127 		{
3128 		  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3129 		  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3130 		}
3131 
3132 	      if (stmts)
3133 		{
3134 		  gimple_stmt_iterator gsi;
3135 		  gsi = gsi_start (stmts);
3136 		  for (; !gsi_end_p (gsi); gsi_next (&gsi))
3137 		    {
3138 		      gimple stmt = gsi_stmt (gsi);
3139 		      tree lhs = gimple_get_lhs (stmt);
3140 		      if (TREE_CODE (lhs) == SSA_NAME)
3141 			bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3142 		      gimple_set_plf (stmt, NECESSARY, false);
3143 		    }
3144 		  gsi_insert_seq_on_edge (pred, stmts);
3145 		}
3146 	      avail[pred->dest_idx] = get_or_alloc_expr_for_name (forcedexpr);
3147 	    }
3148 	}
3149     }
3150   /* If we didn't want a phi node, and we made insertions, we still have
3151      inserted new stuff, and thus return true.  If we didn't want a phi node,
3152      and didn't make insertions, we haven't added anything new, so return
3153      false.  */
3154   if (nophi && insertions)
3155     return true;
3156   else if (nophi && !insertions)
3157     return false;
3158 
3159   /* Now build a phi for the new variable.  */
3160   temp = make_temp_ssa_name (type, NULL, "prephitmp");
3161   phi = create_phi_node (temp, block);
3162 
3163   gimple_set_plf (phi, NECESSARY, false);
3164   VN_INFO_GET (temp)->value_id = val;
3165   VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3166   if (VN_INFO (temp)->valnum == NULL_TREE)
3167     VN_INFO (temp)->valnum = temp;
3168   bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3169   FOR_EACH_EDGE (pred, ei, block->preds)
3170     {
3171       pre_expr ae = avail[pred->dest_idx];
3172       gcc_assert (get_expr_type (ae) == type
3173 		  || useless_type_conversion_p (type, get_expr_type (ae)));
3174       if (ae->kind == CONSTANT)
3175 	add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3176 		     pred, UNKNOWN_LOCATION);
3177       else
3178 	add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3179     }
3180 
3181   newphi = get_or_alloc_expr_for_name (temp);
3182   add_to_value (val, newphi);
3183 
3184   /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3185      this insertion, since we test for the existence of this value in PHI_GEN
3186      before proceeding with the partial redundancy checks in insert_aux.
3187 
3188      The value may exist in AVAIL_OUT, in particular, it could be represented
3189      by the expression we are trying to eliminate, in which case we want the
3190      replacement to occur.  If it does not exist in AVAIL_OUT, we want it
3191      inserted there.
3192 
3193      Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3194      this block, because if it did, it would have existed in our dominator's
3195      AVAIL_OUT, and would have been skipped due to the full redundancy check.
3196   */
3197 
3198   bitmap_insert_into_set (PHI_GEN (block), newphi);
3199   bitmap_value_replace_in_set (AVAIL_OUT (block),
3200 			       newphi);
3201   bitmap_insert_into_set (NEW_SETS (block),
3202 			  newphi);
3203 
3204   /* If we insert a PHI node for a conversion of another PHI node
3205      in the same basic block, try to preserve range information.
3206      This is important so that follow-up loop passes receive optimal
3207      number-of-iterations analysis results.  See PR61743.  */
3208   if (expr->kind == NARY
3209       && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3210       && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3211       && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3212       && INTEGRAL_TYPE_P (type)
3213       && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3214       && (TYPE_PRECISION (type)
3215 	  >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3216       && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3217     {
3218       wide_int min, max;
3219       if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3220 	  && !wi::neg_p (min, SIGNED)
3221 	  && !wi::neg_p (max, SIGNED))
3222 	/* Just handle extension and sign-changes of all-positive ranges.  */
3223 	set_range_info (temp,
3224 			SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3225 			wide_int_storage::from (min, TYPE_PRECISION (type),
3226 						TYPE_SIGN (type)),
3227 			wide_int_storage::from (max, TYPE_PRECISION (type),
3228 						TYPE_SIGN (type)));
3229     }
3230 
3231   if (dump_file && (dump_flags & TDF_DETAILS))
3232     {
3233       fprintf (dump_file, "Created phi ");
3234       print_gimple_stmt (dump_file, phi, 0, 0);
3235       fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3236     }
3237   pre_stats.phis++;
3238   return true;
3239 }
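
/* Editorial sketch (hypothetical names and value-ids): if value 17
   reaches BLOCK as t_5 on one incoming edge and as a freshly
   inserted pretmp_9 on the other, the function above creates

     prephitmp_10 = PHI <t_5 (pred1), pretmp_9 (pred2)>

   and makes prephitmp_10 the leader for value 17 in BLOCK's
   AVAIL_OUT, PHI_GEN and NEW_SETS.  */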
3240 
3241 
3242 
3243 /* Perform insertion of partially redundant values.
3244    For BLOCK, do the following:
3245    1.  Propagate the NEW_SETS of the dominator into the current block.
3246    If the block has multiple predecessors,
3247        2a. Iterate over the ANTIC expressions for the block to see if
3248 	   any of them are partially redundant.
3249        2b. If so, insert them into the necessary predecessors to make
3250 	   the expression fully redundant.
3251        2c. Insert a new PHI merging the values of the predecessors.
3252        2d. Insert the new PHI, and the new expressions, into the
3253 	   NEW_SETS set.
3254    3. Recursively call ourselves on the dominator children of BLOCK.
3255 
3256    Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3257    do_regular_insertion and do_partial_partial_insertion.
3258 
3259 */
3260 
3261 static bool
3262 do_regular_insertion (basic_block block, basic_block dom)
3263 {
3264   bool new_stuff = false;
3265   vec<pre_expr> exprs;
3266   pre_expr expr;
3267   auto_vec<pre_expr> avail;
3268   int i;
3269 
3270   exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3271   avail.safe_grow (EDGE_COUNT (block->preds));
3272 
3273   FOR_EACH_VEC_ELT (exprs, i, expr)
3274     {
3275       if (expr->kind == NARY
3276 	  || expr->kind == REFERENCE)
3277 	{
3278 	  unsigned int val;
3279 	  bool by_some = false;
3280 	  bool cant_insert = false;
3281 	  bool all_same = true;
3282 	  pre_expr first_s = NULL;
3283 	  edge pred;
3284 	  basic_block bprime;
3285 	  pre_expr eprime = NULL;
3286 	  edge_iterator ei;
3287 	  pre_expr edoubleprime = NULL;
3288 	  bool do_insertion = false;
3289 
3290 	  val = get_expr_value_id (expr);
3291 	  if (bitmap_set_contains_value (PHI_GEN (block), val))
3292 	    continue;
3293 	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3294 	    {
3295 	      if (dump_file && (dump_flags & TDF_DETAILS))
3296 		{
3297 		  fprintf (dump_file, "Found fully redundant value: ");
3298 		  print_pre_expr (dump_file, expr);
3299 		  fprintf (dump_file, "\n");
3300 		}
3301 	      continue;
3302 	    }
3303 
3304 	  FOR_EACH_EDGE (pred, ei, block->preds)
3305 	    {
3306 	      unsigned int vprime;
3307 
3308 	      /* We should never run insertion for the exit block
3309 	         and so not come across fake pred edges.  */
3310 	      gcc_assert (!(pred->flags & EDGE_FAKE));
3311 	      bprime = pred->src;
3312 	      eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3313 				      bprime, block);
3314 
3315 	      /* eprime will generally only be NULL if the
3316 		 value of the expression, translated
3317 		 through the PHI for this predecessor, is
3318 		 undefined.  If that is the case, we can't
3319 		 make the expression fully redundant,
3320 		 because its value is undefined along a
3321 		 predecessor path.  We can thus break out
3322 		 early because it doesn't matter what the
3323 		 rest of the results are.  */
3324 	      if (eprime == NULL)
3325 		{
3326 		  avail[pred->dest_idx] = NULL;
3327 		  cant_insert = true;
3328 		  break;
3329 		}
3330 
3331 	      eprime = fully_constant_expression (eprime);
3332 	      vprime = get_expr_value_id (eprime);
3333 	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3334 						 vprime);
3335 	      if (edoubleprime == NULL)
3336 		{
3337 		  avail[pred->dest_idx] = eprime;
3338 		  all_same = false;
3339 		}
3340 	      else
3341 		{
3342 		  avail[pred->dest_idx] = edoubleprime;
3343 		  by_some = true;
3344 		  /* We want to perform insertions to remove a redundancy on
3345 		     a path in the CFG we want to optimize for speed.  */
3346 		  if (optimize_edge_for_speed_p (pred))
3347 		    do_insertion = true;
3348 		  if (first_s == NULL)
3349 		    first_s = edoubleprime;
3350 		  else if (!pre_expr_d::equal (first_s, edoubleprime))
3351 		    all_same = false;
3352 		}
3353 	    }
3354 	  /* If we can insert it, it's not the same value
3355 	     already existing along every predecessor, and
3356 	     it's defined by some predecessor, then it is
3357 	     partially redundant.  */
3358 	  if (!cant_insert && !all_same && by_some)
3359 	    {
3360 	      if (!do_insertion)
3361 		{
3362 		  if (dump_file && (dump_flags & TDF_DETAILS))
3363 		    {
3364 		      fprintf (dump_file, "Skipping partial redundancy for "
3365 			       "expression ");
3366 		      print_pre_expr (dump_file, expr);
3367 		      fprintf (dump_file, " (%04d), no redundancy on an edge "
3368 			       "to be optimized for speed\n", val);
3369 		    }
3370 		}
3371 	      else if (dbg_cnt (treepre_insert))
3372 		{
3373 		  if (dump_file && (dump_flags & TDF_DETAILS))
3374 		    {
3375 		      fprintf (dump_file, "Found partial redundancy for "
3376 			       "expression ");
3377 		      print_pre_expr (dump_file, expr);
3378 		      fprintf (dump_file, " (%04d)\n",
3379 			       get_expr_value_id (expr));
3380 		    }
3381 		  if (insert_into_preds_of_block (block,
3382 						  get_expression_id (expr),
3383 						  avail))
3384 		    new_stuff = true;
3385 		}
3386 	    }
3387 	  /* If all edges produce the same value and that value is
3388 	     an invariant, then the PHI has the same value on all
3389 	     edges.  Note this.  */
3390 	  else if (!cant_insert && all_same)
3391 	    {
3392 	      gcc_assert (edoubleprime->kind == CONSTANT
3393 			  || edoubleprime->kind == NAME);
3394 
3395 	      tree temp = make_temp_ssa_name (get_expr_type (expr),
3396 					      NULL, "pretmp");
3397 	      gassign *assign
3398 		= gimple_build_assign (temp,
3399 				       edoubleprime->kind == CONSTANT ?
3400 				       PRE_EXPR_CONSTANT (edoubleprime) :
3401 				       PRE_EXPR_NAME (edoubleprime));
3402 	      gimple_stmt_iterator gsi = gsi_after_labels (block);
3403 	      gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3404 
3405 	      gimple_set_plf (assign, NECESSARY, false);
3406 	      VN_INFO_GET (temp)->value_id = val;
3407 	      VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3408 	      if (VN_INFO (temp)->valnum == NULL_TREE)
3409 		VN_INFO (temp)->valnum = temp;
3410 	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3411 	      pre_expr newe = get_or_alloc_expr_for_name (temp);
3412 	      add_to_value (val, newe);
3413 	      bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3414 	      bitmap_insert_into_set (NEW_SETS (block), newe);
3415 	    }
3416 	}
3417     }
3418 
3419   exprs.release ();
3420   return new_stuff;
3421 }
3422 
3423 
3424 /* Perform insertion for partially anticipatable expressions.  There
3425    is only one case in which we will perform insertion for these:
3426    when the expression is partially anticipatable and fully available.
3427    In that case, we know that computing it earlier will enable us to
3428    remove the later computation.  */
3429 
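/* An informal sketch (hypothetical SSA names): both predecessors of
   BLOCK already compute a_1 + b_1,

     pred1: t1_2 = a_1 + b_1;      pred2: t2_3 = a_1 + b_1;
                    \                    /
                  block: if (rare_4) t3_5 = a_1 + b_1;

   so even though a_1 + b_1 is only partially anticipated in BLOCK, it
   can be made available there at the cost of a single PHI
   <t1_2, t2_3>, allowing the later computation of t3_5 to be
   removed.  */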
3430 
3431 static bool
3432 do_partial_partial_insertion (basic_block block, basic_block dom)
3433 {
3434   bool new_stuff = false;
3435   vec<pre_expr> exprs;
3436   pre_expr expr;
3437   auto_vec<pre_expr> avail;
3438   int i;
3439 
3440   exprs = sorted_array_from_bitmap_set (PA_IN (block));
3441   avail.safe_grow (EDGE_COUNT (block->preds));
3442 
3443   FOR_EACH_VEC_ELT (exprs, i, expr)
3444     {
3445       if (expr->kind == NARY
3446 	  || expr->kind == REFERENCE)
3447 	{
3448 	  unsigned int val;
3449 	  bool by_all = true;
3450 	  bool cant_insert = false;
3451 	  edge pred;
3452 	  basic_block bprime;
3453 	  pre_expr eprime = NULL;
3454 	  edge_iterator ei;
3455 
3456 	  val = get_expr_value_id (expr);
3457 	  if (bitmap_set_contains_value (PHI_GEN (block), val))
3458 	    continue;
3459 	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3460 	    continue;
3461 
3462 	  FOR_EACH_EDGE (pred, ei, block->preds)
3463 	    {
3464 	      unsigned int vprime;
3465 	      pre_expr edoubleprime;
3466 
3467 	      /* We should never run insertion for the exit block
3468 	         and so should never come across fake pred edges.  */
3469 	      gcc_assert (!(pred->flags & EDGE_FAKE));
3470 	      bprime = pred->src;
3471 	      eprime = phi_translate (expr, ANTIC_IN (block),
3472 				      PA_IN (block),
3473 				      bprime, block);
3474 
3475 	      /* eprime will generally only be NULL if the
3476 		 value of the expression, translated
3477 		 through the PHI for this predecessor, is
3478 		 undefined.  If that is the case, we can't
3479 		 make the expression fully redundant,
3480 		 because its value is undefined along a
3481 		 predecessor path.  We can thus break out
3482 		 early because it doesn't matter what the
3483 		 rest of the results are.  */
3484 	      if (eprime == NULL)
3485 		{
3486 		  avail[pred->dest_idx] = NULL;
3487 		  cant_insert = true;
3488 		  break;
3489 		}
3490 
3491 	      eprime = fully_constant_expression (eprime);
3492 	      vprime = get_expr_value_id (eprime);
3493 	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3494 	      avail[pred->dest_idx] = edoubleprime;
3495 	      if (edoubleprime == NULL)
3496 		{
3497 		  by_all = false;
3498 		  break;
3499 		}
3500 	    }
3501 
3502 	  /* If we can insert it and its value is already available
3503 	     along every predecessor, then computing it here (via a
3504 	     PHI) will let us remove the later, partially redundant
3505 	     computation.  */
3506 	  if (!cant_insert && by_all)
3507 	    {
3508 	      edge succ;
3509 	      bool do_insertion = false;
3510 
3511 	      /* Insert only if we can remove a later expression on a path
3512 		 that we want to optimize for speed.
3513 		 The phi node that we will be inserting in BLOCK is not free,
3514 		 and inserting it for the sake of a !optimize_for_speed successor
3515 		 may cause regressions on the speed path.  */
3516 	      FOR_EACH_EDGE (succ, ei, block->succs)
3517 		{
3518 		  if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3519 		      || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3520 		    {
3521 		      if (optimize_edge_for_speed_p (succ))
3522 			do_insertion = true;
3523 		    }
3524 		}
3525 
3526 	      if (!do_insertion)
3527 		{
3528 		  if (dump_file && (dump_flags & TDF_DETAILS))
3529 		    {
3530 		      fprintf (dump_file, "Skipping partial partial redundancy "
3531 			       "for expression ");
3532 		      print_pre_expr (dump_file, expr);
3533 		      fprintf (dump_file, " (%04d), not (partially) anticipated "
3534 			       "on any edges to be optimized for speed\n", val);
3535 		    }
3536 		}
3537 	      else if (dbg_cnt (treepre_insert))
3538 		{
3539 		  pre_stats.pa_insert++;
3540 		  if (dump_file && (dump_flags & TDF_DETAILS))
3541 		    {
3542 		      fprintf (dump_file, "Found partial partial redundancy "
3543 			       "for expression ");
3544 		      print_pre_expr (dump_file, expr);
3545 		      fprintf (dump_file, " (%04d)\n",
3546 			       get_expr_value_id (expr));
3547 		    }
3548 		  if (insert_into_preds_of_block (block,
3549 						  get_expression_id (expr),
3550 						  avail))
3551 		    new_stuff = true;
3552 		}
3553 	    }
3554 	}
3555     }
3556 
3557   exprs.release ();
3558   return new_stuff;
3559 }
3560 
3561 static bool
3562 insert_aux (basic_block block)
3563 {
3564   basic_block son;
3565   bool new_stuff = false;
3566 
3567   if (block)
3568     {
3569       basic_block dom;
3570       dom = get_immediate_dominator (CDI_DOMINATORS, block);
3571       if (dom)
3572 	{
3573 	  unsigned i;
3574 	  bitmap_iterator bi;
3575 	  bitmap_set_t newset = NEW_SETS (dom);
3576 	  if (newset)
3577 	    {
3578 	      /* Note that we need to value_replace both NEW_SETS and
3579 		 AVAIL_OUT.  In the case of NEW_SETS, the value may be
3580 		 represented by some non-simple expression here that we want
3581 		 to replace it with.  */
3582 	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3583 		{
3584 		  pre_expr expr = expression_for_id (i);
3585 		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
3586 		  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3587 		}
3588 	    }
3589 	  if (!single_pred_p (block))
3590 	    {
3591 	      new_stuff |= do_regular_insertion (block, dom);
3592 	      if (do_partial_partial)
3593 		new_stuff |= do_partial_partial_insertion (block, dom);
3594 	    }
3595 	}
3596     }
3597   for (son = first_dom_son (CDI_DOMINATORS, block);
3598        son;
3599        son = next_dom_son (CDI_DOMINATORS, son))
3600     {
3601       new_stuff |= insert_aux (son);
3602     }
3603 
3604   return new_stuff;
3605 }
3606 
3607 /* Perform insertion of partially redundant values.  */
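
/* An informal source-level sketch (hypothetical variables):

     if (cond)                        if (cond)
       x = a + b;                       { x = a + b; t = x; }
     else                      =>     else
       d = 1;                           { d = 1; t = a + b; }
     y = a + b;                       y = t;

   The computation of a + b reaching y is redundant only on the
   then-path; inserting it on the else-path and merging the two
   results through a PHI makes the later computation fully redundant,
   so elimination can remove it.  */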
3608 
3609 static void
3610 insert (void)
3611 {
3612   bool new_stuff = true;
3613   basic_block bb;
3614   int num_iterations = 0;
3615 
3616   FOR_ALL_BB_FN (bb, cfun)
3617     NEW_SETS (bb) = bitmap_set_new ();
3618 
3619   while (new_stuff)
3620     {
3621       num_iterations++;
3622       if (dump_file && (dump_flags & TDF_DETAILS))
3623 	fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3624       new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3625 
3626       /* Clear the NEW sets before the next iteration.  We have already
3627          fully propagated their contents.  */
3628       if (new_stuff)
3629 	FOR_ALL_BB_FN (bb, cfun)
3630 	  bitmap_set_free (NEW_SETS (bb));
3631     }
3632   statistics_histogram_event (cfun, "insert iterations", num_iterations);
3633 }
3634 
3635 
3636 /* Compute the AVAIL set for all basic blocks.
3637 
3638    This function performs value numbering of the statements in each basic
3639    block.  The AVAIL sets are built from information we glean while doing
3640    this value numbering, since the AVAIL sets contain only one entry per
3641    value.
3642 
3643    AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3644    AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
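
/* An informal example on a diamond B1 -> {B2, B3} -> B4, where B1
   computes x = a + b and B2 computes y = x + 1:

     AVAIL_OUT (B1) = { a, b, x }
     AVAIL_OUT (B2) = AVAIL_OUT (B1) U { y }
     AVAIL_OUT (B3) = AVAIL_OUT (B1)
     AVAIL_OUT (B4) = AVAIL_OUT (B1) U { whatever B4 generates }

   B4's immediate dominator is B1, so y is not in AVAIL_OUT (B4) even
   though it is computed on one of the incoming paths.  */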
3645 
3646 static void
3647 compute_avail (void)
3648 {
3649 
3650   basic_block block, son;
3651   basic_block *worklist;
3652   size_t sp = 0;
3653   unsigned i;
3654 
3655   /* We pretend that default definitions are defined in the entry block.
3656      This includes function arguments and the static chain decl.  */
3657   for (i = 1; i < num_ssa_names; ++i)
3658     {
3659       tree name = ssa_name (i);
3660       pre_expr e;
3661       if (!name
3662 	  || !SSA_NAME_IS_DEFAULT_DEF (name)
3663 	  || has_zero_uses (name)
3664 	  || virtual_operand_p (name))
3665 	continue;
3666 
3667       e = get_or_alloc_expr_for_name (name);
3668       add_to_value (get_expr_value_id (e), e);
3669       bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3670       bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3671 				    e);
3672     }
3673 
3674   if (dump_file && (dump_flags & TDF_DETAILS))
3675     {
3676       print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3677 			"tmp_gen", ENTRY_BLOCK);
3678       print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3679 			"avail_out", ENTRY_BLOCK);
3680     }
3681 
3682   /* Allocate the worklist.  */
3683   worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3684 
3685   /* Seed the algorithm by putting the dominator children of the entry
3686      block on the worklist.  */
3687   for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3688        son;
3689        son = next_dom_son (CDI_DOMINATORS, son))
3690     worklist[sp++] = son;
3691 
3692   BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3693     = ssa_default_def (cfun, gimple_vop (cfun));
3694 
3695   /* Loop until the worklist is empty.  */
3696   while (sp)
3697     {
3698       gimple stmt;
3699       basic_block dom;
3700 
3701       /* Pick a block from the worklist.  */
3702       block = worklist[--sp];
3703 
3704       /* Initially, the set of available values in BLOCK is that of
3705 	 its immediate dominator.  */
3706       dom = get_immediate_dominator (CDI_DOMINATORS, block);
3707       if (dom)
3708 	{
3709 	  bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3710 	  BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3711 	}
3712 
3713       /* Generate values for PHI nodes.  */
3714       for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3715 	   gsi_next (&gsi))
3716 	{
3717 	  tree result = gimple_phi_result (gsi.phi ());
3718 
3719 	  /* We have no need for virtual phis, as they don't represent
3720 	     actual computations.  */
3721 	  if (virtual_operand_p (result))
3722 	    {
3723 	      BB_LIVE_VOP_ON_EXIT (block) = result;
3724 	      continue;
3725 	    }
3726 
3727 	  pre_expr e = get_or_alloc_expr_for_name (result);
3728 	  add_to_value (get_expr_value_id (e), e);
3729 	  bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3730 	  bitmap_insert_into_set (PHI_GEN (block), e);
3731 	}
3732 
3733       BB_MAY_NOTRETURN (block) = 0;
3734 
3735       /* Now compute value numbers and populate value sets with all
3736 	 the expressions computed in BLOCK.  */
3737       for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3738 	   gsi_next (&gsi))
3739 	{
3740 	  ssa_op_iter iter;
3741 	  tree op;
3742 
3743 	  stmt = gsi_stmt (gsi);
3744 
3745 	  /* Cache whether the basic-block has any non-visible side-effect
3746 	     or control flow.
3747 	     If this isn't a call or it is the last stmt in the
3748 	     basic-block then the CFG represents things correctly.  */
3749 	  if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3750 	    {
3751 	      /* Non-looping const functions always return normally.
3752 		 Otherwise the call might not return or have side-effects
3753 		 that forbid hoisting possibly trapping expressions
3754 		 before it.  */
3755 	      int flags = gimple_call_flags (stmt);
3756 	      if (!(flags & ECF_CONST)
3757 		  || (flags & ECF_LOOPING_CONST_OR_PURE))
3758 		BB_MAY_NOTRETURN (block) = 1;
3759 	    }
3760 
3761 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3762 	    {
3763 	      pre_expr e = get_or_alloc_expr_for_name (op);
3764 
3765 	      add_to_value (get_expr_value_id (e), e);
3766 	      bitmap_insert_into_set (TMP_GEN (block), e);
3767 	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3768 	    }
3769 
3770 	  if (gimple_vdef (stmt))
3771 	    BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3772 
3773 	  if (gimple_has_side_effects (stmt)
3774 	      || stmt_could_throw_p (stmt)
3775 	      || is_gimple_debug (stmt))
3776 	    continue;
3777 
3778 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3779 	    {
3780 	      if (ssa_undefined_value_p (op))
3781 		continue;
3782 	      pre_expr e = get_or_alloc_expr_for_name (op);
3783 	      bitmap_value_insert_into_set (EXP_GEN (block), e);
3784 	    }
3785 
3786 	  switch (gimple_code (stmt))
3787 	    {
3788 	    case GIMPLE_RETURN:
3789 	      continue;
3790 
3791 	    case GIMPLE_CALL:
3792 	      {
3793 		vn_reference_t ref;
3794 		vn_reference_s ref1;
3795 		pre_expr result = NULL;
3796 
3797 		/* We can value number only calls to real functions.  */
3798 		if (gimple_call_internal_p (stmt))
3799 		  continue;
3800 
3801 		vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3802 		if (!ref)
3803 		  continue;
3804 
3805 		/* If the value of the call is not invalidated in
3806 		   this block until it is computed, add the expression
3807 		   to EXP_GEN.  */
3808 		if (!gimple_vuse (stmt)
3809 		    || gimple_code
3810 		         (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3811 		    || gimple_bb (SSA_NAME_DEF_STMT
3812 				    (gimple_vuse (stmt))) != block)
3813 		  {
3814 		    result = (pre_expr) pool_alloc (pre_expr_pool);
3815 		    result->kind = REFERENCE;
3816 		    result->id = 0;
3817 		    PRE_EXPR_REFERENCE (result) = ref;
3818 
3819 		    get_or_alloc_expression_id (result);
3820 		    add_to_value (get_expr_value_id (result), result);
3821 		    bitmap_value_insert_into_set (EXP_GEN (block), result);
3822 		  }
3823 		continue;
3824 	      }
3825 
3826 	    case GIMPLE_ASSIGN:
3827 	      {
3828 		pre_expr result = NULL;
3829 		switch (vn_get_stmt_kind (stmt))
3830 		  {
3831 		  case VN_NARY:
3832 		    {
3833 		      enum tree_code code = gimple_assign_rhs_code (stmt);
3834 		      vn_nary_op_t nary;
3835 
3836 		      /* COND_EXPR and VEC_COND_EXPR are awkward in
3837 			 that they contain an embedded complex expression.
3838 			 Don't even try to shove those through PRE.  */
3839 		      if (code == COND_EXPR
3840 			  || code == VEC_COND_EXPR)
3841 			continue;
3842 
3843 		      vn_nary_op_lookup_stmt (stmt, &nary);
3844 		      if (!nary)
3845 			continue;
3846 
3847 		      /* If the NARY traps and there was a preceding
3848 		         point in the block that might not return, avoid
3849 			 adding the nary to EXP_GEN.  */
3850 		      if (BB_MAY_NOTRETURN (block)
3851 			  && vn_nary_may_trap (nary))
3852 			continue;
3853 
3854 		      result = (pre_expr) pool_alloc (pre_expr_pool);
3855 		      result->kind = NARY;
3856 		      result->id = 0;
3857 		      PRE_EXPR_NARY (result) = nary;
3858 		      break;
3859 		    }
3860 
3861 		  case VN_REFERENCE:
3862 		    {
3863 		      vn_reference_t ref;
3864 		      vn_reference_lookup (gimple_assign_rhs1 (stmt),
3865 					   gimple_vuse (stmt),
3866 					   VN_WALK, &ref, true);
3867 		      if (!ref)
3868 			continue;
3869 
3870 		      /* If the value of the reference is not invalidated in
3871 			 this block until it is computed, add the expression
3872 			 to EXP_GEN.  */
3873 		      if (gimple_vuse (stmt))
3874 			{
3875 			  gimple def_stmt;
3876 			  bool ok = true;
3877 			  def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3878 			  while (!gimple_nop_p (def_stmt)
3879 				 && gimple_code (def_stmt) != GIMPLE_PHI
3880 				 && gimple_bb (def_stmt) == block)
3881 			    {
3882 			      if (stmt_may_clobber_ref_p
3883 				    (def_stmt, gimple_assign_rhs1 (stmt)))
3884 				{
3885 				  ok = false;
3886 				  break;
3887 				}
3888 			      def_stmt
3889 				= SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3890 			    }
3891 			  if (!ok)
3892 			    continue;
3893 			}
3894 
3895 		      result = (pre_expr) pool_alloc (pre_expr_pool);
3896 		      result->kind = REFERENCE;
3897 		      result->id = 0;
3898 		      PRE_EXPR_REFERENCE (result) = ref;
3899 		      break;
3900 		    }
3901 
3902 		  default:
3903 		    continue;
3904 		  }
3905 
3906 		get_or_alloc_expression_id (result);
3907 		add_to_value (get_expr_value_id (result), result);
3908 		bitmap_value_insert_into_set (EXP_GEN (block), result);
3909 		continue;
3910 	      }
3911 	    default:
3912 	      break;
3913 	    }
3914 	}
3915 
3916       if (dump_file && (dump_flags & TDF_DETAILS))
3917 	{
3918 	  print_bitmap_set (dump_file, EXP_GEN (block),
3919 			    "exp_gen", block->index);
3920 	  print_bitmap_set (dump_file, PHI_GEN (block),
3921 			    "phi_gen", block->index);
3922 	  print_bitmap_set (dump_file, TMP_GEN (block),
3923 			    "tmp_gen", block->index);
3924 	  print_bitmap_set (dump_file, AVAIL_OUT (block),
3925 			    "avail_out", block->index);
3926 	}
3927 
3928       /* Put the dominator children of BLOCK on the worklist of blocks
3929 	 to compute available sets for.  */
3930       for (son = first_dom_son (CDI_DOMINATORS, block);
3931 	   son;
3932 	   son = next_dom_son (CDI_DOMINATORS, son))
3933 	worklist[sp++] = son;
3934     }
3935 
3936   free (worklist);
3937 }
3938 
3939 
3940 /* Local state for the eliminate domwalk.  */
3941 static vec<gimple> el_to_remove;
3942 static vec<gimple> el_to_fixup;
3943 static unsigned int el_todo;
3944 static vec<tree> el_avail;
3945 static vec<tree> el_avail_stack;
3946 
3947 /* Return a leader for OP that is available at the current point of the
3948    eliminate domwalk.  */
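
/* E.g. (hypothetical names): if x_1 and x_2 carry the same value
   number with valnum x_1, and x_1 has been made available somewhere
   on the current dominator path, eliminate_avail (x_2) returns x_1
   as the leader to use in place of x_2.  */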
3949 
3950 static tree
3951 eliminate_avail (tree op)
3952 {
3953   tree valnum = VN_INFO (op)->valnum;
3954   if (TREE_CODE (valnum) == SSA_NAME)
3955     {
3956       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
3957 	return valnum;
3958       if (el_avail.length () > SSA_NAME_VERSION (valnum))
3959 	return el_avail[SSA_NAME_VERSION (valnum)];
3960     }
3961   else if (is_gimple_min_invariant (valnum))
3962     return valnum;
3963   return NULL_TREE;
3964 }
3965 
3966 /* At the current point of the eliminate domwalk make OP available.  */
3967 
3968 static void
3969 eliminate_push_avail (tree op)
3970 {
3971   tree valnum = VN_INFO (op)->valnum;
3972   if (TREE_CODE (valnum) == SSA_NAME)
3973     {
3974       if (el_avail.length () <= SSA_NAME_VERSION (valnum))
3975 	el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
3976       tree pushop = op;
3977       if (el_avail[SSA_NAME_VERSION (valnum)])
3978 	pushop = el_avail[SSA_NAME_VERSION (valnum)];
3979       el_avail_stack.safe_push (pushop);
3980       el_avail[SSA_NAME_VERSION (valnum)] = op;
3981     }
3982 }
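
/* An informal sketch of the unwinding (hypothetical names x_1, x_2
   sharing one value): entering a block pushes a NULL_TREE marker;
   making x_2 available while x_1 is the current leader pushes the
   shadowed x_1; when the walk leaves the subtree, popping x_1
   restores it as the leader, while popping an entry that is still
   the current leader simply clears the slot.  */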
3983 
3984 /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
3985    the leader for the expression if insertion was successful.  */
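
/* E.g. (hypothetical names): if SCCVN recorded the expression
   (int) x_2 for VAL and a leader for x_2 is available, this emits

     pretmp_3 = (int) x_2;

   before *GSI and returns pretmp_3.  */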
3986 
3987 static tree
3988 eliminate_insert (gimple_stmt_iterator *gsi, tree val)
3989 {
3990   tree expr = vn_get_expr_for (val);
3991   if (!CONVERT_EXPR_P (expr)
3992       && TREE_CODE (expr) != VIEW_CONVERT_EXPR)
3993     return NULL_TREE;
3994 
3995   tree op = TREE_OPERAND (expr, 0);
3996   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
3997   if (!leader)
3998     return NULL_TREE;
3999 
4000   tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
4001   gassign *tem = gimple_build_assign (res,
4002 				      fold_build1 (TREE_CODE (expr),
4003 						   TREE_TYPE (expr), leader));
4004   gsi_insert_before (gsi, tem, GSI_SAME_STMT);
4005   VN_INFO_GET (res)->valnum = val;
4006 
4007   if (TREE_CODE (leader) == SSA_NAME)
4008     gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);
4009 
4010   pre_stats.insertions++;
4011   if (dump_file && (dump_flags & TDF_DETAILS))
4012     {
4013       fprintf (dump_file, "Inserted ");
4014       print_gimple_stmt (dump_file, tem, 0, 0);
4015     }
4016 
4017   return res;
4018 }
4019 
4020 class eliminate_dom_walker : public dom_walker
4021 {
4022 public:
4023   eliminate_dom_walker (cdi_direction direction, bool do_pre_)
4024       : dom_walker (direction), do_pre (do_pre_) {}
4025 
4026   virtual void before_dom_children (basic_block);
4027   virtual void after_dom_children (basic_block);
4028 
4029   bool do_pre;
4030 };
4031 
4032 /* Perform elimination for the basic-block B during the domwalk.  */
4033 
4034 void
4035 eliminate_dom_walker::before_dom_children (basic_block b)
4036 {
4037   /* Mark new bb.  */
4038   el_avail_stack.safe_push (NULL_TREE);
4039 
4040   /* ???  If we do nothing for unreachable blocks then this will confuse
4041      tailmerging.  Eventually we can reduce its reliance on SCCVN now
4042      that we fully copy/constant-propagate (most) things.  */
4043 
4044   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4045     {
4046       gphi *phi = gsi.phi ();
4047       tree res = PHI_RESULT (phi);
4048 
4049       if (virtual_operand_p (res))
4050 	{
4051 	  gsi_next (&gsi);
4052 	  continue;
4053 	}
4054 
4055       tree sprime = eliminate_avail (res);
4056       if (sprime
4057 	  && sprime != res)
4058 	{
4059 	  if (dump_file && (dump_flags & TDF_DETAILS))
4060 	    {
4061 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
4062 	      print_generic_expr (dump_file, res, 0);
4063 	      fprintf (dump_file, " with ");
4064 	      print_generic_expr (dump_file, sprime, 0);
4065 	      fprintf (dump_file, "\n");
4066 	    }
4067 
4068 	  /* If we inserted this PHI node ourselves, it's not an elimination.  */
4069 	  if (inserted_exprs
4070 	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4071 	    pre_stats.phis--;
4072 	  else
4073 	    pre_stats.eliminations++;
4074 
4075 	  /* If we will propagate into all uses don't bother to do
4076 	     anything.  */
4077 	  if (may_propagate_copy (res, sprime))
4078 	    {
4079 	      /* Mark the PHI for removal.  */
4080 	      el_to_remove.safe_push (phi);
4081 	      gsi_next (&gsi);
4082 	      continue;
4083 	    }
4084 
4085 	  remove_phi_node (&gsi, false);
4086 
4087 	  if (inserted_exprs
4088 	      && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4089 	      && TREE_CODE (sprime) == SSA_NAME)
4090 	    gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4091 
4092 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4093 	    sprime = fold_convert (TREE_TYPE (res), sprime);
4094 	  gimple stmt = gimple_build_assign (res, sprime);
4095 	  /* ???  It cannot yet be necessary (DOM walk).  */
4096 	  gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4097 
4098 	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
4099 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4100 	  continue;
4101 	}
4102 
4103       eliminate_push_avail (res);
4104       gsi_next (&gsi);
4105     }
4106 
4107   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
4108        !gsi_end_p (gsi);
4109        gsi_next (&gsi))
4110     {
4111       tree sprime = NULL_TREE;
4112       gimple stmt = gsi_stmt (gsi);
4113       tree lhs = gimple_get_lhs (stmt);
4114       if (lhs && TREE_CODE (lhs) == SSA_NAME
4115 	  && !gimple_has_volatile_ops (stmt)
4116 	  /* See PR43491.  Do not replace a global register variable when
4117 	     it is the RHS of an assignment.  Do replace local register
4118 	     variables since gcc does not guarantee a local variable will
4119 	     be allocated in a register.
4120 	     ???  The fix isn't effective here.  This should instead
4121 	     be ensured by not value-numbering them the same but treating
4122 	     them like volatiles?  */
4123 	  && !(gimple_assign_single_p (stmt)
4124 	       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4125 		   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4126 		   && is_global_var (gimple_assign_rhs1 (stmt)))))
4127 	{
4128 	  sprime = eliminate_avail (lhs);
4129 	  if (!sprime)
4130 	    {
4131 	      /* If there is no existing usable leader but SCCVN thinks
4132 		 it has an expression it wants to use as replacement,
4133 		 insert that.  */
4134 	      tree val = VN_INFO (lhs)->valnum;
4135 	      if (val != VN_TOP
4136 		  && TREE_CODE (val) == SSA_NAME
4137 		  && VN_INFO (val)->needs_insertion
4138 		  && VN_INFO (val)->expr != NULL_TREE
4139 		  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
4140 		eliminate_push_avail (sprime);
4141 	    }
4142 
4143 	  /* If this now constitutes a copy, duplicate points-to
4144 	     and range info appropriately.  This is especially
4145 	     important for inserted code.  See tree-ssa-copy.c
4146 	     for similar code.  */
4147 	  if (sprime
4148 	      && TREE_CODE (sprime) == SSA_NAME)
4149 	    {
4150 	      basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4151 	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
4152 		  && SSA_NAME_PTR_INFO (lhs)
4153 		  && !SSA_NAME_PTR_INFO (sprime))
4154 		{
4155 		  duplicate_ssa_name_ptr_info (sprime,
4156 					       SSA_NAME_PTR_INFO (lhs));
4157 		  if (b != sprime_b)
4158 		    mark_ptr_info_alignment_unknown
4159 			(SSA_NAME_PTR_INFO (sprime));
4160 		}
4161 	      else if (!POINTER_TYPE_P (TREE_TYPE (lhs))
4162 		       && SSA_NAME_RANGE_INFO (lhs)
4163 		       && !SSA_NAME_RANGE_INFO (sprime)
4164 		       && b == sprime_b)
4165 		duplicate_ssa_name_range_info (sprime,
4166 					       SSA_NAME_RANGE_TYPE (lhs),
4167 					       SSA_NAME_RANGE_INFO (lhs));
4168 	    }
4169 
4170 	  /* Inhibit the use of an inserted PHI on a loop header when
4171 	     the address of the memory reference is a simple induction
4172 	     variable.  In other cases the vectorizer won't do anything
4173 	     anyway (either it's loop invariant or a complicated
4174 	     expression).  */
4175 	  if (sprime
4176 	      && TREE_CODE (sprime) == SSA_NAME
4177 	      && do_pre
4178 	      && flag_tree_loop_vectorize
4179 	      && loop_outer (b->loop_father)
4180 	      && has_zero_uses (sprime)
4181 	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
4182 	      && gimple_assign_load_p (stmt))
4183 	    {
4184 	      gimple def_stmt = SSA_NAME_DEF_STMT (sprime);
4185 	      basic_block def_bb = gimple_bb (def_stmt);
4186 	      if (gimple_code (def_stmt) == GIMPLE_PHI
4187 		  && b->loop_father->header == def_bb)
4188 		{
4189 		  ssa_op_iter iter;
4190 		  tree op;
4191 		  bool found = false;
4192 		  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4193 		    {
4194 		      affine_iv iv;
4195 		      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
4196 		      if (def_bb
4197 			  && flow_bb_inside_loop_p (b->loop_father, def_bb)
4198 			  && simple_iv (b->loop_father,
4199 					b->loop_father, op, &iv, true))
4200 			{
4201 			  found = true;
4202 			  break;
4203 			}
4204 		    }
4205 		  if (found)
4206 		    {
4207 		      if (dump_file && (dump_flags & TDF_DETAILS))
4208 			{
4209 			  fprintf (dump_file, "Not replacing ");
4210 			  print_gimple_expr (dump_file, stmt, 0, 0);
4211 			  fprintf (dump_file, " with ");
4212 			  print_generic_expr (dump_file, sprime, 0);
4213 			  fprintf (dump_file, " which would add a loop"
4214 				   " carried dependence to loop %d\n",
4215 				   b->loop_father->num);
4216 			}
4217 		      /* Don't keep sprime available.  */
4218 		      sprime = NULL_TREE;
4219 		    }
4220 		}
4221 	    }
4222 
4223 	  if (sprime)
4224 	    {
4225 	      /* If we can propagate the value computed for LHS into
4226 		 all uses don't bother doing anything with this stmt.  */
4227 	      if (may_propagate_copy (lhs, sprime))
4228 		{
4229 		  /* Mark it for removal.  */
4230 		  el_to_remove.safe_push (stmt);
4231 
4232 		  /* ???  Don't count copy/constant propagations.  */
4233 		  if (gimple_assign_single_p (stmt)
4234 		      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4235 			  || gimple_assign_rhs1 (stmt) == sprime))
4236 		    continue;
4237 
4238 		  if (dump_file && (dump_flags & TDF_DETAILS))
4239 		    {
4240 		      fprintf (dump_file, "Replaced ");
4241 		      print_gimple_expr (dump_file, stmt, 0, 0);
4242 		      fprintf (dump_file, " with ");
4243 		      print_generic_expr (dump_file, sprime, 0);
4244 		      fprintf (dump_file, " in all uses of ");
4245 		      print_gimple_stmt (dump_file, stmt, 0, 0);
4246 		    }
4247 
4248 		  pre_stats.eliminations++;
4249 		  continue;
4250 		}
4251 
4252 	      /* If this is an assignment from our leader (which
4253 	         happens when the value-number is a constant)
4254 		 then there is nothing to do.  */
4255 	      if (gimple_assign_single_p (stmt)
4256 		  && sprime == gimple_assign_rhs1 (stmt))
4257 		continue;
4258 
4259 	      /* Else replace its RHS.  */
4260 	      bool can_make_abnormal_goto
4261 		  = is_gimple_call (stmt)
4262 		  && stmt_can_make_abnormal_goto (stmt);
4263 
4264 	      if (dump_file && (dump_flags & TDF_DETAILS))
4265 		{
4266 		  fprintf (dump_file, "Replaced ");
4267 		  print_gimple_expr (dump_file, stmt, 0, 0);
4268 		  fprintf (dump_file, " with ");
4269 		  print_generic_expr (dump_file, sprime, 0);
4270 		  fprintf (dump_file, " in ");
4271 		  print_gimple_stmt (dump_file, stmt, 0, 0);
4272 		}
4273 
4274 	      if (TREE_CODE (sprime) == SSA_NAME)
4275 		gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4276 				NECESSARY, true);
4277 
4278 	      pre_stats.eliminations++;
4279 	      gimple orig_stmt = stmt;
4280 	      if (!useless_type_conversion_p (TREE_TYPE (lhs),
4281 					      TREE_TYPE (sprime)))
4282 		sprime = fold_convert (TREE_TYPE (lhs), sprime);
4283 	      tree vdef = gimple_vdef (stmt);
4284 	      tree vuse = gimple_vuse (stmt);
4285 	      propagate_tree_value_into_stmt (&gsi, sprime);
4286 	      stmt = gsi_stmt (gsi);
4287 	      update_stmt (stmt);
4288 	      if (vdef != gimple_vdef (stmt))
4289 		VN_INFO (vdef)->valnum = vuse;
4290 
4291 	      /* If we removed EH side-effects from the statement, clean
4292 		 its EH information.  */
4293 	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4294 		{
4295 		  bitmap_set_bit (need_eh_cleanup,
4296 				  gimple_bb (stmt)->index);
4297 		  if (dump_file && (dump_flags & TDF_DETAILS))
4298 		    fprintf (dump_file, "  Removed EH side-effects.\n");
4299 		}
4300 
4301 	      /* Likewise for AB side-effects.  */
4302 	      if (can_make_abnormal_goto
4303 		  && !stmt_can_make_abnormal_goto (stmt))
4304 		{
4305 		  bitmap_set_bit (need_ab_cleanup,
4306 				  gimple_bb (stmt)->index);
4307 		  if (dump_file && (dump_flags & TDF_DETAILS))
4308 		    fprintf (dump_file, "  Removed AB side-effects.\n");
4309 		}
4310 
4311 	      continue;
4312 	    }
4313 	}
4314 
4315       /* If the statement is a scalar store, see if the expression
4316          has the same value number as its rhs.  If so, the store is
4317          dead.  */
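      /* E.g. (informal, hypothetical names):

	   *p_1 = x_2;
	   ... no intervening aliasing store ...
	   *p_1 = x_2;

	 The lookup for the second store's destination finds the value
	 x_2 already stored there, so that store can be queued for
	 removal.  */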
4318       if (gimple_assign_single_p (stmt)
4319 	  && !gimple_has_volatile_ops (stmt)
4320 	  && !is_gimple_reg (gimple_assign_lhs (stmt))
4321 	  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4322 	      || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4323 	{
4324 	  tree val;
4325 	  tree rhs = gimple_assign_rhs1 (stmt);
4326 	  vn_reference_t vnresult;
4327 	  val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALK,
4328 				     &vnresult, false);
4329 	  if (TREE_CODE (rhs) == SSA_NAME)
4330 	    rhs = VN_INFO (rhs)->valnum;
4331 	  if (val
4332 	      && operand_equal_p (val, rhs, 0))
4333 	    {
4334 	      /* We can only remove the later store if the former aliases
4335 		 at least all accesses the later one does or if the store
4336 		 was to readonly memory storing the same value.  */
4337 	      alias_set_type set = get_alias_set (lhs);
4338 	      if (! vnresult
4339 		  || vnresult->set == set
4340 		  || alias_set_subset_of (set, vnresult->set))
4341 		{
4342 		  if (dump_file && (dump_flags & TDF_DETAILS))
4343 		    {
4344 		      fprintf (dump_file, "Deleted redundant store ");
4345 		      print_gimple_stmt (dump_file, stmt, 0, 0);
4346 		    }
4347 
4348 		  /* Queue stmt for removal.  */
4349 		  el_to_remove.safe_push (stmt);
4350 		  continue;
4351 		}
4352 	    }
4353         }
4354 
4355       bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
4356       bool was_noreturn = (is_gimple_call (stmt)
4357 			   && gimple_call_noreturn_p (stmt));
4358       tree vdef = gimple_vdef (stmt);
4359       tree vuse = gimple_vuse (stmt);
4360 
4361       /* If we didn't replace the whole stmt (or propagate the result
4362          into all uses), replace all uses on this stmt with their
4363 	 leaders.  */
4364       use_operand_p use_p;
4365       ssa_op_iter iter;
4366       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4367 	{
4368 	  tree use = USE_FROM_PTR (use_p);
4369 	  /* ???  The call code above leaves stmt operands un-updated.  */
4370 	  if (TREE_CODE (use) != SSA_NAME)
4371 	    continue;
4372 	  tree sprime = eliminate_avail (use);
4373 	  if (sprime && sprime != use
4374 	      && may_propagate_copy (use, sprime)
4375 	      /* We substitute into debug stmts to avoid excessive
4376 	         debug temporaries created by removed stmts, but we need
4377 		 to avoid doing so for inserted sprimes as we never want
4378 		 to create debug temporaries for them.  */
4379 	      && (!inserted_exprs
4380 		  || TREE_CODE (sprime) != SSA_NAME
4381 		  || !is_gimple_debug (stmt)
4382 		  || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
4383 	    {
4384 	      propagate_value (use_p, sprime);
4385 	      gimple_set_modified (stmt, true);
4386 	      if (TREE_CODE (sprime) == SSA_NAME
4387 		  && !is_gimple_debug (stmt))
4388 		gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4389 				NECESSARY, true);
4390 	    }
4391 	}
4392 
4393       /* Visit indirect calls and turn them into direct calls if
4394 	 possible using the devirtualization machinery.  */
4395       if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4396 	{
4397 	  tree fn = gimple_call_fn (call_stmt);
4398 	  if (fn
4399 	      && flag_devirtualize
4400 	      && virtual_method_call_p (fn))
4401 	    {
4402 	      tree otr_type = obj_type_ref_class (fn);
4403 	      tree instance;
4404 	      ipa_polymorphic_call_context context (current_function_decl, fn, stmt, &instance);
4405 	      bool final;
4406 
4407 	      context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn), otr_type, stmt);
4408 
4409 	      vec <cgraph_node *>targets
4410 		= possible_polymorphic_call_targets (obj_type_ref_class (fn),
4411 						     tree_to_uhwi
4412 						       (OBJ_TYPE_REF_TOKEN (fn)),
4413 						     context,
4414 						     &final);
4415 	      if (dump_file)
4416 		dump_possible_polymorphic_call_targets (dump_file,
4417 							obj_type_ref_class (fn),
4418 							tree_to_uhwi
4419 							  (OBJ_TYPE_REF_TOKEN (fn)),
4420 							context);
4421 	      if (final && targets.length () <= 1 && dbg_cnt (devirt))
4422 		{
4423 		  tree fn;
4424 		  if (targets.length () == 1)
4425 		    fn = targets[0]->decl;
4426 		  else
4427 		    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4428 		  if (dump_enabled_p ())
4429 		    {
4430 		      location_t loc = gimple_location_safe (stmt);
4431 		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4432 				       "converting indirect call to "
4433 				       "function %s\n",
4434 				       cgraph_node::get (fn)->name ());
4435 		    }
4436 		  gimple_call_set_fndecl (call_stmt, fn);
4437 		  maybe_remove_unused_call_args (cfun, call_stmt);
4438 		  gimple_set_modified (stmt, true);
4439 		}
4440 	    }
4441 	}
4442 
4443       if (gimple_modified_p (stmt))
4444 	{
4445 	  /* If a formerly non-invariant ADDR_EXPR is turned into an
4446 	     invariant one, it was on a separate stmt.  */
4447 	  if (gimple_assign_single_p (stmt)
4448 	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
4449 	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
4450 	  gimple old_stmt = stmt;
4451 	  if (is_gimple_call (stmt))
4452 	    {
4453 	      /* ???  Only fold calls inplace for now, this may create new
4454 		 SSA names which in turn will confuse free_scc_vn SSA name
4455 		 release code.  */
4456 	      fold_stmt_inplace (&gsi);
4457 	      /* When changing a call into a noreturn call, cfg cleanup
4458 		 is needed to fix up the noreturn call.  */
4459 	      if (!was_noreturn && gimple_call_noreturn_p (stmt))
4460 		el_to_fixup.safe_push (stmt);
4461 	    }
4462 	  else
4463 	    {
4464 	      fold_stmt (&gsi);
4465 	      stmt = gsi_stmt (gsi);
4466 	      if ((gimple_code (stmt) == GIMPLE_COND
4467 		   && (gimple_cond_true_p (as_a <gcond *> (stmt))
4468 		       || gimple_cond_false_p (as_a <gcond *> (stmt))))
4469 		  || (gimple_code (stmt) == GIMPLE_SWITCH
4470 		      && TREE_CODE (gimple_switch_index (
4471 				      as_a <gswitch *> (stmt)))
4472 		         == INTEGER_CST))
4473 		el_todo |= TODO_cleanup_cfg;
4474 	    }
4475 	  /* If we removed EH side-effects from the statement, clean
4476 	     its EH information.  */
4477 	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
4478 	    {
4479 	      bitmap_set_bit (need_eh_cleanup,
4480 			      gimple_bb (stmt)->index);
4481 	      if (dump_file && (dump_flags & TDF_DETAILS))
4482 		fprintf (dump_file, "  Removed EH side-effects.\n");
4483 	    }
4484 	  /* Likewise for AB side-effects.  */
4485 	  if (can_make_abnormal_goto
4486 	      && !stmt_can_make_abnormal_goto (stmt))
4487 	    {
4488 	      bitmap_set_bit (need_ab_cleanup,
4489 			      gimple_bb (stmt)->index);
4490 	      if (dump_file && (dump_flags & TDF_DETAILS))
4491 		fprintf (dump_file, "  Removed AB side-effects.\n");
4492 	    }
4493 	  update_stmt (stmt);
4494 	  if (vdef != gimple_vdef (stmt))
4495 	    VN_INFO (vdef)->valnum = vuse;
4496 	}
4497 
4498       /* Make new values available - for fully redundant LHS we
4499          continue with the next stmt above and skip this.  */
4500       def_operand_p defp;
4501       FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4502 	eliminate_push_avail (DEF_FROM_PTR (defp));
4503     }
4504 
4505   /* Replace destination PHI arguments.  */
4506   edge_iterator ei;
4507   edge e;
4508   FOR_EACH_EDGE (e, ei, b->succs)
4509     {
4510       for (gphi_iterator gsi = gsi_start_phis (e->dest);
4511 	   !gsi_end_p (gsi);
4512 	   gsi_next (&gsi))
4513 	{
4514 	  gphi *phi = gsi.phi ();
4515 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
4516 	  tree arg = USE_FROM_PTR (use_p);
4517 	  if (TREE_CODE (arg) != SSA_NAME
4518 	      || virtual_operand_p (arg))
4519 	    continue;
4520 	  tree sprime = eliminate_avail (arg);
4521 	  if (sprime && may_propagate_copy (arg, sprime))
4522 	    {
4523 	      propagate_value (use_p, sprime);
4524 	      if (TREE_CODE (sprime) == SSA_NAME)
4525 		gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4526 	    }
4527 	}
4528     }
4529 }
4530 
4531 /* Make leaders that are no longer available at this point unavailable.  */
4532 
4533 void
4534 eliminate_dom_walker::after_dom_children (basic_block)
4535 {
4536   tree entry;
4537   while ((entry = el_avail_stack.pop ()) != NULL_TREE)
4538     {
4539       tree valnum = VN_INFO (entry)->valnum;
4540       tree old = el_avail[SSA_NAME_VERSION (valnum)];
4541       if (old == entry)
4542 	el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
4543       else
4544 	el_avail[SSA_NAME_VERSION (valnum)] = entry;
4545     }
4546 }
4547 
4548 /* Eliminate fully redundant computations.  */
4549 
4550 static unsigned int
4551 eliminate (bool do_pre)
4552 {
4553   gimple_stmt_iterator gsi;
4554   gimple stmt;
4555 
4556   need_eh_cleanup = BITMAP_ALLOC (NULL);
4557   need_ab_cleanup = BITMAP_ALLOC (NULL);
4558 
4559   el_to_remove.create (0);
4560   el_to_fixup.create (0);
4561   el_todo = 0;
4562   el_avail.create (num_ssa_names);
4563   el_avail_stack.create (0);
4564 
4565   eliminate_dom_walker (CDI_DOMINATORS,
4566 			do_pre).walk (cfun->cfg->x_entry_block_ptr);
4567 
4568   el_avail.release ();
4569   el_avail_stack.release ();
4570 
4571   /* We cannot remove stmts during the BB walk, especially not release SSA
4572      names there, as this confuses the VN machinery.  The stmts ending
4573      up in el_to_remove are either stores or simple copies.
4574      Remove stmts in reverse order to make debug stmt creation possible.  */
4575   while (!el_to_remove.is_empty ())
4576     {
4577       stmt = el_to_remove.pop ();
4578 
4579       if (dump_file && (dump_flags & TDF_DETAILS))
4580 	{
4581 	  fprintf (dump_file, "Removing dead stmt ");
4582 	  print_gimple_stmt (dump_file, stmt, 0, 0);
4583 	}
4584 
4585       tree lhs;
4586       if (gimple_code (stmt) == GIMPLE_PHI)
4587 	lhs = gimple_phi_result (stmt);
4588       else
4589 	lhs = gimple_get_lhs (stmt);
4590 
4591       if (inserted_exprs
4592 	  && TREE_CODE (lhs) == SSA_NAME)
4593 	bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4594 
4595       gsi = gsi_for_stmt (stmt);
4596       if (gimple_code (stmt) == GIMPLE_PHI)
4597 	remove_phi_node (&gsi, true);
4598       else
4599 	{
4600 	  basic_block bb = gimple_bb (stmt);
4601 	  unlink_stmt_vdef (stmt);
4602 	  if (gsi_remove (&gsi, true))
4603 	    bitmap_set_bit (need_eh_cleanup, bb->index);
4604 	  release_defs (stmt);
4605 	}
4606 
4607       /* Removing a stmt may expose a forwarder block.  */
4608       el_todo |= TODO_cleanup_cfg;
4609     }
4610   el_to_remove.release ();
4611 
4612   /* Fixup stmts that became noreturn calls.  This may require splitting
4613      blocks and thus isn't possible during the dominator walk.  Do this
4614      in reverse order so we don't inadvertently remove a stmt we want to
4615      fix up by visiting a dominating now-noreturn call first.  */
4616   while (!el_to_fixup.is_empty ())
4617     {
4618       stmt = el_to_fixup.pop ();
4619 
4620       if (dump_file && (dump_flags & TDF_DETAILS))
4621 	{
4622 	  fprintf (dump_file, "Fixing up noreturn call ");
4623 	  print_gimple_stmt (dump_file, stmt, 0, 0);
4624 	}
4625 
4626       if (fixup_noreturn_call (stmt))
4627 	el_todo |= TODO_cleanup_cfg;
4628     }
4629   el_to_fixup.release ();
4630 
4631   return el_todo;
4632 }
4633 
4634 /* Perform CFG cleanups made necessary by elimination.  */
4635 
4636 static unsigned
4637 fini_eliminate (void)
4638 {
4639   bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4640   bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4641 
4642   if (do_eh_cleanup)
4643     gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4644 
4645   if (do_ab_cleanup)
4646     gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4647 
4648   BITMAP_FREE (need_eh_cleanup);
4649   BITMAP_FREE (need_ab_cleanup);
4650 
4651   if (do_eh_cleanup || do_ab_cleanup)
4652     return TODO_cleanup_cfg;
4653   return 0;
4654 }
4655 
4656 /* Borrow a bit of tree-ssa-dce.c for the moment.
4657    XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4658    this may be a bit faster, and we may want critical edges kept split.  */
4659 
4660 /* If OP's defining statement has not already been determined to be necessary,
4661    mark that statement necessary. Return the stmt, if it is newly
4662    necessary.  */
4663 
4664 static inline gimple
4665 mark_operand_necessary (tree op)
4666 {
4667   gimple stmt;
4668 
4669   gcc_assert (op);
4670 
4671   if (TREE_CODE (op) != SSA_NAME)
4672     return NULL;
4673 
4674   stmt = SSA_NAME_DEF_STMT (op);
4675   gcc_assert (stmt);
4676 
4677   if (gimple_plf (stmt, NECESSARY)
4678       || gimple_nop_p (stmt))
4679     return NULL;
4680 
4681   gimple_set_plf (stmt, NECESSARY, true);
4682   return stmt;
4683 }
4684 
4685 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4686    to insert PHI nodes sometimes, and because value numbering of casts isn't
4687    perfect, we sometimes end up inserting dead code.  This simple DCE-like
4688    pass removes any insertions we made that weren't actually used.  */
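
/* E.g. (hypothetical): if insertion created pretmp_1 = a_2 + b_3 in a
   predecessor but elimination ended up serving every use from some
   other leader, pretmp_1 has zero uses, its defining statement is
   never marked NECESSARY, and it is removed here.  */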
4689 
4690 static void
4691 remove_dead_inserted_code (void)
4692 {
4693   bitmap worklist;
4694   unsigned i;
4695   bitmap_iterator bi;
4696   gimple t;
4697 
4698   worklist = BITMAP_ALLOC (NULL);
4699   EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4700     {
4701       t = SSA_NAME_DEF_STMT (ssa_name (i));
4702       if (gimple_plf (t, NECESSARY))
4703 	bitmap_set_bit (worklist, i);
4704     }
4705   while (!bitmap_empty_p (worklist))
4706     {
4707       i = bitmap_first_set_bit (worklist);
4708       bitmap_clear_bit (worklist, i);
4709       t = SSA_NAME_DEF_STMT (ssa_name (i));
4710 
4711       /* PHI nodes are somewhat special in that each PHI alternative has
4712 	 data and control dependencies.  All the statements feeding the
4713 	 PHI node's arguments are always necessary. */
4714       if (gimple_code (t) == GIMPLE_PHI)
4715 	{
4716 	  unsigned k;
4717 
4718 	  for (k = 0; k < gimple_phi_num_args (t); k++)
4719 	    {
4720 	      tree arg = PHI_ARG_DEF (t, k);
4721 	      if (TREE_CODE (arg) == SSA_NAME)
4722 		{
4723 		  gimple n = mark_operand_necessary (arg);
4724 		  if (n)
4725 		    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4726 		}
4727 	    }
4728 	}
4729       else
4730 	{
4731 	  /* Propagate through the operands.  Examine all the USE, VUSE and
4732 	     VDEF operands in this statement.  Mark all the statements
4733 	     which feed this statement's uses as necessary.  */
4734 	  ssa_op_iter iter;
4735 	  tree use;
4736 
4737 	  /* The operands of VDEF expressions are also needed as they
4738 	     represent potential definitions that may reach this
4739 	     statement (VDEF operands allow us to follow def-def
4740 	     links).  */
4741 
4742 	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4743 	    {
4744 	      gimple n = mark_operand_necessary (use);
4745 	      if (n)
4746 		bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4747 	    }
4748 	}
4749     }
4750 
4751   EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4752     {
4753       t = SSA_NAME_DEF_STMT (ssa_name (i));
4754       if (!gimple_plf (t, NECESSARY))
4755 	{
4756 	  gimple_stmt_iterator gsi;
4757 
4758 	  if (dump_file && (dump_flags & TDF_DETAILS))
4759 	    {
4760 	      fprintf (dump_file, "Removing unnecessary insertion:");
4761 	      print_gimple_stmt (dump_file, t, 0, 0);
4762 	    }
4763 
4764 	  gsi = gsi_for_stmt (t);
4765 	  if (gimple_code (t) == GIMPLE_PHI)
4766 	    remove_phi_node (&gsi, true);
4767 	  else
4768 	    {
4769 	      gsi_remove (&gsi, true);
4770 	      release_defs (t);
4771 	    }
4772 	}
4773     }
4774   BITMAP_FREE (worklist);
4775 }
4776 
4777 
4778 /* Initialize data structures used by PRE.  */
4779 
4780 static void
4781 init_pre (void)
4782 {
4783   basic_block bb;
4784 
4785   next_expression_id = 1;
4786   expressions.create (0);
4787   expressions.safe_push (NULL);
4788   value_expressions.create (get_max_value_id () + 1);
4789   value_expressions.safe_grow_cleared (get_max_value_id () + 1);
4790   name_to_id.create (0);
4791 
4792   inserted_exprs = BITMAP_ALLOC (NULL);
4793 
4794   connect_infinite_loops_to_exit ();
4795   memset (&pre_stats, 0, sizeof (pre_stats));
4796 
4797   /* For ANTIC computation we need a postorder that also guarantees that
4798      a block with a single successor is visited after its successor.
4799      RPO on the inverted CFG has this property.  */
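  /* An informal example: for a chain B1 -> B2 -> B3 -> EXIT, an RPO
     of the inverted CFG visits B3 before B2 and B2 before B1, so each
     single-successor block is visited after its successor, as the
     backward ANTIC iteration requires.  */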
4800   postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
4801   postorder_num = inverted_post_order_compute (postorder);
4802 
4803   alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4804 
4805   calculate_dominance_info (CDI_DOMINATORS);
4806 
4807   bitmap_obstack_initialize (&grand_bitmap_obstack);
4808   phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
4809   expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
4810   bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4811 				       sizeof (struct bitmap_set), 30);
4812   pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4813 				     sizeof (struct pre_expr_d), 30);
4814   FOR_ALL_BB_FN (bb, cfun)
4815     {
4816       EXP_GEN (bb) = bitmap_set_new ();
4817       PHI_GEN (bb) = bitmap_set_new ();
4818       TMP_GEN (bb) = bitmap_set_new ();
4819       AVAIL_OUT (bb) = bitmap_set_new ();
4820     }
4821 }
4822 
4823 
4824 /* Deallocate data structures used by PRE.  */
4825 
4826 static void
4827 fini_pre ()
4828 {
4829   free (postorder);
4830   value_expressions.release ();
4831   BITMAP_FREE (inserted_exprs);
4832   bitmap_obstack_release (&grand_bitmap_obstack);
4833   free_alloc_pool (bitmap_set_pool);
4834   free_alloc_pool (pre_expr_pool);
4835   delete phi_translate_table;
4836   phi_translate_table = NULL;
4837   delete expression_to_id;
4838   expression_to_id = NULL;
4839   name_to_id.release ();
4840 
4841   free_aux_for_blocks ();
4842 }
4843 
4844 namespace {
4845 
4846 const pass_data pass_data_pre =
4847 {
4848   GIMPLE_PASS, /* type */
4849   "pre", /* name */
4850   OPTGROUP_NONE, /* optinfo_flags */
4851   TV_TREE_PRE, /* tv_id */
4852   /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
4853      pass_pre.  */
4854   ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
4855   0, /* properties_provided */
4856   PROP_no_crit_edges, /* properties_destroyed */
4857   TODO_rebuild_alias, /* todo_flags_start */
4858   0, /* todo_flags_finish */
4859 };
4860 
4861 class pass_pre : public gimple_opt_pass
4862 {
4863 public:
4864   pass_pre (gcc::context *ctxt)
4865     : gimple_opt_pass (pass_data_pre, ctxt)
4866   {}
4867 
4868   /* opt_pass methods: */
4869   virtual bool gate (function *) { return flag_tree_pre != 0; }
4870   virtual unsigned int execute (function *);
4871 
4872 }; // class pass_pre
4873 
4874 unsigned int
4875 pass_pre::execute (function *fun)
4876 {
4877   unsigned int todo = 0;
4878 
4879   do_partial_partial =
4880     flag_tree_partial_pre && optimize_function_for_speed_p (fun);
4881 
4882   /* This has to happen before SCCVN runs because
4883      loop_optimizer_init may create new phis, etc.  */
4884   loop_optimizer_init (LOOPS_NORMAL);
4885 
4886   if (!run_scc_vn (VN_WALK))
4887     {
4888       loop_optimizer_finalize ();
4889       return 0;
4890     }
4891 
4892   init_pre ();
4893   scev_initialize ();
4894 
4895   /* Collect and value number expressions computed in each basic block.  */
4896   compute_avail ();
4897 
4898   /* Insert can get quite slow on an incredibly large number of basic
4899      blocks due to some quadratic behavior.  Until this behavior is
4900      fixed, don't run it when we have an incredibly large number of
4901      bb's.  If we aren't going to run insert, there is no point in
4902      computing ANTIC, either, even though it's plenty fast.  */
4903   if (n_basic_blocks_for_fn (fun) < 4000)
4904     {
4905       compute_antic ();
4906       insert ();
4907     }
4908 
4909   /* Make sure to remove fake edges before committing our inserts.
4910      This ensures we don't end up with extra critical edges that
4911      we would need to split.  */
4912   remove_fake_exit_edges ();
4913   gsi_commit_edge_inserts ();
4914 
4915   /* eliminate () folds statements, which might (though it should
4916      not...) end up not keeping virtual operands up-to-date.  */
4917   gcc_assert (!need_ssa_update_p (fun));
4918 
4919   /* Remove all the redundant expressions.  */
4920   todo |= eliminate (true);
4921 
4922   statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4923   statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
4924   statistics_counter_event (fun, "New PHIs", pre_stats.phis);
4925   statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
4926 
4927   clear_expression_ids ();
4928   remove_dead_inserted_code ();
4929 
4930   scev_finalize ();
4931   fini_pre ();
4932   todo |= fini_eliminate ();
4933   loop_optimizer_finalize ();
4934 
4935   /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4936      case we can merge the block with the remaining predecessor of the block.
4937      It should either:
4938      - call merge_blocks after each tail merge iteration
4939      - call merge_blocks after all tail merge iterations
4940      - mark TODO_cleanup_cfg when necessary
4941      - share the cfg cleanup with fini_pre.  */
4942   todo |= tail_merge_optimize (todo);
4943 
4944   free_scc_vn ();
4945 
4946   /* Tail merging invalidates the virtual SSA web, together with
4947      cfg-cleanup opportunities exposed by PRE this will wreck the
4948      SSA updating machinery.  So make sure to run update-ssa
4949      manually, before eventually scheduling cfg-cleanup as part of
4950      the todo.  */
4951   update_ssa (TODO_update_ssa_only_virtuals);
4952 
4953   return todo;
4954 }
4955 
4956 } // anon namespace
4957 
4958 gimple_opt_pass *
4959 make_pass_pre (gcc::context *ctxt)
4960 {
4961   return new pass_pre (ctxt);
4962 }
4963 
4964 namespace {
4965 
4966 const pass_data pass_data_fre =
4967 {
4968   GIMPLE_PASS, /* type */
4969   "fre", /* name */
4970   OPTGROUP_NONE, /* optinfo_flags */
4971   TV_TREE_FRE, /* tv_id */
4972   ( PROP_cfg | PROP_ssa ), /* properties_required */
4973   0, /* properties_provided */
4974   0, /* properties_destroyed */
4975   0, /* todo_flags_start */
4976   0, /* todo_flags_finish */
4977 };
4978 
4979 class pass_fre : public gimple_opt_pass
4980 {
4981 public:
4982   pass_fre (gcc::context *ctxt)
4983     : gimple_opt_pass (pass_data_fre, ctxt)
4984   {}
4985 
4986   /* opt_pass methods: */
4987   opt_pass * clone () { return new pass_fre (m_ctxt); }
4988   virtual bool gate (function *) { return flag_tree_fre != 0; }
4989   virtual unsigned int execute (function *);
4990 
4991 }; // class pass_fre
4992 
4993 unsigned int
4994 pass_fre::execute (function *fun)
4995 {
4996   unsigned int todo = 0;
4997 
4998   if (!run_scc_vn (VN_WALKREWRITE))
4999     return 0;
5000 
5001   memset (&pre_stats, 0, sizeof (pre_stats));
5002 
5003   /* Remove all the redundant expressions.  */
5004   todo |= eliminate (false);
5005 
5006   todo |= fini_eliminate ();
5007 
5008   free_scc_vn ();
5009 
5010   statistics_counter_event (fun, "Insertions", pre_stats.insertions);
5011   statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
5012 
5013   return todo;
5014 }
5015 
5016 } // anon namespace
5017 
5018 gimple_opt_pass *
5019 make_pass_fre (gcc::context *ctxt)
5020 {
5021   return new pass_fre (ctxt);
5022 }
5023