/* SCC value numbering for trees
   Copyright (C) 2006-2019 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "builtins.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code,
   it is equivalent to a regular hash-based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
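
/* As a small worked example of the optimistic iteration described
   above, consider the SSA cycle

     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 0;

   Optimistically assuming i_1 == 0 lets us value-number i_2 to 0 as
   well, which in turn confirms the assumption for i_1 on the next
   iteration, so the whole SCC collapses to the constant 0.  A
   pessimistic scheme starting from VARYING could not discover this.  */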

/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static vn_lookup_kind default_vn_walk_kind;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation VNO1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation VP1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VR.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation VR1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

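/* Return true if reference operations V and C are equal.  */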
inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;


/* Valueization hook.  Valueize NAME if it is an SSA name, otherwise
   just return it.  */
tree (*vn_valueize) (tree);


/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;

static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *, bool);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);

/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now, we have to do
     initialization different from VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
        /* All undefined vars are VARYING.  */
        newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}

/* Return the SSA value of X.  */

inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* Similar to the above but used as callback for walk_non_aliased_vuses
   and thus should stop at an unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}


/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
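
/* For example, a call statement is classified as VN_REFERENCE, a PHI
   as VN_PHI, x_1 = y_2 + z_3 as VN_NARY, x_1 = a.b as VN_REFERENCE
   and x_1 = 1 as VN_CONSTANT.  */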

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT; if it does not exist, create a
   new one.  Return the value id in either case.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in. */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
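
/* Note the special-casing of ADDR_EXPR operands feeding a MEM_REF in
   the hash above: constant offsets are accumulated, and the underlying
   object rather than the address expression is hashed, so different
   decompositions of the same access, e.g. MEM[&a + 8] and a reference
   built from &a.f with f at byte offset 8, hash alike.
   vn_reference_eq below applies the matching canonicalization when
   comparing.  */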

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2))
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| cfun->after_inlining)
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (! temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
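
/* As an illustration, for a load from a.b[i_1] the resulting vector
   (outermost operation first) is roughly

     { ARRAY_REF (op0 = i_1, op1 = lower bound, op2 = element size),
       COMPONENT_REF (op0 = FIELD_DECL b),
       MEM_REF (op0 = 0),
       ADDR_EXPR (op0 = &a) }

   where the decl a has been canonicalized to MEM[&a] as described
   above.  */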

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If two calls have a different non-SSA LHS, their vdef value numbers
     should be different.  By adding the LHS to the vector here, we ensure
     the hashcode differs, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_fntype (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
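
/* For instance, with reference ops for MEM[&a.b.c] the above rewrites
   the ADDR_EXPR operand to &a and folds the unit offset of .b.c into
   the preceding MEM_REF operand, yielding MEM[&a + off(b.c)].  */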

/* Fold the address computation feeding the MEM_REF at position *I_P in a
   vn_reference_op_s vector *OPS by forward-propagating ADDR_EXPR and
   POINTER_PLUS_EXPR definitions of the address SSA name.  Updates *I_P
   to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  bool changed = false;
  vn_reference_op_t op;

  do
    {
      unsigned int i = *i_p;
      op = &(*ops)[i];
      vn_reference_op_t mem_op = &(*ops)[i - 1];
      gimple *def_stmt;
      enum tree_code code;
      poly_offset_int off;

      def_stmt = SSA_NAME_DEF_STMT (op->op0);
      if (!is_gimple_assign (def_stmt))
	return changed;

      code = gimple_assign_rhs_code (def_stmt);
      if (code != ADDR_EXPR
	  && code != POINTER_PLUS_EXPR)
	return changed;

      off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

      /* The only thing we have to do is from &OBJ.foo.bar add the offset
	 from .foo.bar to the preceding MEM_REF offset and replace the
	 address with &OBJ.  */
      if (code == ADDR_EXPR)
	{
	  tree addr, addr_base;
	  poly_int64 addr_offset;

	  addr = gimple_assign_rhs1 (def_stmt);
	  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						     &addr_offset);
	  /* If that didn't work because the address isn't invariant,
	     propagate the reference tree from the address operation in
	     case the current dereference isn't offsetted.  */
	  if (!addr_base
	      && *i_p == ops->length () - 1
	      && known_eq (off, 0)
	      /* This makes us disable this transform for PRE where the
		 reference ops might be also used for code insertion which
		 is invalid.  */
	      && default_vn_walk_kind == VN_WALKREWRITE)
	    {
	      auto_vec<vn_reference_op_s, 32> tem;
	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	      /* Make sure to preserve TBAA info.  The only objects not
		 wrapped in MEM_REFs that can have their address taken are
		 STRING_CSTs.  */
	      if (tem.length () >= 2
		  && tem[tem.length () - 2].opcode == MEM_REF)
		{
		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
		  new_mem_op->op0
		      = wide_int_to_tree (TREE_TYPE (mem_op->op0),
					  wi::to_poly_wide (new_mem_op->op0));
		}
	      else
		gcc_assert (tem.last ().opcode == STRING_CST);
	      ops->pop ();
	      ops->pop ();
	      ops->safe_splice (tem);
	      --*i_p;
	      return true;
	    }
	  if (!addr_base
	      || TREE_CODE (addr_base) != MEM_REF
	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
								    0))))
	    return changed;

	  off += addr_offset;
	  off += mem_ref_offset (addr_base);
	  op->op0 = TREE_OPERAND (addr_base, 0);
	}
      else
	{
	  tree ptr, ptroff;
	  ptr = gimple_assign_rhs1 (def_stmt);
	  ptroff = gimple_assign_rhs2 (def_stmt);
	  if (TREE_CODE (ptr) != SSA_NAME
	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	      /* Make sure to not endlessly recurse.
		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
		 happen when we value-number a PHI to its backedge value.  */
	      || SSA_VAL (ptr) == op->op0
	      || !poly_int_tree_p (ptroff))
	    return changed;

	  off += wi::to_poly_offset (ptroff);
	  op->op0 = ptr;
	}

      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      /* ???  Can end up with endless recursion here!?
	 gcc.c-torture/execute/strcmp-1.c  */
      if (TREE_CODE (op->op0) == SSA_NAME)
	op->op0 = SSA_VAL (op->op0);
      if (TREE_CODE (op->op0) != SSA_NAME)
	op->opcode = TREE_CODE (op->op0);

      changed = true;
    }
  /* Tail-recurse.  */
  while (TREE_CODE (op->op0) == SSA_NAME);

  /* Fold a remaining *&.  */
  if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);

  return changed;
}
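
/* As an example of the forward propagation above, given

     p_1 = q_2 + 16;
     ... = MEM[p_1];

   the POINTER_PLUS_EXPR definition is folded into the reference ops so
   that the lookup sees MEM[q_2 + 16], iterating as long as the pointer
   is itself defined by another ADDR_EXPR or POINTER_PLUS_EXPR.  */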

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
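
/* For example, with

     static const int a[4] = { 1, 2, 3, 5 };

   a load from a[2] has a constant base and a known offset, so the
   constant-initializer path above folds it to 3 via
   fold_ctor_reference.  */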

/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}

/* Transform any SSA_NAMEs in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */
1535 
1536 static vec<vn_reference_op_s>
1537 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1538 		 bool with_avail = false)
1539 {
1540   vn_reference_op_t vro;
1541   unsigned int i;
1542 
1543   *valueized_anything = false;
1544 
1545   FOR_EACH_VEC_ELT (orig, i, vro)
1546     {
1547       if (vro->opcode == SSA_NAME
1548 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1549 	{
1550 	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1551 	  if (tem != vro->op0)
1552 	    {
1553 	      *valueized_anything = true;
1554 	      vro->op0 = tem;
1555 	    }
1556 	  /* If it transforms from an SSA_NAME to a constant, update
1557 	     the opcode.  */
1558 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1559 	    vro->opcode = TREE_CODE (vro->op0);
1560 	}
1561       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1562 	{
1563 	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1564 	  if (tem != vro->op1)
1565 	    {
1566 	      *valueized_anything = true;
1567 	      vro->op1 = tem;
1568 	    }
1569 	}
1570       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1571 	{
1572 	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1573 	  if (tem != vro->op2)
1574 	    {
1575 	      *valueized_anything = true;
1576 	      vro->op2 = tem;
1577 	    }
1578 	}
1579       /* If it transforms from an SSA_NAME to an address, fold with
1580 	 a preceding indirect reference.  */
1581       if (i > 0
1582 	  && vro->op0
1583 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1584 	  && orig[i - 1].opcode == MEM_REF)
1585 	{
1586 	  if (vn_reference_fold_indirect (&orig, &i))
1587 	    *valueized_anything = true;
1588 	}
1589       else if (i > 0
1590 	       && vro->opcode == SSA_NAME
1591 	       && orig[i - 1].opcode == MEM_REF)
1592 	{
1593 	  if (vn_reference_maybe_forwprop_address (&orig, &i))
1594 	    *valueized_anything = true;
1595 	}
1596       /* If it transforms a non-constant ARRAY_REF into a constant
1597 	 one, adjust the constant offset.  */
1598       else if (vro->opcode == ARRAY_REF
1599 	       && known_eq (vro->off, -1)
1600 	       && poly_int_tree_p (vro->op0)
1601 	       && poly_int_tree_p (vro->op1)
1602 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1603 	{
1604 	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1605 				  - wi::to_poly_offset (vro->op1))
1606 				 * wi::to_offset (vro->op2)
1607 				 * vn_ref_op_align_unit (vro));
1608 	  off.to_shwi (&vro->off);
1609 	}
1610     }
1611 
1612   return orig;
1613 }
1614 
1615 static vec<vn_reference_op_s>
1616 valueize_refs (vec<vn_reference_op_s> orig)
1617 {
1618   bool tem;
1619   return valueize_refs_1 (orig, &tem);
1620 }
1621 
1622 static vec<vn_reference_op_s> shared_lookup_references;
1623 
1624 /* Create a vector of vn_reference_op_s structures from REF, a
1625    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1626    this function.  *VALUEIZED_ANYTHING will specify whether any
1627    operands were valueized.  */
1628 
1629 static vec<vn_reference_op_s>
1630 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1631 {
1632   if (!ref)
1633     return vNULL;
1634   shared_lookup_references.truncate (0);
1635   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1636   shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1637 					      valueized_anything);
1638   return shared_lookup_references;
1639 }
1640 
1641 /* Create a vector of vn_reference_op_s structures from CALL, a
1642    call statement.  The vector is shared among all callers of
1643    this function.  */
1644 
1645 static vec<vn_reference_op_s>
1646 valueize_shared_reference_ops_from_call (gcall *call)
1647 {
1648   if (!call)
1649     return vNULL;
1650   shared_lookup_references.truncate (0);
1651   copy_reference_ops_from_call (call, &shared_lookup_references);
1652   shared_lookup_references = valueize_refs (shared_lookup_references);
1653   return shared_lookup_references;
1654 }
1655 
1656 /* Lookup a SCCVN reference operation VR in the current hash table.
1657    Returns the resulting value number if it exists in the hash table,
1658    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1659    vn_reference_t stored in the hashtable if something is found.  */
1660 
1661 static tree
1662 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1663 {
1664   vn_reference_s **slot;
1665   hashval_t hash;
1666 
1667   hash = vr->hashcode;
1668   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1669   if (slot)
1670     {
1671       if (vnresult)
1672 	*vnresult = (vn_reference_t)*slot;
1673       return ((vn_reference_t)*slot)->result;
1674     }
1675 
1676   return NULL_TREE;
1677 }
1678 
1679 struct vn_walk_cb_data
1680 {
1681   vn_walk_cb_data (vn_reference_t vr_, tree *last_vuse_ptr_,
1682                    vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
1683     : vr (vr_), last_vuse_ptr (last_vuse_ptr_), vn_walk_kind (vn_walk_kind_),
1684       tbaa_p (tbaa_p_)
1685     {}
1686 
1687   vn_reference_t vr;
1688   tree *last_vuse_ptr;
1689   vn_lookup_kind vn_walk_kind;
1690   bool tbaa_p;
1691 };
1692 
1693 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
1694    with the current VUSE and performs the expression lookup.  */
1695 
1696 static void *
1697 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
1698 {
1699   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1700   vn_reference_t vr = data->vr;
1701   vn_reference_s **slot;
1702   hashval_t hash;
1703 
1704   if (data->last_vuse_ptr)
1705     *data->last_vuse_ptr = vuse;
1706 
1707   /* Fixup vuse and hash.  */
1708   if (vr->vuse)
1709     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1710   vr->vuse = vuse_ssa_val (vuse);
1711   if (vr->vuse)
1712     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1713 
1714   hash = vr->hashcode;
1715   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1716   if (slot)
1717     return *slot;
1718 
1719   return NULL;
1720 }
1721 
1722 /* Lookup an existing or insert a new vn_reference entry into the
1723    value table for the VUSE, SET, TYPE, OPERANDS reference which
1724    has the value VALUE which is either a constant or an SSA name.  */
1725 
1726 static vn_reference_t
1727 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1728 					  alias_set_type set,
1729 					  tree type,
1730 					  vec<vn_reference_op_s,
1731 					        va_heap> operands,
1732 					  tree value)
1733 {
1734   vn_reference_s vr1;
1735   vn_reference_t result;
1736   unsigned value_id;
1737   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1738   vr1.operands = operands;
1739   vr1.type = type;
1740   vr1.set = set;
1741   vr1.hashcode = vn_reference_compute_hash (&vr1);
1742   if (vn_reference_lookup_1 (&vr1, &result))
1743     return result;
1744   if (TREE_CODE (value) == SSA_NAME)
1745     value_id = VN_INFO (value)->value_id;
1746   else
1747     value_id = get_or_alloc_constant_value_id (value);
1748   return vn_reference_insert_pieces (vuse, set, type,
1749 				     operands.copy (), value, value_id);
1750 }
1751 
1752 /* Return a value-number for RES_OP, either by looking up an existing
1753    value-number for the simplified result or by inserting the operation
1754    if INSERT is true.  */
1755 
1756 static tree
1757 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1758 {
1759   tree result = NULL_TREE;
1760   /* We will be creating a value number for
1761        RES_OP.
1762      So first simplify and look up this expression to see if it
1763      is already available.  */
1764   mprts_hook = vn_lookup_simplify_result;
1765   bool res = false;
1766   switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
1767     {
1768     case 1:
1769       res = gimple_resimplify1 (NULL, res_op, vn_valueize);
1770       break;
1771     case 2:
1772       res = gimple_resimplify2 (NULL, res_op, vn_valueize);
1773       break;
1774     case 3:
1775       res = gimple_resimplify3 (NULL, res_op, vn_valueize);
1776       break;
1777     }
1778   mprts_hook = NULL;
1779   gimple *new_stmt = NULL;
1780   if (res
1781       && gimple_simplified_result_is_gimple_val (res_op))
1782     {
1783       /* The expression is already available.  */
1784       result = res_op->ops[0];
1785       /* Valueize it, simplification returns something in AVAIL only.  */
1786       if (TREE_CODE (result) == SSA_NAME)
1787 	result = SSA_VAL (result);
1788     }
1789   else
1790     {
1791       tree val = vn_lookup_simplify_result (res_op);
1792       if (!val && insert)
1793 	{
1794 	  gimple_seq stmts = NULL;
1795 	  result = maybe_push_res_to_seq (res_op, &stmts);
1796 	  if (result)
1797 	    {
1798 	      gcc_assert (gimple_seq_singleton_p (stmts));
1799 	      new_stmt = gimple_seq_first_stmt (stmts);
1800 	    }
1801 	}
1802       else
1803 	/* The expression is already available.  */
1804 	result = val;
1805     }
1806   if (new_stmt)
1807     {
1808       /* The expression is not yet available, value-number lhs to
1809 	 the new SSA_NAME we created.  */
1810       /* Initialize value-number information properly.  */
1811       vn_ssa_aux_t result_info = VN_INFO (result);
1812       result_info->valnum = result;
1813       result_info->value_id = get_next_value_id ();
1814       result_info->visited = 1;
1815       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1816 					  new_stmt);
1817       result_info->needs_insertion = true;
1818       /* ???  PRE phi-translation inserts NARYs without corresponding
1819          SSA name result.  Re-use those but set their result according
1820 	 to the stmt we just built.  */
1821       vn_nary_op_t nary = NULL;
1822       vn_nary_op_lookup_stmt (new_stmt, &nary);
1823       if (nary)
1824 	{
1825 	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
1826 	  nary->u.result = gimple_assign_lhs (new_stmt);
1827 	}
1828       /* As all "inserted" statements are singleton SCCs, insert
1829 	 to the valid table.  This is strictly needed to
1830 	 avoid re-generating new value SSA_NAMEs for the same
1831 	 expression during SCC iteration over and over (the
1832 	 optimistic table gets cleared after each iteration).
1833 	 We do not need to insert into the optimistic table, as
1834 	 lookups there will fall back to the valid table.  */
1835       else
1836 	{
1837 	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
1838 	  vn_nary_op_t vno1
1839 	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
1840 	  vno1->value_id = result_info->value_id;
1841 	  vno1->length = length;
1842 	  vno1->predicated_values = 0;
1843 	  vno1->u.result = result;
1844 	  init_vn_nary_op_from_stmt (vno1, new_stmt);
1845 	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
1846 	  /* Also do not link it into the undo chain.  */
1847 	  last_inserted_nary = vno1->next;
1848 	  vno1->next = (vn_nary_op_t)(void *)-1;
1849 	}
1850       if (dump_file && (dump_flags & TDF_DETAILS))
1851 	{
1852 	  fprintf (dump_file, "Inserting name ");
1853 	  print_generic_expr (dump_file, result);
1854 	  fprintf (dump_file, " for expression ");
1855 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1856 	  fprintf (dump_file, "\n");
1857 	}
1858     }
1859   return result;
1860 }
1861 
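     /* A minimal example of the above (made-up names): asked to
        value-number the operation
          _1 = x_2 + 0;
        gimple_resimplify2 reduces it to x_2 and the existing value of
        x_2 is returned without any insertion.  If instead the operation
        does not simplify and has no recorded value, a statement like
          _3 = x_2 + y_4;
        is built, value-numbered to its own lhs, marked as needing
        insertion and entered into the valid table so SCC iteration does
        not create a fresh SSA name for it each round.  */
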
1862 /* Return a value-number for RES_OP, either by looking up an existing
1863    value-number for the simplified result or by inserting the operation.  */
1864 
1865 static tree
1866 vn_nary_build_or_lookup (gimple_match_op *res_op)
1867 {
1868   return vn_nary_build_or_lookup_1 (res_op, true);
1869 }
1870 
1871 /* Try to simplify the expression described by the n-ary operation
1872    NARY and return its value if present.  */
1873 
1874 tree
1875 vn_nary_simplify (vn_nary_op_t nary)
1876 {
1877   if (nary->length > gimple_match_op::MAX_NUM_OPS)
1878     return NULL_TREE;
1879   gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
1880 		      nary->type, nary->length);
1881   memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
1882   return vn_nary_build_or_lookup_1 (&op, false);
1883 }
1884 
1885 /* Elimination engine.  */
1886 
1887 class eliminate_dom_walker : public dom_walker
1888 {
1889 public:
1890   eliminate_dom_walker (cdi_direction, bitmap);
1891   ~eliminate_dom_walker ();
1892 
1893   virtual edge before_dom_children (basic_block);
1894   virtual void after_dom_children (basic_block);
1895 
1896   virtual tree eliminate_avail (basic_block, tree op);
1897   virtual void eliminate_push_avail (basic_block, tree op);
1898   tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
1899 
1900   void eliminate_stmt (basic_block, gimple_stmt_iterator *);
1901 
1902   unsigned eliminate_cleanup (bool region_p = false);
1903 
1904   bool do_pre;
1905   unsigned int el_todo;
1906   unsigned int eliminations;
1907   unsigned int insertions;
1908 
1909   /* SSA names that had their defs inserted by PRE if do_pre.  */
1910   bitmap inserted_exprs;
1911 
1912   /* Blocks with statements that have had their EH properties changed.  */
1913   bitmap need_eh_cleanup;
1914 
1915   /* Blocks with statements that have had their AB properties changed.  */
1916   bitmap need_ab_cleanup;
1917 
1918   /* Local state for the eliminate domwalk.  */
1919   auto_vec<gimple *> to_remove;
1920   auto_vec<gimple *> to_fixup;
1921   auto_vec<tree> avail;
1922   auto_vec<tree> avail_stack;
1923 };
1924 
1925 /* Adaptor to the elimination engine using RPO availability.  */
1926 
1927 class rpo_elim : public eliminate_dom_walker
1928 {
1929 public:
1930   rpo_elim(basic_block entry_)
1931     : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
1932   ~rpo_elim();
1933 
1934   virtual tree eliminate_avail (basic_block, tree op);
1935 
1936   virtual void eliminate_push_avail (basic_block, tree);
1937 
1938   basic_block entry;
1939   /* Instead of having a local availability lattice for each
1940      basic-block and availability at X defined as union of
1941      the local availabilities at X and its dominators we're
1942      turning this upside down and track availability per
1943      value given values are usually made available at very
1944      few points (at least one).
1945      So we have a value -> vec<location, leader> map where
1946      LOCATION is specifying the basic-block LEADER is made
1947      available for VALUE.  We push to this vector in RPO
1948      order thus for iteration we can simply pop the last
1949      entries.
1950      LOCATION is the basic-block index and LEADER is its
1951      SSA name version.  */
1952   /* ???  We'd like to use auto_vec here with embedded storage
1953      but that doesn't play well until we can provide move
1954      constructors and use std::move on hash-table expansion.
1955      So for now this is a bit more expensive than necessary.
1956      We eventually want to switch to a chaining scheme like the
1957      one used for hashtable entries for unwinding, which would make
1958      it possible to make the vector part of the vn_ssa_aux structure.  */
1959   typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
1960   rpo_avail_t m_rpo_avail;
1961 };
1962 
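     /* Illustrative sketch of the map described above (made-up names):
        if a_3 is the leader making a value available in basic-block 2
        and b_5 the leader in basic-block 4, m_rpo_avail contains
          value -> [(2, 3), (4, 5)]
        i.e. pairs of basic-block index and SSA name version, pushed in
        RPO order; availability queries can then walk the vector from
        the back to find the closest recorded location.  */
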
1963 /* Global RPO state for access from hooks.  */
1964 static rpo_elim *rpo_avail;
1965 basic_block vn_context_bb;
1966 
1967 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1968    from the statement defining VUSE and if not successful tries to
1969    translate *REF and *VR through an aggregate copy at the definition
1970    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
1971    of *REF and *VR.  If only disambiguation was performed then
1972    *DISAMBIGUATE_ONLY is set to true.  */
1973 
1974 static void *
1975 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
1976 		       bool *disambiguate_only)
1977 {
1978   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1979   vn_reference_t vr = data->vr;
1980   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1981   tree base = ao_ref_base (ref);
1982   HOST_WIDE_INT offseti, maxsizei;
1983   static vec<vn_reference_op_s> lhs_ops;
1984   ao_ref lhs_ref;
1985   bool lhs_ref_ok = false;
1986   poly_int64 copy_size;
1987 
1988   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1989   if (is_gimple_assign (def_stmt))
1990     {
1991       tree lhs = gimple_assign_lhs (def_stmt);
1992       bool valueized_anything = false;
1993       /* Avoid re-allocation overhead.  */
1994       lhs_ops.truncate (0);
1995       basic_block saved_rpo_bb = vn_context_bb;
1996       vn_context_bb = gimple_bb (def_stmt);
1997       copy_reference_ops_from_ref (lhs, &lhs_ops);
1998       lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
1999       vn_context_bb = saved_rpo_bb;
2000       if (valueized_anything)
2001 	{
2002 	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
2003 						      get_alias_set (lhs),
2004 						      TREE_TYPE (lhs), lhs_ops);
2005 	  if (lhs_ref_ok
2006 	      && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2007 	    {
2008 	      *disambiguate_only = true;
2009 	      return NULL;
2010 	    }
2011 	}
2012       else
2013 	{
2014 	  ao_ref_init (&lhs_ref, lhs);
2015 	  lhs_ref_ok = true;
2016 	}
2017 
2018       /* If we reach a clobbering statement try to skip it and see if
2019          we find a VN result with exactly the same value as the
2020 	 possible clobber.  In this case we can ignore the clobber
2021 	 and return the found value.  */
2022       if (data->vn_walk_kind == VN_WALKREWRITE
2023 	  && is_gimple_reg_type (TREE_TYPE (lhs))
2024 	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
2025 	  && ref->ref)
2026 	{
2027 	  tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2028 	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
2029 	  data->last_vuse_ptr = NULL;
2030 	  tree saved_vuse = vr->vuse;
2031 	  hashval_t saved_hashcode = vr->hashcode;
2032 	  void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2033 	  /* Need to restore vr->vuse and vr->hashcode.  */
2034 	  vr->vuse = saved_vuse;
2035 	  vr->hashcode = saved_hashcode;
2036 	  data->last_vuse_ptr = saved_last_vuse_ptr;
2037 	  if (res && res != (void *)-1)
2038 	    {
2039 	      vn_reference_t vnresult = (vn_reference_t) res;
2040 	      if (vnresult->result
2041 		  && operand_equal_p (vnresult->result,
2042 				      gimple_assign_rhs1 (def_stmt), 0)
2043 		  /* We have to honor our promise about union type punning
2044 		     and also support arbitrary overlaps with
2045 		     -fno-strict-aliasing.  So simply resort to alignment to
2046 		     rule out overlaps.  Do this check last because it is
2047 		     quite expensive compared to the hash-lookup above.  */
2048 		  && multiple_p (get_object_alignment (ref->ref), ref->size)
2049 		  && multiple_p (get_object_alignment (lhs), ref->size))
2050 		return res;
2051 	    }
2052 	}
2053     }
2054   else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2055 	   && gimple_call_num_args (def_stmt) <= 4)
2056     {
2057       /* For builtin calls, valueize their arguments and call the
2058          alias oracle again.  Valueization may improve points-to
2059 	 info of pointers and constify size and position arguments.
2060 	 Originally this was motivated by PR61034 which has
2061 	 conditional calls to free falsely clobbering ref because
2062 	 of imprecise points-to info of the argument.  */
2063       tree oldargs[4];
2064       bool valueized_anything = false;
2065       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2066 	{
2067 	  oldargs[i] = gimple_call_arg (def_stmt, i);
2068 	  tree val = vn_valueize (oldargs[i]);
2069 	  if (val != oldargs[i])
2070 	    {
2071 	      gimple_call_set_arg (def_stmt, i, val);
2072 	      valueized_anything = true;
2073 	    }
2074 	}
2075       if (valueized_anything)
2076 	{
2077 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2078 					       ref);
2079 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2080 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
2081 	  if (!res)
2082 	    {
2083 	      *disambiguate_only = true;
2084 	      return NULL;
2085 	    }
2086 	}
2087     }
2088 
2089   /* If we are looking for redundant stores do not create new hashtable
2090      entries from aliasing defs with made up alias-sets.  */
2091   if (*disambiguate_only || !data->tbaa_p)
2092     return (void *)-1;
2093 
2094   /* If we cannot constrain the size of the reference we cannot
2095      test if anything kills it.  */
2096   if (!ref->max_size_known_p ())
2097     return (void *)-1;
2098 
2099   poly_int64 offset = ref->offset;
2100   poly_int64 maxsize = ref->max_size;
2101 
2102   /* We can't deduce anything useful from clobbers.  */
2103   if (gimple_clobber_p (def_stmt))
2104     return (void *)-1;
2105 
2106   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
2107      from that definition.
2108      1) Memset.  */
2109   if (is_gimple_reg_type (vr->type)
2110       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2111       && (integer_zerop (gimple_call_arg (def_stmt, 1))
2112 	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2113 	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2114 	      && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2115 	      && offset.is_constant (&offseti)
2116 	      && offseti % BITS_PER_UNIT == 0
2117 	      && multiple_p (ref->size, BITS_PER_UNIT)))
2118       && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2119       && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2120 	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2121     {
2122       tree base2;
2123       poly_int64 offset2, size2, maxsize2;
2124       bool reverse;
2125       tree ref2 = gimple_call_arg (def_stmt, 0);
2126       if (TREE_CODE (ref2) == SSA_NAME)
2127 	{
2128 	  ref2 = SSA_VAL (ref2);
2129 	  if (TREE_CODE (ref2) == SSA_NAME
2130 	      && (TREE_CODE (base) != MEM_REF
2131 		  || TREE_OPERAND (base, 0) != ref2))
2132 	    {
2133 	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2134 	      if (gimple_assign_single_p (def_stmt)
2135 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2136 		ref2 = gimple_assign_rhs1 (def_stmt);
2137 	    }
2138 	}
2139       if (TREE_CODE (ref2) == ADDR_EXPR)
2140 	{
2141 	  ref2 = TREE_OPERAND (ref2, 0);
2142 	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2143 					   &reverse);
2144 	  if (!known_size_p (maxsize2)
2145 	      || !known_eq (maxsize2, size2)
2146 	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2147 	    return (void *)-1;
2148 	}
2149       else if (TREE_CODE (ref2) == SSA_NAME)
2150 	{
2151 	  poly_int64 soff;
2152 	  if (TREE_CODE (base) != MEM_REF
2153 	      || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2154 	    return (void *)-1;
2155 	  offset += soff;
2156 	  offset2 = 0;
2157 	  if (TREE_OPERAND (base, 0) != ref2)
2158 	    {
2159 	      gimple *def = SSA_NAME_DEF_STMT (ref2);
2160 	      if (is_gimple_assign (def)
2161 		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2162 		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2163 		  && poly_int_tree_p (gimple_assign_rhs2 (def))
2164 		  && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2165 		      << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2166 		{
2167 		  ref2 = gimple_assign_rhs1 (def);
2168 		  if (TREE_CODE (ref2) == SSA_NAME)
2169 		    ref2 = SSA_VAL (ref2);
2170 		}
2171 	      else
2172 		return (void *)-1;
2173 	    }
2174 	}
2175       else
2176 	return (void *)-1;
2177       tree len = gimple_call_arg (def_stmt, 2);
2178       if (known_subrange_p (offset, maxsize, offset2,
2179 			    wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2180 	{
2181 	  tree val;
2182 	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2183 	    val = build_zero_cst (vr->type);
2184 	  else if (INTEGRAL_TYPE_P (vr->type)
2185 		   && known_eq (ref->size, 8))
2186 	    {
2187 	      gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2188 				      vr->type, gimple_call_arg (def_stmt, 1));
2189 	      val = vn_nary_build_or_lookup (&res_op);
2190 	      if (!val
2191 		  || (TREE_CODE (val) == SSA_NAME
2192 		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2193 		return (void *)-1;
2194 	    }
2195 	  else
2196 	    {
2197 	      unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2198 	      unsigned char *buf = XALLOCAVEC (unsigned char, len);
2199 	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2200 		      len);
2201 	      val = native_interpret_expr (vr->type, buf, len);
2202 	      if (!val)
2203 		return (void *)-1;
2204 	    }
2205 	  return vn_reference_lookup_or_insert_for_pieces
2206 	           (vuse, vr->set, vr->type, vr->operands, val);
2207 	}
2208     }
2209 
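     /* A minimal example of case 1) above (made-up source):
          memset (&a, 0, sizeof (a));
          ...
          x_1 = a.i;
        when the load is fully covered by the memset, x_1 value-numbers
        to zero; for a non-zero fill byte the byte pattern is replicated
        into a buffer and decoded with native_interpret_expr, so with
        memset (&a, 0x11, ...) a 32-bit integer load reads 0x11111111.  */
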
2210   /* 2) Assignment from an empty CONSTRUCTOR.  */
2211   else if (is_gimple_reg_type (vr->type)
2212 	   && gimple_assign_single_p (def_stmt)
2213 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2214 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2215     {
2216       tree base2;
2217       poly_int64 offset2, size2, maxsize2;
2218       bool reverse;
2219       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2220 				       &offset2, &size2, &maxsize2, &reverse);
2221       if (known_size_p (maxsize2)
2222 	  && known_eq (maxsize2, size2)
2223 	  && operand_equal_p (base, base2, 0)
2224 	  && known_subrange_p (offset, maxsize, offset2, size2))
2225 	{
2226 	  tree val = build_zero_cst (vr->type);
2227 	  return vn_reference_lookup_or_insert_for_pieces
2228 	           (vuse, vr->set, vr->type, vr->operands, val);
2229 	}
2230     }
2231 
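     /* A minimal example of case 2) above (made-up source):
          a = {};
          ...
          x_1 = a.i;
        any load contained in the stored range reads zero, so x_1
        value-numbers to build_zero_cst of its type.  */
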
2232   /* 3) Assignment from a constant.  We can use folds native encode/interpret
2233      routines to extract the assigned bits.  */
2234   else if (known_eq (ref->size, maxsize)
2235 	   && is_gimple_reg_type (vr->type)
2236 	   && !contains_storage_order_barrier_p (vr->operands)
2237 	   && gimple_assign_single_p (def_stmt)
2238 	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2239 	   /* native_encode and native_decode operate on arrays of bytes
2240 	      and so fundamentally need a compile-time size and offset.  */
2241 	   && maxsize.is_constant (&maxsizei)
2242 	   && maxsizei % BITS_PER_UNIT == 0
2243 	   && offset.is_constant (&offseti)
2244 	   && offseti % BITS_PER_UNIT == 0
2245 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2246 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2247 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2248     {
2249       tree base2;
2250       HOST_WIDE_INT offset2, size2;
2251       bool reverse;
2252       base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2253 					   &offset2, &size2, &reverse);
2254       if (base2
2255 	  && !reverse
2256 	  && size2 % BITS_PER_UNIT == 0
2257 	  && offset2 % BITS_PER_UNIT == 0
2258 	  && operand_equal_p (base, base2, 0)
2259 	  && known_subrange_p (offseti, maxsizei, offset2, size2))
2260 	{
2261 	  /* We support up to 512-bit values (for V8DFmode).  */
2262 	  unsigned char buffer[64];
2263 	  int len;
2264 
2265 	  tree rhs = gimple_assign_rhs1 (def_stmt);
2266 	  if (TREE_CODE (rhs) == SSA_NAME)
2267 	    rhs = SSA_VAL (rhs);
2268 	  unsigned pad = 0;
2269 	  if (BYTES_BIG_ENDIAN
2270 	      && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs))))
2271 	    {
2272 	      /* On big-endian the padding is at the 'front' so
2273 		 just skip the initial bytes.  */
2274 	      fixed_size_mode mode
2275 		  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (rhs)));
2276 	      pad = GET_MODE_SIZE (mode) - size2 / BITS_PER_UNIT;
2277 	    }
2278 	  len = native_encode_expr (rhs,
2279 				    buffer, sizeof (buffer),
2280 				    ((offseti - offset2) / BITS_PER_UNIT
2281 				     + pad));
2282 	  if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2283 	    {
2284 	      tree type = vr->type;
2285 	      /* Make sure to interpret in a type that has a range
2286 	         covering the whole access size.  */
2287 	      if (INTEGRAL_TYPE_P (vr->type)
2288 		  && maxsizei != TYPE_PRECISION (vr->type))
2289 		type = build_nonstandard_integer_type (maxsizei,
2290 						       TYPE_UNSIGNED (type));
2291 	      tree val = native_interpret_expr (type, buffer,
2292 						maxsizei / BITS_PER_UNIT);
2293 	      /* If we chop off bits because the type's precision doesn't
2294 		 match the memory access size this is ok when optimizing
2295 		 reads but not when called from the DSE code during
2296 		 elimination.  */
2297 	      if (val
2298 		  && type != vr->type)
2299 		{
2300 		  if (! int_fits_type_p (val, vr->type))
2301 		    val = NULL_TREE;
2302 		  else
2303 		    val = fold_convert (vr->type, val);
2304 		}
2305 
2306 	      if (val)
2307 		return vn_reference_lookup_or_insert_for_pieces
2308 			 (vuse, vr->set, vr->type, vr->operands, val);
2309 	    }
2310 	}
2311     }
2312 
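     /* A minimal example of case 3) above (made-up source):
          a.i = 0x11223344;
          ...
          x_1 = a.s;
        with a.s a 16-bit field overlapping the first two bytes of a.i:
        the constant is serialized with native_encode_expr and the
        accessed bytes decoded with native_interpret_expr, yielding
        0x3344 on little-endian targets and 0x1122 on big-endian ones.  */
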
2313   /* 4) Assignment from an SSA name which definition we may be able
2314      to access pieces from.  */
2315   else if (known_eq (ref->size, maxsize)
2316 	   && is_gimple_reg_type (vr->type)
2317 	   && !contains_storage_order_barrier_p (vr->operands)
2318 	   && gimple_assign_single_p (def_stmt)
2319 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2320     {
2321       tree base2;
2322       poly_int64 offset2, size2, maxsize2;
2323       bool reverse;
2324       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2325 				       &offset2, &size2, &maxsize2,
2326 				       &reverse);
2327       tree def_rhs = gimple_assign_rhs1 (def_stmt);
2328       if (!reverse
2329 	  && known_size_p (maxsize2)
2330 	  && known_eq (maxsize2, size2)
2331 	  && operand_equal_p (base, base2, 0)
2332 	  && known_subrange_p (offset, maxsize, offset2, size2)
2333 	  /* ???  We can't handle bitfield precision extracts without
2334 	     either using an alternate type for the BIT_FIELD_REF and
2335 	     then doing a conversion or possibly adjusting the offset
2336 	     according to endianness.  */
2337 	  && (! INTEGRAL_TYPE_P (vr->type)
2338 	      || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2339 	  && multiple_p (ref->size, BITS_PER_UNIT)
2340 	  && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2341 	      || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
2342 	{
2343 	  gimple_match_op op (gimple_match_cond::UNCOND,
2344 			      BIT_FIELD_REF, vr->type,
2345 			      vn_valueize (def_rhs),
2346 			      bitsize_int (ref->size),
2347 			      bitsize_int (offset - offset2));
2348 	  tree val = vn_nary_build_or_lookup (&op);
2349 	  if (val
2350 	      && (TREE_CODE (val) != SSA_NAME
2351 		  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2352 	    {
2353 	      vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2354 		  (vuse, vr->set, vr->type, vr->operands, val);
2355 	      return res;
2356 	    }
2357 	}
2358     }
2359 
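     /* A minimal example of case 4) above (made-up source):
          a = v_2;
          ...
          x_1 = a.f;
        where v_2 is a register-type SSA name stored in full: the load
        is rewritten as
          x_1 = BIT_FIELD_REF <v_2, size, offset>;
        and value-numbered through vn_nary_build_or_lookup.  */
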
2360   /* 5) For aggregate copies translate the reference through them if
2361      the copy kills ref.  */
2362   else if (data->vn_walk_kind == VN_WALKREWRITE
2363 	   && gimple_assign_single_p (def_stmt)
2364 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
2365 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2366 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2367     {
2368       tree base2;
2369       int i, j, k;
2370       auto_vec<vn_reference_op_s> rhs;
2371       vn_reference_op_t vro;
2372       ao_ref r;
2373 
2374       if (!lhs_ref_ok)
2375 	return (void *)-1;
2376 
2377       /* See if the assignment kills REF.  */
2378       base2 = ao_ref_base (&lhs_ref);
2379       if (!lhs_ref.max_size_known_p ()
2380 	  || (base != base2
2381 	      && (TREE_CODE (base) != MEM_REF
2382 		  || TREE_CODE (base2) != MEM_REF
2383 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2384 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2385 					  TREE_OPERAND (base2, 1))))
2386 	  || !stmt_kills_ref_p (def_stmt, ref))
2387 	return (void *)-1;
2388 
2389       /* Find the common base of ref and the lhs.  lhs_ops already
2390          contains valueized operands for the lhs.  */
2391       i = vr->operands.length () - 1;
2392       j = lhs_ops.length () - 1;
2393       while (j >= 0 && i >= 0
2394 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2395 	{
2396 	  i--;
2397 	  j--;
2398 	}
2399 
2400       /* ???  The innermost op should always be a MEM_REF and we already
2401          checked that the assignment to the lhs kills vr.  Thus for
2402 	 aggregate copies using char[] types the vn_reference_op_eq
2403 	 may fail when comparing types for compatibility.  But we really
2404 	 don't care here - further lookups with the rewritten operands
2405 	 will simply fail if we messed up types too badly.  */
2406       poly_int64 extra_off = 0;
2407       if (j == 0 && i >= 0
2408 	  && lhs_ops[0].opcode == MEM_REF
2409 	  && maybe_ne (lhs_ops[0].off, -1))
2410 	{
2411 	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2412 	    i--, j--;
2413 	  else if (vr->operands[i].opcode == MEM_REF
2414 		   && maybe_ne (vr->operands[i].off, -1))
2415 	    {
2416 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
2417 	      i--, j--;
2418 	    }
2419 	}
2420 
2421       /* i now points to the first additional op.
2422 	 ???  LHS may not be completely contained in VR, one or more
2423 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
2424 	 try handling outermost VIEW_CONVERT_EXPRs.  */
2425       if (j != -1)
2426 	return (void *)-1;
2427 
2428       /* Punt if the additional ops contain a storage order barrier.  */
2429       for (k = i; k >= 0; k--)
2430 	{
2431 	  vro = &vr->operands[k];
2432 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2433 	    return (void *)-1;
2434 	}
2435 
2436       /* Now re-write REF to be based on the rhs of the assignment.  */
2437       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2438 
2439       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
2440       if (maybe_ne (extra_off, 0))
2441 	{
2442 	  if (rhs.length () < 2)
2443 	    return (void *)-1;
2444 	  int ix = rhs.length () - 2;
2445 	  if (rhs[ix].opcode != MEM_REF
2446 	      || known_eq (rhs[ix].off, -1))
2447 	    return (void *)-1;
2448 	  rhs[ix].off += extra_off;
2449 	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2450 					 build_int_cst (TREE_TYPE (rhs[ix].op0),
2451 							extra_off));
2452 	}
2453 
2454       /* We need to prepend vr->operands[0..i] to rhs.  */
2455       vec<vn_reference_op_s> old = vr->operands;
2456       if (i + 1 + rhs.length () > vr->operands.length ())
2457 	vr->operands.safe_grow (i + 1 + rhs.length ());
2458       else
2459 	vr->operands.truncate (i + 1 + rhs.length ());
2460       FOR_EACH_VEC_ELT (rhs, j, vro)
2461 	vr->operands[i + 1 + j] = *vro;
2462       vr->operands = valueize_refs (vr->operands);
2463       if (old == shared_lookup_references)
2464 	shared_lookup_references = vr->operands;
2465       vr->hashcode = vn_reference_compute_hash (vr);
2466 
2467       /* Try folding the new reference to a constant.  */
2468       tree val = fully_constant_vn_reference_p (vr);
2469       if (val)
2470 	return vn_reference_lookup_or_insert_for_pieces
2471 		 (vuse, vr->set, vr->type, vr->operands, val);
2472 
2473       /* Adjust *ref from the new operands.  */
2474       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2475 	return (void *)-1;
2476       /* This can happen with bitfields.  */
2477       if (maybe_ne (ref->size, r.size))
2478 	return (void *)-1;
2479       *ref = r;
2480 
2481       /* Do not update last seen VUSE after translating.  */
2482       data->last_vuse_ptr = NULL;
2483 
2484       /* Keep looking for the adjusted *REF / VR pair.  */
2485       return NULL;
2486     }
2487 
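     /* A minimal example of case 5) above (made-up source):
          a = b;
          ...
          x_1 = a.f;
        when the aggregate copy kills the reference, the lookup is
        rewritten in terms of the rhs and continues searching for a
        dominating definition of b.f, with any constant difference
        between the bases folded into the inner MEM_REF offset.  */
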
2488   /* 6) For memcpy copies translate the reference through them if
2489      the copy kills ref.  */
2490   else if (data->vn_walk_kind == VN_WALKREWRITE
2491 	   && is_gimple_reg_type (vr->type)
2492 	   /* ???  Handle BCOPY as well.  */
2493 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2494 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2495 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2496 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2497 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2498 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2499 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2500 	   && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2501     {
2502       tree lhs, rhs;
2503       ao_ref r;
2504       poly_int64 rhs_offset, lhs_offset;
2505       vn_reference_op_s op;
2506       poly_uint64 mem_offset;
2507       poly_int64 at, byte_maxsize;
2508 
2509       /* Only handle non-variable, addressable refs.  */
2510       if (maybe_ne (ref->size, maxsize)
2511 	  || !multiple_p (offset, BITS_PER_UNIT, &at)
2512 	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2513 	return (void *)-1;
2514 
2515       /* Extract a pointer base and an offset for the destination.  */
2516       lhs = gimple_call_arg (def_stmt, 0);
2517       lhs_offset = 0;
2518       if (TREE_CODE (lhs) == SSA_NAME)
2519 	{
2520 	  lhs = vn_valueize (lhs);
2521 	  if (TREE_CODE (lhs) == SSA_NAME)
2522 	    {
2523 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2524 	      if (gimple_assign_single_p (def_stmt)
2525 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2526 		lhs = gimple_assign_rhs1 (def_stmt);
2527 	    }
2528 	}
2529       if (TREE_CODE (lhs) == ADDR_EXPR)
2530 	{
2531 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2532 						    &lhs_offset);
2533 	  if (!tem)
2534 	    return (void *)-1;
2535 	  if (TREE_CODE (tem) == MEM_REF
2536 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2537 	    {
2538 	      lhs = TREE_OPERAND (tem, 0);
2539 	      if (TREE_CODE (lhs) == SSA_NAME)
2540 		lhs = vn_valueize (lhs);
2541 	      lhs_offset += mem_offset;
2542 	    }
2543 	  else if (DECL_P (tem))
2544 	    lhs = build_fold_addr_expr (tem);
2545 	  else
2546 	    return (void *)-1;
2547 	}
2548       if (TREE_CODE (lhs) != SSA_NAME
2549 	  && TREE_CODE (lhs) != ADDR_EXPR)
2550 	return (void *)-1;
2551 
2552       /* Extract a pointer base and an offset for the source.  */
2553       rhs = gimple_call_arg (def_stmt, 1);
2554       rhs_offset = 0;
2555       if (TREE_CODE (rhs) == SSA_NAME)
2556 	rhs = vn_valueize (rhs);
2557       if (TREE_CODE (rhs) == ADDR_EXPR)
2558 	{
2559 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2560 						    &rhs_offset);
2561 	  if (!tem)
2562 	    return (void *)-1;
2563 	  if (TREE_CODE (tem) == MEM_REF
2564 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2565 	    {
2566 	      rhs = TREE_OPERAND (tem, 0);
2567 	      rhs_offset += mem_offset;
2568 	    }
2569 	  else if (DECL_P (tem)
2570 		   || TREE_CODE (tem) == STRING_CST)
2571 	    rhs = build_fold_addr_expr (tem);
2572 	  else
2573 	    return (void *)-1;
2574 	}
2575       if (TREE_CODE (rhs) != SSA_NAME
2576 	  && TREE_CODE (rhs) != ADDR_EXPR)
2577 	return (void *)-1;
2578 
2579       /* The bases of the destination and the reference have to agree.  */
2580       if (TREE_CODE (base) == MEM_REF)
2581 	{
2582 	  if (TREE_OPERAND (base, 0) != lhs
2583 	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2584 	    return (void *) -1;
2585 	  at += mem_offset;
2586 	}
2587       else if (!DECL_P (base)
2588 	       || TREE_CODE (lhs) != ADDR_EXPR
2589 	       || TREE_OPERAND (lhs, 0) != base)
2590 	return (void *)-1;
2591 
2592       /* If the access is completely outside of the memcpy destination
2593 	 area there is no aliasing.  */
2594       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2595 	return NULL;
2596       /* And the access has to be contained within the memcpy destination.  */
2597       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2598 	return (void *)-1;
2599 
2600       /* Make room for 2 operands in the new reference.  */
2601       if (vr->operands.length () < 2)
2602 	{
2603 	  vec<vn_reference_op_s> old = vr->operands;
2604 	  vr->operands.safe_grow_cleared (2);
2605 	  if (old == shared_lookup_references)
2606 	    shared_lookup_references = vr->operands;
2607 	}
2608       else
2609 	vr->operands.truncate (2);
2610 
2611       /* The looked-through reference is a simple MEM_REF.  */
2612       memset (&op, 0, sizeof (op));
2613       op.type = vr->type;
2614       op.opcode = MEM_REF;
2615       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2616       op.off = at - lhs_offset + rhs_offset;
2617       vr->operands[0] = op;
2618       op.type = TREE_TYPE (rhs);
2619       op.opcode = TREE_CODE (rhs);
2620       op.op0 = rhs;
2621       op.off = -1;
2622       vr->operands[1] = op;
2623       vr->hashcode = vn_reference_compute_hash (vr);
2624 
2625       /* Try folding the new reference to a constant.  */
2626       tree val = fully_constant_vn_reference_p (vr);
2627       if (val)
2628 	return vn_reference_lookup_or_insert_for_pieces
2629 		 (vuse, vr->set, vr->type, vr->operands, val);
2630 
2631       /* Adjust *ref from the new operands.  */
2632       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2633 	return (void *)-1;
2634       /* This can happen with bitfields.  */
2635       if (maybe_ne (ref->size, r.size))
2636 	return (void *)-1;
2637       *ref = r;
2638 
2639       /* Do not update last seen VUSE after translating.  */
2640       data->last_vuse_ptr = NULL;
2641 
2642       /* Keep looking for the adjusted *REF / VR pair.  */
2643       return NULL;
2644     }
2645 
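     /* A minimal example of case 6) above (made-up source):
          memcpy (&d, &s, sizeof (d));
          ...
          x_1 = d.f;
        when the access is contained in the copied range, the reference
        is rebuilt as a MEM_REF based on &s at the corresponding offset
        and the walk continues from there.  */
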
2646   /* Bail out and stop walking.  */
2647   return (void *)-1;
2648 }
2649 
2650 /* Return a reference op vector from OP that can be used for
2651    vn_reference_lookup_pieces.  The caller is responsible for releasing
2652    the vector.  */
2653 
2654 vec<vn_reference_op_s>
2655 vn_reference_operands_for_lookup (tree op)
2656 {
2657   bool valueized;
2658   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2659 }
2660 
2661 /* Lookup a reference operation by its parts in the current hash table.
2662    Returns the resulting value number if it exists in the hash table,
2663    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
2664    vn_reference_t stored in the hashtable if something is found.  */
2665 
2666 tree
2667 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2668 			    vec<vn_reference_op_s> operands,
2669 			    vn_reference_t *vnresult, vn_lookup_kind kind)
2670 {
2671   struct vn_reference_s vr1;
2672   vn_reference_t tmp;
2673   tree cst;
2674 
2675   if (!vnresult)
2676     vnresult = &tmp;
2677   *vnresult = NULL;
2678 
2679   vr1.vuse = vuse_ssa_val (vuse);
2680   shared_lookup_references.truncate (0);
2681   shared_lookup_references.safe_grow (operands.length ());
2682   memcpy (shared_lookup_references.address (),
2683 	  operands.address (),
2684 	  sizeof (vn_reference_op_s)
2685 	  * operands.length ());
2686   vr1.operands = operands = shared_lookup_references
2687     = valueize_refs (shared_lookup_references);
2688   vr1.type = type;
2689   vr1.set = set;
2690   vr1.hashcode = vn_reference_compute_hash (&vr1);
2691   if ((cst = fully_constant_vn_reference_p (&vr1)))
2692     return cst;
2693 
2694   vn_reference_lookup_1 (&vr1, vnresult);
2695   if (!*vnresult
2696       && kind != VN_NOWALK
2697       && vr1.vuse)
2698     {
2699       ao_ref r;
2700       unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
2701       vn_walk_cb_data data (&vr1, NULL, kind, true);
2702       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2703 	*vnresult =
2704 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
2705 						  vn_reference_lookup_2,
2706 						  vn_reference_lookup_3,
2707 						  vuse_valueize, limit, &data);
2708       gcc_checking_assert (vr1.operands == shared_lookup_references);
2709     }
2710 
2711   if (*vnresult)
2712      return (*vnresult)->result;
2713 
2714   return NULL_TREE;
2715 }
2716 
2717 /* Lookup OP in the current hash table, and return the resulting value
2718    number if it exists in the hash table.  Return NULL_TREE if it does
2719    not exist in the hash table or if the result field of the structure
2720    was NULL..  VNRESULT will be filled in with the vn_reference_t
2721    stored in the hashtable if one exists.  When TBAA_P is false assume
2722    we are looking up a store and treat it as having alias-set zero.
2723    *LAST_VUSE_PTR will be updated with the VUSE the value lookup succeeded.  */
2724 
2725 tree
2726 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2727 		     vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
2728 {
2729   vec<vn_reference_op_s> operands;
2730   struct vn_reference_s vr1;
2731   tree cst;
2732   bool valueized_anything;
2733 
2734   if (vnresult)
2735     *vnresult = NULL;
2736 
2737   vr1.vuse = vuse_ssa_val (vuse);
2738   vr1.operands = operands
2739     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2740   vr1.type = TREE_TYPE (op);
2741   vr1.set = get_alias_set (op);
2742   vr1.hashcode = vn_reference_compute_hash (&vr1);
2743   if ((cst = fully_constant_vn_reference_p (&vr1)))
2744     return cst;
2745 
2746   if (kind != VN_NOWALK
2747       && vr1.vuse)
2748     {
2749       vn_reference_t wvnresult;
2750       ao_ref r;
2751       unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
2752       /* Make sure to use a valueized reference if we valueized anything.
2753          Otherwise preserve the full reference for advanced TBAA.  */
2754       if (!valueized_anything
2755 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2756 					     vr1.operands))
2757 	ao_ref_init (&r, op);
2758       vn_walk_cb_data data (&vr1, last_vuse_ptr, kind, tbaa_p);
2759       wvnresult =
2760 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
2761 						vn_reference_lookup_2,
2762 						vn_reference_lookup_3,
2763 						vuse_valueize, limit, &data);
2764       gcc_checking_assert (vr1.operands == shared_lookup_references);
2765       if (wvnresult)
2766 	{
2767 	  if (vnresult)
2768 	    *vnresult = wvnresult;
2769 	  return wvnresult->result;
2770 	}
2771 
2772       return NULL_TREE;
2773     }
2774 
2775   return vn_reference_lookup_1 (&vr1, vnresult);
2776 }
2777 
2778 /* Lookup CALL in the current hash table and return the entry in
2779    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
2780 
2781 void
2782 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2783 			  vn_reference_t vr)
2784 {
2785   if (vnresult)
2786     *vnresult = NULL;
2787 
2788   tree vuse = gimple_vuse (call);
2789 
2790   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2791   vr->operands = valueize_shared_reference_ops_from_call (call);
2792   vr->type = gimple_expr_type (call);
2793   vr->set = 0;
2794   vr->hashcode = vn_reference_compute_hash (vr);
2795   vn_reference_lookup_1 (vr, vnresult);
2796 }
2797 
2798 /* Insert OP into the current hash table with a value number of RESULT.  */
2799 
2800 static void
2801 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2802 {
2803   vn_reference_s **slot;
2804   vn_reference_t vr1;
2805   bool tem;
2806 
2807   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2808   if (TREE_CODE (result) == SSA_NAME)
2809     vr1->value_id = VN_INFO (result)->value_id;
2810   else
2811     vr1->value_id = get_or_alloc_constant_value_id (result);
2812   vr1->vuse = vuse_ssa_val (vuse);
2813   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2814   vr1->type = TREE_TYPE (op);
2815   vr1->set = get_alias_set (op);
2816   vr1->hashcode = vn_reference_compute_hash (vr1);
2817   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2818   vr1->result_vdef = vdef;
2819 
2820   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2821 						      INSERT);
2822 
2823   /* Because IL walking on reference lookup can end up visiting
2824      a def that is only to be visited later in iteration order
2825      when we are about to make an irreducible region reducible,
2826      the def can be effectively processed and its ref already
2827      inserted by vn_reference_lookup_3.  So we cannot assert (!*slot)
2828      but we do save a lookup if we deal with already inserted refs here.  */
2829   if (*slot)
2830     {
2831       /* We cannot assert that we have the same value either because
2832          when disentangling an irreducible region we may end up visiting
2833 	 a use before the corresponding def.  That's a missed optimization
2834 	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
2835       if (dump_file && (dump_flags & TDF_DETAILS)
2836 	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
2837 	{
2838 	  fprintf (dump_file, "Keeping old value ");
2839 	  print_generic_expr (dump_file, (*slot)->result);
2840 	  fprintf (dump_file, " because of collision\n");
2841 	}
2842       free_reference (vr1);
2843       obstack_free (&vn_tables_obstack, vr1);
2844       return;
2845     }
2846 
2847   *slot = vr1;
2848   vr1->next = last_inserted_ref;
2849   last_inserted_ref = vr1;
2850 }
2851 
2852 /* Insert a reference by its pieces into the current hash table with
2853    a value number of RESULT.  Return the resulting reference
2854    structure we created.  */
2855 
2856 vn_reference_t
2857 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2858 			    vec<vn_reference_op_s> operands,
2859 			    tree result, unsigned int value_id)
2860 
2861 {
2862   vn_reference_s **slot;
2863   vn_reference_t vr1;
2864 
2865   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2866   vr1->value_id = value_id;
2867   vr1->vuse = vuse_ssa_val (vuse);
2868   vr1->operands = valueize_refs (operands);
2869   vr1->type = type;
2870   vr1->set = set;
2871   vr1->hashcode = vn_reference_compute_hash (vr1);
2872   if (result && TREE_CODE (result) == SSA_NAME)
2873     result = SSA_VAL (result);
2874   vr1->result = result;
2875 
2876   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2877 						      INSERT);
2878 
2879   /* At this point we should have all the things inserted that we have
2880      seen before, and we should never try inserting something that
2881      already exists.  */
2882   gcc_assert (!*slot);
2883 
2884   *slot = vr1;
2885   vr1->next = last_inserted_ref;
2886   last_inserted_ref = vr1;
2887   return vr1;
2888 }
2889 
2890 /* Compute and return the hash value for nary operation VNO1.  */
2891 
2892 static hashval_t
2893 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2894 {
2895   inchash::hash hstate;
2896   unsigned i;
2897 
2898   for (i = 0; i < vno1->length; ++i)
2899     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2900       vno1->op[i] = SSA_VAL (vno1->op[i]);
2901 
2902   if (((vno1->length == 2
2903 	&& commutative_tree_code (vno1->opcode))
2904        || (vno1->length == 3
2905 	   && commutative_ternary_tree_code (vno1->opcode)))
2906       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2907     std::swap (vno1->op[0], vno1->op[1]);
2908   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2909 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2910     {
2911       std::swap (vno1->op[0], vno1->op[1]);
2912       vno1->opcode = swap_tree_comparison (vno1->opcode);
2913     }
2914 
2915   hstate.add_int (vno1->opcode);
2916   for (i = 0; i < vno1->length; ++i)
2917     inchash::add_expr (vno1->op[i], hstate);
2918 
2919   return hstate.end ();
2920 }
2921 
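     /* Illustrative consequence of the canonicalization above (made-up
        names): the operations
          _1 = b_2 + a_3;   and   _4 = a_3 + b_2;
        hash and compare identically and thus share a value number;
        comparisons are normalized as well, e.g. a_3 > b_2 is recorded
        as b_2 < a_3 when tree_swap_operands_p requests a swap.  */
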
2922 /* Compare nary operations VNO1 and VNO2 and return true if they are
2923    equivalent.  */
2924 
2925 bool
2926 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2927 {
2928   unsigned i;
2929 
2930   if (vno1->hashcode != vno2->hashcode)
2931     return false;
2932 
2933   if (vno1->length != vno2->length)
2934     return false;
2935 
2936   if (vno1->opcode != vno2->opcode
2937       || !types_compatible_p (vno1->type, vno2->type))
2938     return false;
2939 
2940   for (i = 0; i < vno1->length; ++i)
2941     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2942       return false;
2943 
2944   /* BIT_INSERT_EXPR has an implicit operand, the type precision
2945      of op1.  We need to check that they are the same.  */
2946   if (vno1->opcode == BIT_INSERT_EXPR
2947       && TREE_CODE (vno1->op[1]) == INTEGER_CST
2948       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2949 	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2950     return false;
2951 
2952   return true;
2953 }
2954 
2955 /* Initialize VNO from the pieces provided.  */
2956 
2957 static void
2958 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2959 			     enum tree_code code, tree type, tree *ops)
2960 {
2961   vno->opcode = code;
2962   vno->length = length;
2963   vno->type = type;
2964   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2965 }
2966 
2967 /* Initialize VNO from OP.  */
2968 
2969 static void
2970 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2971 {
2972   unsigned i;
2973 
2974   vno->opcode = TREE_CODE (op);
2975   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2976   vno->type = TREE_TYPE (op);
2977   for (i = 0; i < vno->length; ++i)
2978     vno->op[i] = TREE_OPERAND (op, i);
2979 }
2980 
2981 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2982 
2983 static unsigned int
2984 vn_nary_length_from_stmt (gimple *stmt)
2985 {
2986   switch (gimple_assign_rhs_code (stmt))
2987     {
2988     case REALPART_EXPR:
2989     case IMAGPART_EXPR:
2990     case VIEW_CONVERT_EXPR:
2991       return 1;
2992 
2993     case BIT_FIELD_REF:
2994       return 3;
2995 
2996     case CONSTRUCTOR:
2997       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2998 
2999     default:
3000       return gimple_num_ops (stmt) - 1;
3001     }
3002 }
3003 
3004 /* Initialize VNO from STMT.  */
3005 
3006 static void
3007 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3008 {
3009   unsigned i;
3010 
3011   vno->opcode = gimple_assign_rhs_code (stmt);
3012   vno->type = gimple_expr_type (stmt);
3013   switch (vno->opcode)
3014     {
3015     case REALPART_EXPR:
3016     case IMAGPART_EXPR:
3017     case VIEW_CONVERT_EXPR:
3018       vno->length = 1;
3019       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3020       break;
3021 
3022     case BIT_FIELD_REF:
3023       vno->length = 3;
3024       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3025       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3026       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3027       break;
3028 
3029     case CONSTRUCTOR:
3030       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3031       for (i = 0; i < vno->length; ++i)
3032 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3033       break;
3034 
3035     default:
3036       gcc_checking_assert (!gimple_assign_single_p (stmt));
3037       vno->length = gimple_num_ops (stmt) - 1;
3038       for (i = 0; i < vno->length; ++i)
3039 	vno->op[i] = gimple_op (stmt, i + 1);
3040     }
3041 }
3042 
3043 /* Compute the hashcode for VNO and look for it in the hash table;
3044    return the resulting value number if it exists in the hash table.
3045    Return NULL_TREE if it does not exist in the hash table or if the
3046    result field of the operation is NULL.  VNRESULT will contain the
3047    vn_nary_op_t from the hashtable if it exists.  */
3048 
3049 static tree
3050 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3051 {
3052   vn_nary_op_s **slot;
3053 
3054   if (vnresult)
3055     *vnresult = NULL;
3056 
3057   vno->hashcode = vn_nary_op_compute_hash (vno);
3058   slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3059   if (!slot)
3060     return NULL_TREE;
3061   if (vnresult)
3062     *vnresult = *slot;
3063   return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3064 }
3065 
3066 /* Lookup an n-ary operation by its pieces and return the resulting value
3067    number if it exists in the hash table.  Return NULL_TREE if it does
3068    not exist in the hash table or if the result field of the operation
3069    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3070    if it exists.  */
3071 
3072 tree
3073 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3074 			  tree type, tree *ops, vn_nary_op_t *vnresult)
3075 {
3076   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3077 				  sizeof_vn_nary_op (length));
3078   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3079   return vn_nary_op_lookup_1 (vno1, vnresult);
3080 }
3081 
3082 /* Lookup OP in the current hash table, and return the resulting value
3083    number if it exists in the hash table.  Return NULL_TREE if it does
3084    not exist in the hash table or if the result field of the operation
3085    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3086    if it exists.  */
3087 
3088 tree
3089 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
3090 {
3091   vn_nary_op_t vno1
3092     = XALLOCAVAR (struct vn_nary_op_s,
3093 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
3094   init_vn_nary_op_from_op (vno1, op);
3095   return vn_nary_op_lookup_1 (vno1, vnresult);
3096 }
3097 
3098 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3099    value number if it exists in the hash table.  Return NULL_TREE if
3100    it does not exist in the hash table.  VNRESULT will contain the
3101    vn_nary_op_t from the hashtable if it exists.  */
3102 
3103 tree
3104 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3105 {
3106   vn_nary_op_t vno1
3107     = XALLOCAVAR (struct vn_nary_op_s,
3108 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3109   init_vn_nary_op_from_stmt (vno1, stmt);
3110   return vn_nary_op_lookup_1 (vno1, vnresult);
3111 }
3112 
3113 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
3114 
3115 static vn_nary_op_t
3116 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3117 {
3118   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3119 }
3120 
3121 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3122    obstack.  */
3123 
3124 static vn_nary_op_t
3125 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3126 {
3127   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3128 
3129   vno1->value_id = value_id;
3130   vno1->length = length;
3131   vno1->predicated_values = 0;
3132   vno1->u.result = result;
3133 
3134   return vno1;
3135 }
3136 
3137 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
3138    VNO->HASHCODE first.  */
3139 
3140 static vn_nary_op_t
3141 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3142 			bool compute_hash)
3143 {
3144   vn_nary_op_s **slot;
3145 
3146   if (compute_hash)
3147     {
3148       vno->hashcode = vn_nary_op_compute_hash (vno);
3149       gcc_assert (! vno->predicated_values
3150 		  || (! vno->u.values->next
3151 		      && vno->u.values->n == 1));
3152     }
3153 
3154   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3155   vno->unwind_to = *slot;
3156   if (*slot)
3157     {
3158       /* Prefer non-predicated values.
3159          ???  Only if those are constant, otherwise, with constant predicated
3160 	 value, turn them into predicated values with entry-block validity
3161 	 (???  but we always find the first valid result currently).  */
3162       if ((*slot)->predicated_values
3163 	  && ! vno->predicated_values)
3164 	{
3165 	  /* ???  We cannot remove *slot from the unwind stack list.
3166 	     For the moment we deal with this by skipping not found
3167 	     entries but this isn't ideal ...  */
3168 	  *slot = vno;
3169 	  /* ???  Maintain a stack of states we can unwind in
3170 	     vn_nary_op_s?  But how far do we unwind?  In reality
3171 	     we need to push change records somewhere...  Or not
3172 	     unwind vn_nary_op_s and linking them but instead
3173 	     unwind the results "list", linking that, which also
3174 	     doesn't move on hashtable resize.  */
3175 	  /* We can also have a ->unwind_to recording *slot there.
3176 	     That way we can make u.values a fixed size array with
3177 	     recording the number of entries but of course we then
3178 	     have always N copies for each unwind_to-state.  Or we
3179              make sure to only ever append and each unwinding will
3180 	     pop off one entry (but how to deal with predicated
3181 	     replaced with non-predicated here?)  */
3182 	  vno->next = last_inserted_nary;
3183 	  last_inserted_nary = vno;
3184 	  return vno;
3185 	}
3186       else if (vno->predicated_values
3187 	       && ! (*slot)->predicated_values)
3188 	return *slot;
3189       else if (vno->predicated_values
3190 	       && (*slot)->predicated_values)
3191 	{
3192 	  /* ???  Factor this all into a insert_single_predicated_value
3193 	     routine.  */
3194 	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3195 	  basic_block vno_bb
3196 	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3197 	  vn_pval *nval = vno->u.values;
3198 	  vn_pval **next = &vno->u.values;
3199 	  bool found = false;
3200 	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3201 	    {
3202 	      if (expressions_equal_p (val->result, vno->u.values->result))
3203 		{
3204 		  found = true;
3205 		  for (unsigned i = 0; i < val->n; ++i)
3206 		    {
3207 		      basic_block val_bb
3208 			= BASIC_BLOCK_FOR_FN (cfun,
3209 					      val->valid_dominated_by_p[i]);
3210 		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3211 			/* Value registered with more generic predicate.  */
3212 			return *slot;
3213 		      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3214 			/* Shouldn't happen, we insert in RPO order.  */
3215 			gcc_unreachable ();
3216 		    }
3217 		  /* Append value.  */
3218 		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3219 						     sizeof (vn_pval)
3220 						     + val->n * sizeof (int));
3221 		  (*next)->next = NULL;
3222 		  (*next)->result = val->result;
3223 		  (*next)->n = val->n + 1;
3224 		  memcpy ((*next)->valid_dominated_by_p,
3225 			  val->valid_dominated_by_p,
3226 			  val->n * sizeof (int));
3227 		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3228 		  next = &(*next)->next;
3229 		  if (dump_file && (dump_flags & TDF_DETAILS))
3230 		    fprintf (dump_file, "Appending predicate to value.\n");
3231 		  continue;
3232 		}
3233 	      /* Copy other predicated values.  */
3234 	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3235 						 sizeof (vn_pval)
3236 						 + (val->n-1) * sizeof (int));
3237 	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3238 	      (*next)->next = NULL;
3239 	      next = &(*next)->next;
3240 	    }
3241 	  if (!found)
3242 	    *next = nval;
3243 
3244 	  *slot = vno;
3245 	  vno->next = last_inserted_nary;
3246 	  last_inserted_nary = vno;
3247 	  return vno;
3248 	}
3249 
3250       /* While we do not want to insert things twice, it's awkward to
3251 	 avoid it in the case where visit_nary_op pattern-matches stuff
3252 	 and ends up simplifying the replacement to itself.  We then
3253 	 get two inserts, one from visit_nary_op and one from
3254 	 vn_nary_build_or_lookup.
3255 	 So allow inserts with the same value number.  */
3256       if ((*slot)->u.result == vno->u.result)
3257 	return *slot;
3258     }
3259 
3260   /* ???  There's also optimistic vs. previous committed state merging
3261      that is problematic for the case of unwinding.  */
3262 
3263   /* ???  We should return NULL if we do not use 'vno' and have the
3264      caller release it.  */
3265   gcc_assert (!*slot);
3266 
3267   *slot = vno;
3268   vno->next = last_inserted_nary;
3269   last_inserted_nary = vno;
3270   return vno;
3271 }
3272 
3273 /* Insert an n-ary operation into the current hash table using its
3274    pieces.  Return the vn_nary_op_t structure we created and put in
3275    the hashtable.  */
3276 
3277 vn_nary_op_t
3278 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3279 			  tree type, tree *ops,
3280 			  tree result, unsigned int value_id)
3281 {
3282   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3283   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3284   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3285 }
3286 
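/* Insert an n-ary operation into the current hash table, predicated
   on edge PRED_E, using the operation's pieces.  RESULT is recorded
   as valid only in blocks dominated by the destination of PRED_E.
   Return NULL if the predicate cannot be represented by the
   destination block, for example when PRED_E is a backedge.  */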
3287 static vn_nary_op_t
3288 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3289 				     tree type, tree *ops,
3290 				     tree result, unsigned int value_id,
3291 				     edge pred_e)
3292 {
3293   /* ???  Currently tracking BBs.  */
3294   if (! single_pred_p (pred_e->dest))
3295     {
3296       /* Never record for backedges.  */
3297       if (pred_e->flags & EDGE_DFS_BACK)
3298 	return NULL;
3299       edge_iterator ei;
3300       edge e;
3301       int cnt = 0;
3302       /* Ignore backedges.  */
3303       FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3304 	if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3305 	  cnt++;
3306       if (cnt != 1)
3307 	return NULL;
3308     }
3309   if (dump_file && (dump_flags & TDF_DETAILS)
3310       /* ???  Fix dumping, but currently we only get comparisons.  */
3311       && TREE_CODE_CLASS (code) == tcc_comparison)
3312     {
3313       fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3314 	       pred_e->dest->index);
3315       print_generic_expr (dump_file, ops[0], TDF_SLIM);
3316       fprintf (dump_file, " %s ", get_tree_code_name (code));
3317       print_generic_expr (dump_file, ops[1], TDF_SLIM);
3318       fprintf (dump_file, " == %s\n",
3319 	       integer_zerop (result) ? "false" : "true");
3320     }
3321   vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3322   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3323   vno1->predicated_values = 1;
3324   vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3325 					      sizeof (vn_pval));
3326   vno1->u.values->next = NULL;
3327   vno1->u.values->result = result;
3328   vno1->u.values->n = 1;
3329   vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3330   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3331 }
3332 
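/* As an illustrative sketch (block numbers and SSA names are made
   up), value numbering the condition

     if (i_1 < 0)

   with true edge 2->3 and false edge 2->4 records two predicated
   values for the expression i_1 < 0:

     { result: boolean true,  n: 1, valid_dominated_by_p: { 3 } }
     { result: boolean false, n: 1, valid_dominated_by_p: { 4 } }

   assuming both edge destinations have a single predecessor.  A later
   lookup of i_1 < 0 from a block dominated by bb 3 then yields true,
   see vn_nary_op_get_predicated_value below.  */
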
3333 static bool
3334 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3335 
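/* Return the result recorded for the n-ary operation VNO when queried
   from block BB.  For predicated values return the result whose
   recording block dominates BB (taking non-executable edges into
   account), or NULL_TREE if there is none.  */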
3336 static tree
3337 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3338 {
3339   if (! vno->predicated_values)
3340     return vno->u.result;
3341   for (vn_pval *val = vno->u.values; val; val = val->next)
3342     for (unsigned i = 0; i < val->n; ++i)
3343       if (dominated_by_p_w_unex (bb,
3344 			  BASIC_BLOCK_FOR_FN
3345 			    (cfun, val->valid_dominated_by_p[i])))
3346 	return val->result;
3347   return NULL_TREE;
3348 }
3349 
3350 /* Insert OP into the current hash table with a value number of
3351    RESULT.  Return the vn_nary_op_t structure we created and put in
3352    the hashtable.  */
3353 
3354 vn_nary_op_t
3355 vn_nary_op_insert (tree op, tree result)
3356 {
3357   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3358   vn_nary_op_t vno1;
3359 
3360   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3361   init_vn_nary_op_from_op (vno1, op);
3362   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3363 }
3364 
3365 /* Insert the rhs of STMT into the current hash table with a value number of
3366    RESULT.  */
3367 
3368 static vn_nary_op_t
3369 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3370 {
3371   vn_nary_op_t vno1
3372     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3373 			result, VN_INFO (result)->value_id);
3374   init_vn_nary_op_from_stmt (vno1, stmt);
3375   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3376 }
3377 
3378 /* Compute a hashcode for PHI operation VP1 and return it.  */
3379 
3380 static inline hashval_t
3381 vn_phi_compute_hash (vn_phi_t vp1)
3382 {
3383   inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3384 			? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3385   tree phi1op;
3386   tree type;
3387   edge e;
3388   edge_iterator ei;
3389 
3390   /* If all PHI arguments are constants we need to distinguish
3391      the PHI node via its type.  */
3392   type = vp1->type;
3393   hstate.merge_hash (vn_hash_type (type));
3394 
3395   FOR_EACH_EDGE (e, ei, vp1->block->preds)
3396     {
3397       /* Don't hash backedge values; they need to be handled as VN_TOP
3398          for optimistic value-numbering.  */
3399       if (e->flags & EDGE_DFS_BACK)
3400 	continue;
3401 
3402       phi1op = vp1->phiargs[e->dest_idx];
3403       if (phi1op == VN_TOP)
3404 	continue;
3405       inchash::add_expr (phi1op, hstate);
3406     }
3407 
3408   return hstate.end ();
3409 }
3410 
3411 
3412 /* Return true if COND1 and COND2 represent the same condition; set
3413    *INVERTED_P if one needs to be inverted to make it the same as
3414    the other.  */
3415 
3416 static bool
3417 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3418 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3419 {
3420   enum tree_code code1 = gimple_cond_code (cond1);
3421   enum tree_code code2 = gimple_cond_code (cond2);
3422 
3423   *inverted_p = false;
3424   if (code1 == code2)
3425     ;
3426   else if (code1 == swap_tree_comparison (code2))
3427     std::swap (lhs2, rhs2);
3428   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3429     *inverted_p = true;
3430   else if (code1 == invert_tree_comparison
3431 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3432     {
3433       std::swap (lhs2, rhs2);
3434       *inverted_p = true;
3435     }
3436   else
3437     return false;
3438 
3439   return ((expressions_equal_p (lhs1, lhs2)
3440 	   && expressions_equal_p (rhs1, rhs2))
3441 	  || (commutative_tree_code (code1)
3442 	      && expressions_equal_p (lhs1, rhs2)
3443 	      && expressions_equal_p (rhs1, lhs2)));
3444 }
3445 
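/* For example (a sketch, not taken from actual IL), with operands
   a_1 and b_2 the conditions

     a_1 < b_2  and  b_2 > a_1   are equal after swapping lhs2/rhs2,
     a_1 < b_2  and  a_1 >= b_2  are equal with *INVERTED_P set,
     a_1 < b_2  and  b_2 <= a_1  need both the swap and the inversion,

   where the inversions are additionally guarded by HONOR_NANS for
   floating-point operands.  */
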
3446 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
3447 
3448 static int
3449 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3450 {
3451   if (vp1->hashcode != vp2->hashcode)
3452     return false;
3453 
3454   if (vp1->block != vp2->block)
3455     {
3456       if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3457 	return false;
3458 
3459       switch (EDGE_COUNT (vp1->block->preds))
3460 	{
3461 	case 1:
3462 	  /* Single-arg PHIs are just copies.  */
3463 	  break;
3464 
3465 	case 2:
3466 	  {
3467 	    /* Rule out backedges into the PHI.  */
3468 	    if (vp1->block->loop_father->header == vp1->block
3469 		|| vp2->block->loop_father->header == vp2->block)
3470 	      return false;
3471 
3472 	    /* If the PHI nodes do not have compatible types
3473 	       they are not the same.  */
3474 	    if (!types_compatible_p (vp1->type, vp2->type))
3475 	      return false;
3476 
3477 	    basic_block idom1
3478 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3479 	    basic_block idom2
3480 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3481 	    /* If an immediate dominator ends in a switch stmt, multiple
3482 	       values may end up in the same PHI arg via intermediate
3483 	       CFG merges.  */
3484 	    if (EDGE_COUNT (idom1->succs) != 2
3485 		|| EDGE_COUNT (idom2->succs) != 2)
3486 	      return false;
3487 
3488 	    /* Verify the controlling stmt is the same.  */
3489 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3490 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3491 	    if (! last1 || ! last2)
3492 	      return false;
3493 	    bool inverted_p;
3494 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3495 				      last2, vp2->cclhs, vp2->ccrhs,
3496 				      &inverted_p))
3497 	      return false;
3498 
3499 	    /* Get at true/false controlled edges into the PHI.  */
3500 	    edge te1, te2, fe1, fe2;
3501 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
3502 						       &te1, &fe1)
3503 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
3504 							  &te2, &fe2))
3505 	      return false;
3506 
3507 	    /* Swap edges if the second condition is the inverted of the
3508 	       first.  */
3509 	    if (inverted_p)
3510 	      std::swap (te2, fe2);
3511 
3512 	    /* ???  Handle VN_TOP specially.  */
3513 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3514 				       vp2->phiargs[te2->dest_idx])
3515 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3516 					  vp2->phiargs[fe2->dest_idx]))
3517 	      return false;
3518 
3519 	    return true;
3520 	  }
3521 
3522 	default:
3523 	  return false;
3524 	}
3525     }
3526 
3527   /* If the PHI nodes do not have compatible types
3528      they are not the same.  */
3529   if (!types_compatible_p (vp1->type, vp2->type))
3530     return false;
3531 
3532   /* Any phi in the same block will have its arguments in the
3533      same edge order, because of how we store phi nodes.  */
3534   for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3535     {
3536       tree phi1op = vp1->phiargs[i];
3537       tree phi2op = vp2->phiargs[i];
3538       if (phi1op == VN_TOP || phi2op == VN_TOP)
3539 	continue;
3540       if (!expressions_equal_p (phi1op, phi2op))
3541 	return false;
3542     }
3543 
3544   return true;
3545 }
3546 
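/* An example of the two-predecessor case handled above (a sketch with
   made-up SSA names and block numbers):

     bb 3: x_3 = PHI <a_1(1), b_2(2)>
     bb 6: y_4 = PHI <a_1(4), b_2(5)>

   The two PHIs can receive the same value number when, in addition to
   the type and backedge checks, the immediate dominators of bb 3 and
   bb 6 end in the same (or inverted) condition and the true/false
   controlled edges select the same arguments.  */
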
3547 /* Lookup PHI in the current hash table, and return the resulting
3548    value number if it exists in the hash table.  Return NULL_TREE if
3549    it does not exist in the hash table. */
3550 
3551 static tree
3552 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3553 {
3554   vn_phi_s **slot;
3555   struct vn_phi_s *vp1;
3556   edge e;
3557   edge_iterator ei;
3558 
3559   vp1 = XALLOCAVAR (struct vn_phi_s,
3560 		    sizeof (struct vn_phi_s)
3561 		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3562 
3563   /* Canonicalize the SSA_NAME's to their value number.  */
3564   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3565     {
3566       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3567       if (TREE_CODE (def) == SSA_NAME
3568 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3569 	def = SSA_VAL (def);
3570       vp1->phiargs[e->dest_idx] = def;
3571     }
3572   vp1->type = TREE_TYPE (gimple_phi_result (phi));
3573   vp1->block = gimple_bb (phi);
3574   /* Extract values of the controlling condition.  */
3575   vp1->cclhs = NULL_TREE;
3576   vp1->ccrhs = NULL_TREE;
3577   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3578   if (EDGE_COUNT (idom1->succs) == 2)
3579     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3580       {
3581 	/* ???  We want to use SSA_VAL here.  But possibly not
3582 	   allow VN_TOP.  */
3583 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3584 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3585       }
3586   vp1->hashcode = vn_phi_compute_hash (vp1);
3587   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3588   if (!slot)
3589     return NULL_TREE;
3590   return (*slot)->result;
3591 }
3592 
3593 /* Insert PHI into the current hash table with a value number of
3594    RESULT.  */
3595 
3596 static vn_phi_t
3597 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3598 {
3599   vn_phi_s **slot;
3600   vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3601 					   sizeof (vn_phi_s)
3602 					   + ((gimple_phi_num_args (phi) - 1)
3603 					      * sizeof (tree)));
3604   edge e;
3605   edge_iterator ei;
3606 
3607   /* Canonicalize the SSA_NAME's to their value number.  */
3608   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3609     {
3610       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3611       if (TREE_CODE (def) == SSA_NAME
3612 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3613 	def = SSA_VAL (def);
3614       vp1->phiargs[e->dest_idx] = def;
3615     }
3616   vp1->value_id = VN_INFO (result)->value_id;
3617   vp1->type = TREE_TYPE (gimple_phi_result (phi));
3618   vp1->block = gimple_bb (phi);
3619   /* Extract values of the controlling condition.  */
3620   vp1->cclhs = NULL_TREE;
3621   vp1->ccrhs = NULL_TREE;
3622   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3623   if (EDGE_COUNT (idom1->succs) == 2)
3624     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3625       {
3626 	/* ???  We want to use SSA_VAL here.  But possibly not
3627 	   allow VN_TOP.  */
3628 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3629 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3630       }
3631   vp1->result = result;
3632   vp1->hashcode = vn_phi_compute_hash (vp1);
3633 
3634   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3635   gcc_assert (!*slot);
3636 
3637   *slot = vp1;
3638   vp1->next = last_inserted_phi;
3639   last_inserted_phi = vp1;
3640   return vp1;
3641 }
3642 
3643 
3644 /* Return true if BB1 is dominated by BB2 taking into account edges
3645    that are not executable.  */
3646 
3647 static bool
3648 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3649 {
3650   edge_iterator ei;
3651   edge e;
3652 
3653   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3654     return true;
3655 
3656   /* Before iterating we'd like to know if there exists an
3657      (executable) path from bb2 to bb1 at all; if not we can
3658      directly return false.  For now simply iterate once.  */
3659 
3660   /* Iterate to the single executable bb1 predecessor.  */
3661   if (EDGE_COUNT (bb1->preds) > 1)
3662     {
3663       edge prede = NULL;
3664       FOR_EACH_EDGE (e, ei, bb1->preds)
3665 	if (e->flags & EDGE_EXECUTABLE)
3666 	  {
3667 	    if (prede)
3668 	      {
3669 		prede = NULL;
3670 		break;
3671 	      }
3672 	    prede = e;
3673 	  }
3674       if (prede)
3675 	{
3676 	  bb1 = prede->src;
3677 
3678 	  /* Re-do the dominance check with changed bb1.  */
3679 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3680 	    return true;
3681 	}
3682     }
3683 
3684   /* Iterate to the single executable bb2 successor.  */
3685   edge succe = NULL;
3686   FOR_EACH_EDGE (e, ei, bb2->succs)
3687     if (e->flags & EDGE_EXECUTABLE)
3688       {
3689 	if (succe)
3690 	  {
3691 	    succe = NULL;
3692 	    break;
3693 	  }
3694 	succe = e;
3695       }
3696   if (succe)
3697     {
3698       /* Verify the reached block is only reached through succe.
3699 	 If there is only one edge we can spare us the dominator
3700 	 check and iterate directly.  */
3701       if (EDGE_COUNT (succe->dest->preds) > 1)
3702 	{
3703 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
3704 	    if (e != succe
3705 		&& (e->flags & EDGE_EXECUTABLE))
3706 	      {
3707 		succe = NULL;
3708 		break;
3709 	      }
3710 	}
3711       if (succe)
3712 	{
3713 	  bb2 = succe->dest;
3714 
3715 	  /* Re-do the dominance check with changed bb2.  */
3716 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3717 	    return true;
3718 	}
3719     }
3720 
3721   /* We could now iterate updating bb1 / bb2.  */
3722   return false;
3723 }
3724 
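/* For instance (a sketch), assume bb2 ends in a condition that was
   found to be always false, so only the edge to its false destination
   bb4 is executable, and bb4 has another, non-executable incoming
   edge.  Then bb4 is not dominated by bb2 in the plain CFG, but
   taking executability into account it is, so the dominance check is
   redone with bb2 replaced by bb4 and succeeds for all blocks
   dominated by bb4.  */
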
3725 /* Set the value number of FROM to TO, return true if it has changed
3726    as a result.  */
3727 
3728 static inline bool
3729 set_ssa_val_to (tree from, tree to)
3730 {
3731   vn_ssa_aux_t from_info = VN_INFO (from);
3732   tree currval = from_info->valnum; // SSA_VAL (from)
3733   poly_int64 toff, coff;
3734 
3735   /* The only thing we allow as value numbers are ssa_names
3736      and invariants.  So assert that here.  We don't allow VN_TOP
3737      as visiting a stmt should produce a value-number other than
3738      that.
3739      ???  Still VN_TOP can happen for unreachable code, so force
3740      it to varying in that case.  Not all code is prepared to
3741      get VN_TOP on valueization.  */
3742   if (to == VN_TOP)
3743     {
3744       /* ???  When iterating and visiting PHI <undef, backedge-value>
3745          for the first time we rightfully get VN_TOP and we need to
3746 	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
3747 	 With SCCVN we were simply lucky we iterated the other PHI
3748 	 cycles first and thus visited the backedge-value DEF.  */
3749       if (currval == VN_TOP)
3750 	goto set_and_exit;
3751       if (dump_file && (dump_flags & TDF_DETAILS))
3752 	fprintf (dump_file, "Forcing value number to varying on "
3753 		 "receiving VN_TOP\n");
3754       to = from;
3755     }
3756 
3757   gcc_checking_assert (to != NULL_TREE
3758 		       && ((TREE_CODE (to) == SSA_NAME
3759 			    && (to == from || SSA_VAL (to) == to))
3760 			   || is_gimple_min_invariant (to)));
3761 
3762   if (from != to)
3763     {
3764       if (currval == from)
3765 	{
3766 	  if (dump_file && (dump_flags & TDF_DETAILS))
3767 	    {
3768 	      fprintf (dump_file, "Not changing value number of ");
3769 	      print_generic_expr (dump_file, from);
3770 	      fprintf (dump_file, " from VARYING to ");
3771 	      print_generic_expr (dump_file, to);
3772 	      fprintf (dump_file, "\n");
3773 	    }
3774 	  return false;
3775 	}
3776       bool curr_invariant = is_gimple_min_invariant (currval);
3777       bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
3778 			     && ssa_undefined_value_p (currval, false));
3779       if (currval != VN_TOP
3780 	  && !curr_invariant
3781 	  && !curr_undefined
3782 	  && is_gimple_min_invariant (to))
3783 	{
3784 	  if (dump_file && (dump_flags & TDF_DETAILS))
3785 	    {
3786 	      fprintf (dump_file, "Forcing VARYING instead of changing "
3787 		       "value number of ");
3788 	      print_generic_expr (dump_file, from);
3789 	      fprintf (dump_file, " from ");
3790 	      print_generic_expr (dump_file, currval);
3791 	      fprintf (dump_file, " (non-constant) to ");
3792 	      print_generic_expr (dump_file, to);
3793 	      fprintf (dump_file, " (constant)\n");
3794 	    }
3795 	  to = from;
3796 	}
3797       else if (currval != VN_TOP
3798 	       && !curr_undefined
3799 	       && TREE_CODE (to) == SSA_NAME
3800 	       && ssa_undefined_value_p (to, false))
3801 	{
3802 	  if (dump_file && (dump_flags & TDF_DETAILS))
3803 	    {
3804 	      fprintf (dump_file, "Forcing VARYING instead of changing "
3805 		       "value number of ");
3806 	      print_generic_expr (dump_file, from);
3807 	      fprintf (dump_file, " from ");
3808 	      print_generic_expr (dump_file, currval);
3809 	      fprintf (dump_file, " (non-undefined) to ");
3810 	      print_generic_expr (dump_file, to);
3811 	      fprintf (dump_file, " (undefined)\n");
3812 	    }
3813 	  to = from;
3814 	}
3815       else if (TREE_CODE (to) == SSA_NAME
3816 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3817 	to = from;
3818     }
3819 
3820 set_and_exit:
3821   if (dump_file && (dump_flags & TDF_DETAILS))
3822     {
3823       fprintf (dump_file, "Setting value number of ");
3824       print_generic_expr (dump_file, from);
3825       fprintf (dump_file, " to ");
3826       print_generic_expr (dump_file, to);
3827     }
3828 
3829   if (currval != to
3830       && !operand_equal_p (currval, to, 0)
3831       /* Different undefined SSA names are not actually different.  See
3832          PR82320 for a testcase where we'd otherwise not terminate iteration.  */
3833       && !(TREE_CODE (currval) == SSA_NAME
3834 	   && TREE_CODE (to) == SSA_NAME
3835 	   && ssa_undefined_value_p (currval, false)
3836 	   && ssa_undefined_value_p (to, false))
3837       /* ???  For addresses involving volatile objects or types operand_equal_p
3838          does not reliably detect ADDR_EXPRs as equal.  We know we are only
3839 	 getting invariant gimple addresses here, so we can use
3840 	 get_addr_base_and_unit_offset to do this comparison.  */
3841       && !(TREE_CODE (currval) == ADDR_EXPR
3842 	   && TREE_CODE (to) == ADDR_EXPR
3843 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3844 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3845 	   && known_eq (coff, toff)))
3846     {
3847       if (dump_file && (dump_flags & TDF_DETAILS))
3848 	fprintf (dump_file, " (changed)\n");
3849       from_info->valnum = to;
3850       return true;
3851     }
3852   if (dump_file && (dump_flags & TDF_DETAILS))
3853     fprintf (dump_file, "\n");
3854   return false;
3855 }
3856 
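/* Note the "Forcing VARYING" cases above keep the transitions
   monotone:  instead of changing a value number from a non-constant
   to a constant, or from a defined to an undefined value, we drop to
   VARYING (TO = FROM).  Together with the handling of VN_TOP this
   bounds how often a value number can change and helps guarantee
   that the iteration terminates.  */
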
3857 /* Set all definitions in STMT to value number to themselves.
3858    Return true if a value number changed. */
3859 
3860 static bool
3861 defs_to_varying (gimple *stmt)
3862 {
3863   bool changed = false;
3864   ssa_op_iter iter;
3865   def_operand_p defp;
3866 
3867   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3868     {
3869       tree def = DEF_FROM_PTR (defp);
3870       changed |= set_ssa_val_to (def, def);
3871     }
3872   return changed;
3873 }
3874 
3875 /* Visit a copy between LHS and RHS, return true if the value number
3876    changed.  */
3877 
3878 static bool
3879 visit_copy (tree lhs, tree rhs)
3880 {
3881   /* Valueize.  */
3882   rhs = SSA_VAL (rhs);
3883 
3884   return set_ssa_val_to (lhs, rhs);
3885 }
3886 
3887 /* Lookup a value for OP in type WIDE_TYPE where the value in the type
3888    of OP is the same.  */
3889 
3890 static tree
3891 valueized_wider_op (tree wide_type, tree op)
3892 {
3893   if (TREE_CODE (op) == SSA_NAME)
3894     op = vn_valueize (op);
3895 
3896   /* Either we have the op widened available.  */
3897   tree ops[3] = {};
3898   ops[0] = op;
3899   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3900 				       wide_type, ops, NULL);
3901   if (tem)
3902     return tem;
3903 
3904   /* Or the op is truncated from some existing value.  */
3905   if (TREE_CODE (op) == SSA_NAME)
3906     {
3907       gimple *def = SSA_NAME_DEF_STMT (op);
3908       if (is_gimple_assign (def)
3909 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3910 	{
3911 	  tem = gimple_assign_rhs1 (def);
3912 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3913 	    {
3914 	      if (TREE_CODE (tem) == SSA_NAME)
3915 		tem = vn_valueize (tem);
3916 	      return tem;
3917 	    }
3918 	}
3919     }
3920 
3921   /* For constants simply extend it.  */
3922   if (TREE_CODE (op) == INTEGER_CST)
3923     return wide_int_to_tree (wide_type, wi::to_wide (op));
3924 
3925   return NULL_TREE;
3926 }
3927 
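/* For example (a sketch), with

     int i_2 = ...;
     short s_1 = (short) i_2;

   a call valueized_wider_op (int, s_1) returns i_2 via the
   truncation case, and valueized_wider_op (int, 7) simply returns
   the constant 7 extended to int.  */
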
3928 /* Visit a nary operator RHS, value number it, and return true if the
3929    value number of LHS has changed as a result.  */
3930 
3931 static bool
3932 visit_nary_op (tree lhs, gassign *stmt)
3933 {
3934   vn_nary_op_t vnresult;
3935   tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
3936   if (! result && vnresult)
3937     result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
3938   if (result)
3939     return set_ssa_val_to (lhs, result);
3940 
3941   /* Do some special pattern matching for redundancies of operations
3942      in different types.  */
3943   enum tree_code code = gimple_assign_rhs_code (stmt);
3944   tree type = TREE_TYPE (lhs);
3945   tree rhs1 = gimple_assign_rhs1 (stmt);
3946   switch (code)
3947     {
3948     CASE_CONVERT:
3949       /* Match arithmetic done in a different type where we can easily
3950          substitute the result from some earlier sign-changed or widened
3951 	 operation.  */
3952       if (INTEGRAL_TYPE_P (type)
3953 	  && TREE_CODE (rhs1) == SSA_NAME
3954 	  /* We only handle sign-changes or zero-extension -> & mask.  */
3955 	  && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3956 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3957 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3958 	{
3959 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3960 	  if (def
3961 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
3962 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
3963 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
3964 	    {
3965 	      tree ops[3] = {};
3966 	      /* Either we have the op widened available.  */
3967 	      ops[0] = valueized_wider_op (type,
3968 					   gimple_assign_rhs1 (def));
3969 	      if (ops[0])
3970 		ops[1] = valueized_wider_op (type,
3971 					     gimple_assign_rhs2 (def));
3972 	      if (ops[0] && ops[1])
3973 		{
3974 		  ops[0] = vn_nary_op_lookup_pieces
3975 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
3976 		  /* We have wider operation available.  */
3977 		  if (ops[0]
3978 		      /* If the leader is a wrapping operation we can
3979 		         insert it for code hoisting w/o introducing
3980 			 undefined overflow.  If it is not it has to
3981 			 be available.  See PR86554.  */
3982 		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
3983 			  || (rpo_avail && vn_context_bb
3984 			      && rpo_avail->eliminate_avail (vn_context_bb,
3985 							     ops[0]))))
3986 		    {
3987 		      unsigned lhs_prec = TYPE_PRECISION (type);
3988 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3989 		      if (lhs_prec == rhs_prec)
3990 			{
3991 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
3992 						    NOP_EXPR, type, ops[0]);
3993 			  result = vn_nary_build_or_lookup (&match_op);
3994 			  if (result)
3995 			    {
3996 			      bool changed = set_ssa_val_to (lhs, result);
3997 			      vn_nary_op_insert_stmt (stmt, result);
3998 			      return changed;
3999 			    }
4000 			}
4001 		      else
4002 			{
4003 			  tree mask = wide_int_to_tree
4004 			    (type, wi::mask (rhs_prec, false, lhs_prec));
4005 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
4006 						    BIT_AND_EXPR,
4007 						    TREE_TYPE (lhs),
4008 						    ops[0], mask);
4009 			  result = vn_nary_build_or_lookup (&match_op);
4010 			  if (result)
4011 			    {
4012 			      bool changed = set_ssa_val_to (lhs, result);
4013 			      vn_nary_op_insert_stmt (stmt, result);
4014 			      return changed;
4015 			    }
4016 			}
4017 		    }
4018 		}
4019 	    }
4020 	}
4021     default:;
4022     }
4023 
4024   bool changed = set_ssa_val_to (lhs, lhs);
4025   vn_nary_op_insert_stmt (stmt, lhs);
4026   return changed;
4027 }
4028 
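/* The conversion pattern handled above, as a sketch in source terms:
   for

     unsigned short t_3 = a_1 + b_2;
     unsigned int r_4 = (unsigned int) t_3;

   when the addition is already available in the wider type as

     unsigned int w_5 = aw_6 + bw_7;

   with aw_6 and bw_7 the zero-extended operands, r_4 can be value
   numbered to w_5 & 0xffff, avoiding the narrow computation.  */
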
4029 /* Visit a call STMT storing into LHS.  Return true if the value number
4030    of the LHS has changed as a result.  */
4031 
4032 static bool
4033 visit_reference_op_call (tree lhs, gcall *stmt)
4034 {
4035   bool changed = false;
4036   struct vn_reference_s vr1;
4037   vn_reference_t vnresult = NULL;
4038   tree vdef = gimple_vdef (stmt);
4039 
4040   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
4041   if (lhs && TREE_CODE (lhs) != SSA_NAME)
4042     lhs = NULL_TREE;
4043 
4044   vn_reference_lookup_call (stmt, &vnresult, &vr1);
4045   if (vnresult)
4046     {
4047       if (vnresult->result_vdef && vdef)
4048 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4049       else if (vdef)
4050 	/* If the call was discovered to be pure or const reflect
4051 	   that as far as possible.  */
4052 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4053 
4054       if (!vnresult->result && lhs)
4055 	vnresult->result = lhs;
4056 
4057       if (vnresult->result && lhs)
4058 	changed |= set_ssa_val_to (lhs, vnresult->result);
4059     }
4060   else
4061     {
4062       vn_reference_t vr2;
4063       vn_reference_s **slot;
4064       tree vdef_val = vdef;
4065       if (vdef)
4066 	{
4067 	  /* If we value-numbered the function of an indirect call to
4068 	     one not clobbering memory, value-number its VDEF to its
4069 	     VUSE.  */
4070 	  tree fn = gimple_call_fn (stmt);
4071 	  if (fn && TREE_CODE (fn) == SSA_NAME)
4072 	    {
4073 	      fn = SSA_VAL (fn);
4074 	      if (TREE_CODE (fn) == ADDR_EXPR
4075 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4076 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4077 		      & (ECF_CONST | ECF_PURE)))
4078 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4079 	    }
4080 	  changed |= set_ssa_val_to (vdef, vdef_val);
4081 	}
4082       if (lhs)
4083 	changed |= set_ssa_val_to (lhs, lhs);
4084       vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4085       vr2->vuse = vr1.vuse;
4086       /* As we are not walking the virtual operand chain we know the
4087 	 shared_lookup_references are still original so we can re-use
4088 	 them here.  */
4089       vr2->operands = vr1.operands.copy ();
4090       vr2->type = vr1.type;
4091       vr2->set = vr1.set;
4092       vr2->hashcode = vr1.hashcode;
4093       vr2->result = lhs;
4094       vr2->result_vdef = vdef_val;
4095       vr2->value_id = 0;
4096       slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4097 							  INSERT);
4098       gcc_assert (!*slot);
4099       *slot = vr2;
4100       vr2->next = last_inserted_ref;
4101       last_inserted_ref = vr2;
4102     }
4103 
4104   return changed;
4105 }
4106 
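/* For example (a sketch), the two calls

     x_1 = foo (a_2);
     ...
     y_3 = foo (a_2);

   can give y_3 the value number of x_1 when foo is const, or when it
   is pure and both calls see the same VUSE.  Calls that write memory
   only get this treatment in VN_WALK mode, i.e. when invoked from PRE
   for tail merging (see visit_stmt).  */
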
4107 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4108    and return true if the value number of the LHS has changed as a result.  */
4109 
4110 static bool
4111 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4112 {
4113   bool changed = false;
4114   tree last_vuse;
4115   tree result;
4116 
4117   last_vuse = gimple_vuse (stmt);
4118   result = vn_reference_lookup (op, gimple_vuse (stmt),
4119 				default_vn_walk_kind, NULL, true, &last_vuse);
4120 
4121   /* We handle type-punning through unions by value-numbering based
4122      on offset and size of the access.  Be prepared to handle a
4123      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
4124   if (result
4125       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4126     {
4127       /* We will be setting the value number of lhs to the value number
4128 	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4129 	 So first simplify and lookup this expression to see if it
4130 	 is already available.  */
4131       gimple_match_op res_op (gimple_match_cond::UNCOND,
4132 			      VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
4133       result = vn_nary_build_or_lookup (&res_op);
4134       /* When building the conversion fails, avoid inserting the reference
4135          again.  */
4136       if (!result)
4137 	return set_ssa_val_to (lhs, lhs);
4138     }
4139 
4140   if (result)
4141     changed = set_ssa_val_to (lhs, result);
4142   else
4143     {
4144       changed = set_ssa_val_to (lhs, lhs);
4145       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
4146     }
4147 
4148   return changed;
4149 }
4150 
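/* For instance (a sketch), with a union of int and float, after

     u.i = i_1;
     f_2 = u.f;

   the load f_2 hits the stored value i_1 based on offset and size of
   the access, and the type mismatch is fixed up by value numbering
   f_2 like VIEW_CONVERT_EXPR <float> (i_1).  */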
4151 
4152 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4153    and return true if the value number of the LHS has changed as a result.  */
4154 
4155 static bool
4156 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4157 {
4158   bool changed = false;
4159   vn_reference_t vnresult = NULL;
4160   tree assign;
4161   bool resultsame = false;
4162   tree vuse = gimple_vuse (stmt);
4163   tree vdef = gimple_vdef (stmt);
4164 
4165   if (TREE_CODE (op) == SSA_NAME)
4166     op = SSA_VAL (op);
4167 
4168   /* First we want to lookup using the *vuses* from the store and see
4169      if the last store to this location with the same address
4170      had the same value.
4171 
4172      The vuses represent the memory state before the store.  If the
4173      memory state, address, and value of the store is the same as the
4174      last store to this location, then this store will produce the
4175      same memory state as that store.
4176 
4177      In this case the vdef versions for this store are value numbered to those
4178      vuse versions, since they represent the same memory state after
4179      this store.
4180 
4181      Otherwise, the vdefs for the store are used when inserting into
4182      the table, since the store generates a new memory state.  */
4183 
4184   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4185   if (vnresult
4186       && vnresult->result)
4187     {
4188       tree result = vnresult->result;
4189       gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4190 			   || result == SSA_VAL (result));
4191       resultsame = expressions_equal_p (result, op);
4192       if (resultsame)
4193 	{
4194 	  /* If the TBAA state isn't compatible for downstream reads
4195 	     we cannot value-number the VDEFs the same.  */
4196 	  alias_set_type set = get_alias_set (lhs);
4197 	  if (vnresult->set != set
4198 	      && ! alias_set_subset_of (set, vnresult->set))
4199 	    resultsame = false;
4200 	}
4201     }
4202 
4203   if (!resultsame)
4204     {
4205       /* Only perform the following when being called from PRE
4206 	 which embeds tail merging.  */
4207       if (default_vn_walk_kind == VN_WALK)
4208 	{
4209 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4210 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4211 	  if (vnresult)
4212 	    {
4213 	      VN_INFO (vdef)->visited = true;
4214 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
4215 	    }
4216 	}
4217 
4218       if (dump_file && (dump_flags & TDF_DETAILS))
4219 	{
4220 	  fprintf (dump_file, "No store match\n");
4221 	  fprintf (dump_file, "Value numbering store ");
4222 	  print_generic_expr (dump_file, lhs);
4223 	  fprintf (dump_file, " to ");
4224 	  print_generic_expr (dump_file, op);
4225 	  fprintf (dump_file, "\n");
4226 	}
4227       /* Have to set value numbers before insert, since insert is
4228 	 going to valueize the references in-place.  */
4229       if (vdef)
4230 	changed |= set_ssa_val_to (vdef, vdef);
4231 
4232       /* Do not insert structure copies into the tables.  */
4233       if (is_gimple_min_invariant (op)
4234 	  || is_gimple_reg (op))
4235         vn_reference_insert (lhs, op, vdef, NULL);
4236 
4237       /* Only perform the following when being called from PRE
4238 	 which embeds tail merging.  */
4239       if (default_vn_walk_kind == VN_WALK)
4240 	{
4241 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4242 	  vn_reference_insert (assign, lhs, vuse, vdef);
4243 	}
4244     }
4245   else
4246     {
4247       /* We had a match, so value number the vdef to have the value
4248 	 number of the vuse it came from.  */
4249 
4250       if (dump_file && (dump_flags & TDF_DETAILS))
4251 	fprintf (dump_file, "Store matched earlier value, "
4252 		 "value numbering store vdefs to matching vuses.\n");
4253 
4254       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4255     }
4256 
4257   return changed;
4258 }
4259 
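/* For example (a sketch), in

     *p_1 = x_2;
     ... no intervening may-def of *p_1 ...
     *p_1 = x_2;

   the second store produces the same memory state, so its VDEF is
   value numbered to its VUSE and the store can later be removed as
   redundant (given compatible TBAA state, see above).  */
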
4260 /* Visit and value number PHI, return true if the value number
4261    changed.  When BACKEDGES_VARYING_P is true then assume all
4262    backedge values are varying.  When INSERTED is not NULL then
4263    this is just an ahead query for a possible iteration; set INSERTED
4264    to true if we'd insert into the hashtable.  */
4265 
4266 static bool
4267 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4268 {
4269   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4270   tree backedge_val = NULL_TREE;
4271   bool seen_non_backedge = false;
4272   tree sameval_base = NULL_TREE;
4273   poly_int64 soff, doff;
4274   unsigned n_executable = 0;
4275   edge_iterator ei;
4276   edge e;
4277 
4278   /* TODO: We could check for this in initialization, and replace this
4279      with a gcc_assert.  */
4280   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4281     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4282 
4283   /* We track whether a PHI was CSEd to, to avoid excessive iterations
4284      that would be necessary only because the PHI changed arguments
4285      but not value.  */
4286   if (!inserted)
4287     gimple_set_plf (phi, GF_PLF_1, false);
4288 
4289   /* See if all non-TOP arguments have the same value.  TOP is
4290      equivalent to everything, so we can ignore it.  */
4291   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4292     if (e->flags & EDGE_EXECUTABLE)
4293       {
4294 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4295 
4296 	++n_executable;
4297 	if (TREE_CODE (def) == SSA_NAME)
4298 	  {
4299 	    if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4300 	      def = SSA_VAL (def);
4301 	    if (e->flags & EDGE_DFS_BACK)
4302 	      backedge_val = def;
4303 	  }
4304 	if (!(e->flags & EDGE_DFS_BACK))
4305 	  seen_non_backedge = true;
4306 	if (def == VN_TOP)
4307 	  ;
4308 	/* Ignore undefined defs for sameval but record one.  */
4309 	else if (TREE_CODE (def) == SSA_NAME
4310 		 && ! virtual_operand_p (def)
4311 		 && ssa_undefined_value_p (def, false))
4312 	  seen_undef = def;
4313 	else if (sameval == VN_TOP)
4314 	  sameval = def;
4315 	else if (!expressions_equal_p (def, sameval))
4316 	  {
4317 	    /* We know we're arriving only with invariant addresses here;
4318 	       try harder comparing them.  We can do some caching here
4319 	       which we cannot do in expressions_equal_p.  */
4320 	    if (TREE_CODE (def) == ADDR_EXPR
4321 		&& TREE_CODE (sameval) == ADDR_EXPR
4322 		&& sameval_base != (void *)-1)
4323 	      {
4324 		if (!sameval_base)
4325 		  sameval_base = get_addr_base_and_unit_offset
4326 				   (TREE_OPERAND (sameval, 0), &soff);
4327 		if (!sameval_base)
4328 		  sameval_base = (tree)(void *)-1;
4329 		else if ((get_addr_base_and_unit_offset
4330 			    (TREE_OPERAND (def, 0), &doff) == sameval_base)
4331 			 && known_eq (soff, doff))
4332 		  continue;
4333 	      }
4334 	    sameval = NULL_TREE;
4335 	    break;
4336 	  }
4337       }
4338 
4339   /* If the value we want to use is flowing over the backedge and we
4340      should take it as VARYING but it has a non-VARYING value drop to
4341      VARYING.
4342      If we value-number a virtual operand never value-number to the
4343      value from the backedge as that confuses the alias-walking code.
4344      See gcc.dg/torture/pr87176.c.  If the value is the same on a
4345      non-backedge everything is OK though.  */
4346   bool visited_p;
4347   if ((backedge_val
4348        && !seen_non_backedge
4349        && TREE_CODE (backedge_val) == SSA_NAME
4350        && sameval == backedge_val
4351        && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
4352 	   || SSA_VAL (backedge_val) != backedge_val))
4353       /* Do not value-number a virtual operand to something not visited
4354 	 though, given that allows us to escape a region in alias walking.  */
4355       || (sameval
4356 	  && TREE_CODE (sameval) == SSA_NAME
4357 	  && !SSA_NAME_IS_DEFAULT_DEF (sameval)
4358 	  && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
4359 	  && (SSA_VAL (sameval, &visited_p), !visited_p)))
4360     /* Note this just drops to VARYING without inserting the PHI into
4361        the hashes.  */
4362     result = PHI_RESULT (phi);
4363   /* If none of the edges was executable keep the value-number at VN_TOP;
4364      if only a single edge is executable use its value.  */
4365   else if (n_executable <= 1)
4366     result = seen_undef ? seen_undef : sameval;
4367   /* If we saw only undefined values and VN_TOP use one of the
4368      undefined values.  */
4369   else if (sameval == VN_TOP)
4370     result = seen_undef ? seen_undef : sameval;
4371   /* First see if it is equivalent to a phi node in this block.  We prefer
4372      this as it allows IV elimination - see PRs 66502 and 67167.  */
4373   else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4374     {
4375       if (!inserted
4376 	  && TREE_CODE (result) == SSA_NAME
4377 	  && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4378 	{
4379 	  gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4380 	  if (dump_file && (dump_flags & TDF_DETAILS))
4381 	    {
4382 	      fprintf (dump_file, "Marking CSEd to PHI node ");
4383 	      print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4384 				 0, TDF_SLIM);
4385 	      fprintf (dump_file, "\n");
4386 	    }
4387 	}
4388     }
4389   /* If all values are the same use that, unless we've seen undefined
4390      values as well and the value isn't constant.
4391      CCP/copyprop have the same restriction to not remove uninit warnings.  */
4392   else if (sameval
4393 	   && (! seen_undef || is_gimple_min_invariant (sameval)))
4394     result = sameval;
4395   else
4396     {
4397       result = PHI_RESULT (phi);
4398       /* Only insert PHIs that are varying; for constant value numbers
4399          we would mess up equivalences otherwise, as we are only comparing
4400 	 the immediate controlling predicates.  */
4401       vn_phi_insert (phi, result, backedges_varying_p);
4402       if (inserted)
4403 	*inserted = true;
4404     }
4405 
4406   return set_ssa_val_to (PHI_RESULT (phi), result);
4407 }
4408 
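/* Examples for the PHI handling above (sketches):  PHI <x_1(2), x_1(3)>
   value numbers to x_1; PHI <1(2), undef_3(4)> to the constant 1,
   since constants may absorb undefined arguments; PHI <x_1(2),
   undef_3(4)> instead stays VARYING (unless it CSEs to an earlier
   equivalent PHI), preserving uninitialized-use warnings.  */
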
4409 /* Try to simplify RHS using equivalences and constant folding.  */
4410 
4411 static tree
4412 try_to_simplify (gassign *stmt)
4413 {
4414   enum tree_code code = gimple_assign_rhs_code (stmt);
4415   tree tem;
4416 
4417   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
4418      in this case; there is no point in doing extra work.  */
4419   if (code == SSA_NAME)
4420     return NULL_TREE;
4421 
4422   /* First try constant folding based on our current lattice.  */
4423   mprts_hook = vn_lookup_simplify_result;
4424   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4425   mprts_hook = NULL;
4426   if (tem
4427       && (TREE_CODE (tem) == SSA_NAME
4428 	  || is_gimple_min_invariant (tem)))
4429     return tem;
4430 
4431   return NULL_TREE;
4432 }
4433 
4434 /* Visit and value number STMT, return true if the value number
4435    changed.  */
4436 
4437 static bool
4438 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4439 {
4440   bool changed = false;
4441 
4442   if (dump_file && (dump_flags & TDF_DETAILS))
4443     {
4444       fprintf (dump_file, "Value numbering stmt = ");
4445       print_gimple_stmt (dump_file, stmt, 0);
4446     }
4447 
4448   if (gimple_code (stmt) == GIMPLE_PHI)
4449     changed = visit_phi (stmt, NULL, backedges_varying_p);
4450   else if (gimple_has_volatile_ops (stmt))
4451     changed = defs_to_varying (stmt);
4452   else if (gassign *ass = dyn_cast <gassign *> (stmt))
4453     {
4454       enum tree_code code = gimple_assign_rhs_code (ass);
4455       tree lhs = gimple_assign_lhs (ass);
4456       tree rhs1 = gimple_assign_rhs1 (ass);
4457       tree simplified;
4458 
4459       /* Shortcut for copies. Simplifying copies is pointless,
4460 	 since we copy the expression and value they represent.  */
4461       if (code == SSA_NAME
4462 	  && TREE_CODE (lhs) == SSA_NAME)
4463 	{
4464 	  changed = visit_copy (lhs, rhs1);
4465 	  goto done;
4466 	}
4467       simplified = try_to_simplify (ass);
4468       if (simplified)
4469 	{
4470 	  if (dump_file && (dump_flags & TDF_DETAILS))
4471 	    {
4472 	      fprintf (dump_file, "RHS ");
4473 	      print_gimple_expr (dump_file, ass, 0);
4474 	      fprintf (dump_file, " simplified to ");
4475 	      print_generic_expr (dump_file, simplified);
4476 	      fprintf (dump_file, "\n");
4477 	    }
4478 	}
4479       /* Setting value numbers to constants will occasionally
4480 	 screw up phi congruence because constants are not
4481 	 uniquely associated with a single ssa name that can be
4482 	 looked up.  */
4483       if (simplified
4484 	  && is_gimple_min_invariant (simplified)
4485 	  && TREE_CODE (lhs) == SSA_NAME)
4486 	{
4487 	  changed = set_ssa_val_to (lhs, simplified);
4488 	  goto done;
4489 	}
4490       else if (simplified
4491 	       && TREE_CODE (simplified) == SSA_NAME
4492 	       && TREE_CODE (lhs) == SSA_NAME)
4493 	{
4494 	  changed = visit_copy (lhs, simplified);
4495 	  goto done;
4496 	}
4497 
4498       if ((TREE_CODE (lhs) == SSA_NAME
4499 	   /* We can substitute SSA_NAMEs that are live over
4500 	      abnormal edges with their constant value.  */
4501 	   && !(gimple_assign_copy_p (ass)
4502 		&& is_gimple_min_invariant (rhs1))
4503 	   && !(simplified
4504 		&& is_gimple_min_invariant (simplified))
4505 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4506 	  /* Stores or copies from SSA_NAMEs that are live over
4507 	     abnormal edges are a problem.  */
4508 	  || (code == SSA_NAME
4509 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4510 	changed = defs_to_varying (ass);
4511       else if (REFERENCE_CLASS_P (lhs)
4512 	       || DECL_P (lhs))
4513 	changed = visit_reference_op_store (lhs, rhs1, ass);
4514       else if (TREE_CODE (lhs) == SSA_NAME)
4515 	{
4516 	  if ((gimple_assign_copy_p (ass)
4517 	       && is_gimple_min_invariant (rhs1))
4518 	      || (simplified
4519 		  && is_gimple_min_invariant (simplified)))
4520 	    {
4521 	      if (simplified)
4522 		changed = set_ssa_val_to (lhs, simplified);
4523 	      else
4524 		changed = set_ssa_val_to (lhs, rhs1);
4525 	    }
4526 	  else
4527 	    {
4528 	      /* Visit the original statement.  */
4529 	      switch (vn_get_stmt_kind (ass))
4530 		{
4531 		case VN_NARY:
4532 		changed = visit_nary_op (lhs, ass);
4533 		break;
4534 		case VN_REFERENCE:
4535 		changed = visit_reference_op_load (lhs, rhs1, ass);
4536 		break;
4537 		default:
4538 		changed = defs_to_varying (ass);
4539 		break;
4540 		}
4541 	    }
4542 	}
4543       else
4544 	changed = defs_to_varying (ass);
4545     }
4546   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4547     {
4548       tree lhs = gimple_call_lhs (call_stmt);
4549       if (lhs && TREE_CODE (lhs) == SSA_NAME)
4550 	{
4551 	  /* Try constant folding based on our current lattice.  */
4552 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4553 							    vn_valueize);
4554 	  if (simplified)
4555 	    {
4556 	      if (dump_file && (dump_flags & TDF_DETAILS))
4557 		{
4558 		  fprintf (dump_file, "call ");
4559 		  print_gimple_expr (dump_file, call_stmt, 0);
4560 		  fprintf (dump_file, " simplified to ");
4561 		  print_generic_expr (dump_file, simplified);
4562 		  fprintf (dump_file, "\n");
4563 		}
4564 	    }
4565 	  /* Setting value numbers to constants will occasionally
4566 	     screw up phi congruence because constants are not
4567 	     uniquely associated with a single ssa name that can be
4568 	     looked up.  */
4569 	  if (simplified
4570 	      && is_gimple_min_invariant (simplified))
4571 	    {
4572 	      changed = set_ssa_val_to (lhs, simplified);
4573 	      if (gimple_vdef (call_stmt))
4574 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4575 					   SSA_VAL (gimple_vuse (call_stmt)));
4576 	      goto done;
4577 	    }
4578 	  else if (simplified
4579 		   && TREE_CODE (simplified) == SSA_NAME)
4580 	    {
4581 	      changed = visit_copy (lhs, simplified);
4582 	      if (gimple_vdef (call_stmt))
4583 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4584 					   SSA_VAL (gimple_vuse (call_stmt)));
4585 	      goto done;
4586 	    }
4587 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4588 	    {
4589 	      changed = defs_to_varying (call_stmt);
4590 	      goto done;
4591 	    }
4592 	}
4593 
4594       /* Pick up flags from a devirtualization target.  */
4595       tree fn = gimple_call_fn (stmt);
4596       int extra_fnflags = 0;
4597       if (fn && TREE_CODE (fn) == SSA_NAME)
4598 	{
4599 	  fn = SSA_VAL (fn);
4600 	  if (TREE_CODE (fn) == ADDR_EXPR
4601 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4602 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4603 	}
4604       if (!gimple_call_internal_p (call_stmt)
4605 	  && (/* Calls to the same function with the same vuse
4606 		 and the same operands do not necessarily return the same
4607 		 value, unless they're pure or const.  */
4608 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
4609 	       & (ECF_PURE | ECF_CONST))
4610 	      /* If calls have a vdef, subsequent calls won't have
4611 		 the same incoming vuse.  So, if 2 calls with vdef have the
4612 		 same vuse, we know they're not subsequent.
4613 		 We can value number 2 calls to the same function with the
4614 		 same vuse and the same operands which are not subsequent
4615 		 the same, because there is no code in the program that can
4616 		 compare the 2 values...  */
4617 	      || (gimple_vdef (call_stmt)
4618 		  /* ... unless the call returns a pointer which does
4619 		     not alias with anything else.  In which case the
4620 		     information that the values are distinct is encoded
4621 		     in the IL.  */
4622 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4623 		  /* Only perform the following when being called from PRE
4624 		     which embeds tail merging.  */
4625 		  && default_vn_walk_kind == VN_WALK)))
4626 	changed = visit_reference_op_call (lhs, call_stmt);
4627       else
4628 	changed = defs_to_varying (call_stmt);
4629     }
4630   else
4631     changed = defs_to_varying (stmt);
4632  done:
4633   return changed;
4634 }
4635 
4636 
4637 /* Allocate a value number table.  */
4638 
4639 static void
4640 allocate_vn_table (vn_tables_t table, unsigned size)
4641 {
4642   table->phis = new vn_phi_table_type (size);
4643   table->nary = new vn_nary_op_table_type (size);
4644   table->references = new vn_reference_table_type (size);
4645 }
4646 
4647 /* Free a value number table.  */
4648 
4649 static void
4650 free_vn_table (vn_tables_t table)
4651 {
4652   /* Walk over elements and release vectors.  */
4653   vn_reference_iterator_type hir;
4654   vn_reference_t vr;
4655   FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
4656     vr->operands.release ();
4657   delete table->phis;
4658   table->phis = NULL;
4659   delete table->nary;
4660   table->nary = NULL;
4661   delete table->references;
4662   table->references = NULL;
4663 }
4664 
4665 /* Set *ID according to RESULT.  */
4666 
4667 static void
4668 set_value_id_for_result (tree result, unsigned int *id)
4669 {
4670   if (result && TREE_CODE (result) == SSA_NAME)
4671     *id = VN_INFO (result)->value_id;
4672   else if (result && is_gimple_min_invariant (result))
4673     *id = get_or_alloc_constant_value_id (result);
4674   else
4675     *id = get_next_value_id ();
4676 }
4677 
4678 /* Set the value ids in the valid hash tables.  */
4679 
4680 static void
4681 set_hashtable_value_ids (void)
4682 {
4683   vn_nary_op_iterator_type hin;
4684   vn_phi_iterator_type hip;
4685   vn_reference_iterator_type hir;
4686   vn_nary_op_t vno;
4687   vn_reference_t vr;
4688   vn_phi_t vp;
4689 
4690   /* Now set the value ids of the things we had put in the hash
4691      table.  */
4692 
4693   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4694     if (! vno->predicated_values)
4695       set_value_id_for_result (vno->u.result, &vno->value_id);
4696 
4697   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4698     set_value_id_for_result (vp->result, &vp->value_id);
4699 
4700   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4701 			       hir)
4702     set_value_id_for_result (vr->result, &vr->value_id);
4703 }
4704 
4705 /* Return the maximum value id we have ever seen.  */
4706 
4707 unsigned int
4708 get_max_value_id (void)
4709 {
4710   return next_value_id;
4711 }
4712 
4713 /* Return the next unique value id.  */
4714 
4715 unsigned int
4716 get_next_value_id (void)
4717 {
4718   return next_value_id++;
4719 }
4720 
4721 
4722 /* Compare two expressions E1 and E2 and return true if they are equal.  */
4723 
4724 bool
4725 expressions_equal_p (tree e1, tree e2)
4726 {
4727   /* The obvious case.  */
4728   if (e1 == e2)
4729     return true;
4730 
4731   /* If either one is VN_TOP consider them equal.  */
4732   if (e1 == VN_TOP || e2 == VN_TOP)
4733     return true;
4734 
4735   /* If only one of them is null, they cannot be equal.  */
4736   if (!e1 || !e2)
4737     return false;
4738 
4739   /* Now perform the actual comparison.  */
4740   if (TREE_CODE (e1) == TREE_CODE (e2)
4741       && operand_equal_p (e1, e2, OEP_PURE_SAME))
4742     return true;
4743 
4744   return false;
4745 }
4746 
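/* Note that VN_TOP compares equal to everything:  it is the
   optimistic "not visited yet" lattice top, and any value is a valid
   refinement of it.  */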
4747 
4748 /* Return true if the nary operation NARY may trap.  This is a copy
4749    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
4750 
4751 bool
4752 vn_nary_may_trap (vn_nary_op_t nary)
4753 {
4754   tree type;
4755   tree rhs2 = NULL_TREE;
4756   bool honor_nans = false;
4757   bool honor_snans = false;
4758   bool fp_operation = false;
4759   bool honor_trapv = false;
4760   bool handled, ret;
4761   unsigned i;
4762 
4763   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4764       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4765       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4766     {
4767       type = nary->type;
4768       fp_operation = FLOAT_TYPE_P (type);
4769       if (fp_operation)
4770 	{
4771 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
4772 	  honor_snans = flag_signaling_nans != 0;
4773 	}
4774       else if (INTEGRAL_TYPE_P (type)
4775 	       && TYPE_OVERFLOW_TRAPS (type))
4776 	honor_trapv = true;
4777     }
4778   if (nary->length >= 2)
4779     rhs2 = nary->op[1];
4780   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4781 				       honor_trapv,
4782 				       honor_nans, honor_snans, rhs2,
4783 				       &handled);
4784   if (handled
4785       && ret)
4786     return true;
4787 
4788   for (i = 0; i < nary->length; ++i)
4789     if (tree_could_trap_p (nary->op[i]))
4790       return true;
4791 
4792   return false;
4793 }
4794 
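/* For example (a sketch), an integer division x_1 / y_2 may trap
   when y_2 is not a nonzero constant, and an ordered floating-point
   comparison may trap when NaNs are honored.  Expressions that may
   trap must not be inserted at places the original computation did
   not execute.  */
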
4795 /* Return true if the reference operation REF may trap.  */
4796 
4797 bool
4798 vn_reference_may_trap (vn_reference_t ref)
4799 {
4800   switch (ref->operands[0].opcode)
4801     {
4802     case MODIFY_EXPR:
4803     case CALL_EXPR:
4804       /* We do not handle calls.  */
4805     case ADDR_EXPR:
4806       /* And toplevel address computations never trap.  */
4807       return false;
4808     default:;
4809     }
4810 
4811   vn_reference_op_t op;
4812   unsigned i;
4813   FOR_EACH_VEC_ELT (ref->operands, i, op)
4814     {
4815       switch (op->opcode)
4816 	{
4817 	case WITH_SIZE_EXPR:
4818 	case TARGET_MEM_REF:
4819 	  /* Always variable.  */
4820 	  return true;
4821 	case COMPONENT_REF:
4822 	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
4823 	    return true;
4824 	  break;
4825 	case ARRAY_RANGE_REF:
4826 	case ARRAY_REF:
4827 	  if (TREE_CODE (op->op0) == SSA_NAME)
4828 	    return true;
4829 	  break;
4830 	case MEM_REF:
4831 	  /* Nothing interesting in itself, the base is separate.  */
4832 	  break;
4833 	/* The following are the address bases.  */
4834 	case SSA_NAME:
4835 	  return true;
4836 	case ADDR_EXPR:
4837 	  if (op->op0)
4838 	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
4839 	  return false;
4840 	default:;
4841 	}
4842     }
4843   return false;
4844 }
4845 
4846 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
4847 					    bitmap inserted_exprs_)
4848   : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
4849     el_todo (0), eliminations (0), insertions (0),
4850     inserted_exprs (inserted_exprs_)
4851 {
4852   need_eh_cleanup = BITMAP_ALLOC (NULL);
4853   need_ab_cleanup = BITMAP_ALLOC (NULL);
4854 }
4855 
4856 eliminate_dom_walker::~eliminate_dom_walker ()
4857 {
4858   BITMAP_FREE (need_eh_cleanup);
4859   BITMAP_FREE (need_ab_cleanup);
4860 }
4861 
4862 /* Return a leader for OP that is available at the current point of the
4863    eliminate domwalk.  */
4864 
4865 tree
4866 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
4867 {
4868   tree valnum = VN_INFO (op)->valnum;
4869   if (TREE_CODE (valnum) == SSA_NAME)
4870     {
4871       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
4872 	return valnum;
4873       if (avail.length () > SSA_NAME_VERSION (valnum))
4874 	return avail[SSA_NAME_VERSION (valnum)];
4875     }
4876   else if (is_gimple_min_invariant (valnum))
4877     return valnum;
4878   return NULL_TREE;
4879 }
4880 
4881 /* At the current point of the eliminate domwalk make OP available.  */
4882 
4883 void
4884 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
4885 {
4886   tree valnum = VN_INFO (op)->valnum;
4887   if (TREE_CODE (valnum) == SSA_NAME)
4888     {
4889       if (avail.length () <= SSA_NAME_VERSION (valnum))
4890 	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4891       tree pushop = op;
4892       if (avail[SSA_NAME_VERSION (valnum)])
4893 	pushop = avail[SSA_NAME_VERSION (valnum)];
4894       avail_stack.safe_push (pushop);
4895       avail[SSA_NAME_VERSION (valnum)] = op;
4896     }
4897 }
4898 
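/* Note avail and avail_stack together implement a scoped leader set:
   pushing OP for value V saves the previous leader of V (or OP itself
   if there was none) on avail_stack so after_dom_children can undo
   the effect when leaving the dominance subtree.  E.g. making x_1 and
   then x_2 available for the same value makes x_2 the current leader;
   unwinding restores x_1.  */
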
4899 /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
4900    the leader for the expression if insertion was successful.  */
4901 
4902 tree
4903 eliminate_dom_walker::eliminate_insert (basic_block bb,
4904 					gimple_stmt_iterator *gsi, tree val)
4905 {
4906   /* We can only insert a sequence with a single assignment.  */
4907   gimple_seq stmts = VN_INFO (val)->expr;
4908   if (!gimple_seq_singleton_p (stmts))
4909     return NULL_TREE;
4910   gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
4911   if (!stmt
4912       || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
4913 	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
4914 	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
4915 	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
4916 	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
4917     return NULL_TREE;
4918 
4919   tree op = gimple_assign_rhs1 (stmt);
4920   if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
4921       || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4922     op = TREE_OPERAND (op, 0);
4923   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
4924   if (!leader)
4925     return NULL_TREE;
4926 
4927   tree res;
4928   stmts = NULL;
4929   if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4930     res = gimple_build (&stmts, BIT_FIELD_REF,
4931 			TREE_TYPE (val), leader,
4932 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
4933 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
4934   else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
4935     res = gimple_build (&stmts, BIT_AND_EXPR,
4936 			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
4937   else
4938     res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
4939 			TREE_TYPE (val), leader);
4940   if (TREE_CODE (res) != SSA_NAME
4941       || SSA_NAME_IS_DEFAULT_DEF (res)
4942       || gimple_bb (SSA_NAME_DEF_STMT (res)))
4943     {
4944       gimple_seq_discard (stmts);
4945 
4946       /* During propagation we have to treat SSA info conservatively
4947          and thus we can end up simplifying the inserted expression
4948 	 at elimination time to something not defined in stmts.  */
4949       /* But then this is a redundancy we failed to detect.  Which means
4950          res now has two values.  That doesn't play well with how
4951 	 we track availability here, so give up.  */
4952       if (dump_file && (dump_flags & TDF_DETAILS))
4953 	{
4954 	  if (TREE_CODE (res) == SSA_NAME)
4955 	    res = eliminate_avail (bb, res);
4956 	  if (res)
4957 	    {
4958 	      fprintf (dump_file, "Failed to insert expression for value ");
4959 	      print_generic_expr (dump_file, val);
4960 	      fprintf (dump_file, " which is really fully redundant to ");
4961 	      print_generic_expr (dump_file, res);
4962 	      fprintf (dump_file, "\n");
4963 	    }
4964 	}
4965 
4966       return NULL_TREE;
4967     }
4968   else
4969     {
4970       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4971       VN_INFO (res)->valnum = val;
4972       VN_INFO (res)->visited = true;
4973     }
4974 
4975   insertions++;
4976   if (dump_file && (dump_flags & TDF_DETAILS))
4977     {
4978       fprintf (dump_file, "Inserted ");
4979       print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
4980     }
4981 
4982   return res;
4983 }
4984 
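/* A sketch of the above: if VAL's recorded expression is the single
   conversion
     val_5 = (int) _3;
   and _3 has the available leader _7, we materialize
     _8 = (int) _7;
   before *GSI, record VN_INFO (_8)->valnum = val_5 and return _8 as
   the new leader.  */
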
4985 void
4986 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
4987 {
4988   tree sprime = NULL_TREE;
4989   gimple *stmt = gsi_stmt (*gsi);
4990   tree lhs = gimple_get_lhs (stmt);
4991   if (lhs && TREE_CODE (lhs) == SSA_NAME
4992       && !gimple_has_volatile_ops (stmt)
4993       /* See PR43491.  Do not replace a global register variable when
4994 	 it is the RHS of an assignment.  Do replace local register
4995 	 variables since gcc does not guarantee a local variable will
4996 	 be allocated in a register.
4997 	 ???  The fix isn't effective here.  This should instead
4998 	 be ensured by not value-numbering them the same but treating
4999 	 them like volatiles?  */
5000       && !(gimple_assign_single_p (stmt)
5001 	   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5002 	       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5003 	       && is_global_var (gimple_assign_rhs1 (stmt)))))
5004     {
5005       sprime = eliminate_avail (b, lhs);
5006       if (!sprime)
5007 	{
5008 	  /* If there is no existing usable leader but SCCVN thinks
5009 	     it has an expression it wants to use as replacement,
5010 	     insert that.  */
5011 	  tree val = VN_INFO (lhs)->valnum;
5012 	  if (val != VN_TOP
5013 	      && TREE_CODE (val) == SSA_NAME
5014 	      && VN_INFO (val)->needs_insertion
5015 	      && VN_INFO (val)->expr != NULL
5016 	      && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
5017 	    eliminate_push_avail (b, sprime);
5018 	}
5019 
5020       /* If this now constitutes a copy, duplicate points-to
5021 	 and range info appropriately.  This is especially
5022 	 important for inserted code.  See tree-ssa-copy.c
5023 	 for similar code.  */
5024       if (sprime
5025 	  && TREE_CODE (sprime) == SSA_NAME)
5026 	{
5027 	  basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5028 	  if (POINTER_TYPE_P (TREE_TYPE (lhs))
5029 	      && SSA_NAME_PTR_INFO (lhs)
5030 	      && ! SSA_NAME_PTR_INFO (sprime))
5031 	    {
5032 	      duplicate_ssa_name_ptr_info (sprime,
5033 					   SSA_NAME_PTR_INFO (lhs));
5034 	      if (b != sprime_b)
5035 		mark_ptr_info_alignment_unknown
5036 		    (SSA_NAME_PTR_INFO (sprime));
5037 	    }
5038 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5039 		   && SSA_NAME_RANGE_INFO (lhs)
5040 		   && ! SSA_NAME_RANGE_INFO (sprime)
5041 		   && b == sprime_b)
5042 	    duplicate_ssa_name_range_info (sprime,
5043 					   SSA_NAME_RANGE_TYPE (lhs),
5044 					   SSA_NAME_RANGE_INFO (lhs));
5045 	}
5046 
5047       /* Inhibit the use of an inserted PHI on a loop header when
5048 	 the address of the memory reference is a simple induction
5049 	 variable.  In other cases the vectorizer won't do anything
5050 	 anyway (either it's loop invariant or a complicated
5051 	 expression).  */
5052       if (sprime
5053 	  && TREE_CODE (sprime) == SSA_NAME
5054 	  && do_pre
5055 	  && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5056 	  && loop_outer (b->loop_father)
5057 	  && has_zero_uses (sprime)
5058 	  && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5059 	  && gimple_assign_load_p (stmt))
5060 	{
5061 	  gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5062 	  basic_block def_bb = gimple_bb (def_stmt);
5063 	  if (gimple_code (def_stmt) == GIMPLE_PHI
5064 	      && def_bb->loop_father->header == def_bb)
5065 	    {
5066 	      loop_p loop = def_bb->loop_father;
5067 	      ssa_op_iter iter;
5068 	      tree op;
5069 	      bool found = false;
5070 	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5071 		{
5072 		  affine_iv iv;
5073 		  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5074 		  if (def_bb
5075 		      && flow_bb_inside_loop_p (loop, def_bb)
5076 		      && simple_iv (loop, loop, op, &iv, true))
5077 		    {
5078 		      found = true;
5079 		      break;
5080 		    }
5081 		}
5082 	      if (found)
5083 		{
5084 		  if (dump_file && (dump_flags & TDF_DETAILS))
5085 		    {
5086 		      fprintf (dump_file, "Not replacing ");
5087 		      print_gimple_expr (dump_file, stmt, 0);
5088 		      fprintf (dump_file, " with ");
5089 		      print_generic_expr (dump_file, sprime);
5090 		      fprintf (dump_file, " which would add a loop"
5091 			       " carried dependence to loop %d\n",
5092 			       loop->num);
5093 		    }
5094 		  /* Don't keep sprime available.  */
5095 		  sprime = NULL_TREE;
5096 		}
5097 	    }
5098 	}
5099 
5100       if (sprime)
5101 	{
5102 	  /* If we can propagate the value computed for LHS into
5103 	     all uses don't bother doing anything with this stmt.  */
5104 	  if (may_propagate_copy (lhs, sprime))
5105 	    {
5106 	      /* Mark it for removal.  */
5107 	      to_remove.safe_push (stmt);
5108 
5109 	      /* ???  Don't count copy/constant propagations.  */
5110 	      if (gimple_assign_single_p (stmt)
5111 		  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5112 		      || gimple_assign_rhs1 (stmt) == sprime))
5113 		return;
5114 
5115 	      if (dump_file && (dump_flags & TDF_DETAILS))
5116 		{
5117 		  fprintf (dump_file, "Replaced ");
5118 		  print_gimple_expr (dump_file, stmt, 0);
5119 		  fprintf (dump_file, " with ");
5120 		  print_generic_expr (dump_file, sprime);
5121 		  fprintf (dump_file, " in all uses of ");
5122 		  print_gimple_stmt (dump_file, stmt, 0);
5123 		}
5124 
5125 	      eliminations++;
5126 	      return;
5127 	    }
5128 
5129 	  /* If this is an assignment from our leader (which
5130 	     happens in the case the value-number is a constant)
5131 	     then there is nothing to do.  */
5132 	  if (gimple_assign_single_p (stmt)
5133 	      && sprime == gimple_assign_rhs1 (stmt))
5134 	    return;
5135 
5136 	  /* Else replace its RHS.  */
5137 	  if (dump_file && (dump_flags & TDF_DETAILS))
5138 	    {
5139 	      fprintf (dump_file, "Replaced ");
5140 	      print_gimple_expr (dump_file, stmt, 0);
5141 	      fprintf (dump_file, " with ");
5142 	      print_generic_expr (dump_file, sprime);
5143 	      fprintf (dump_file, " in ");
5144 	      print_gimple_stmt (dump_file, stmt, 0);
5145 	    }
5146 	  eliminations++;
5147 
5148 	  bool can_make_abnormal_goto = (is_gimple_call (stmt)
5149 					 && stmt_can_make_abnormal_goto (stmt));
5150 	  gimple *orig_stmt = stmt;
5151 	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
5152 					  TREE_TYPE (sprime)))
5153 	    {
5154 	      /* We preserve conversions to but not from function or method
5155 		 types.  This asymmetry makes it necessary to re-instantiate
5156 		 conversions here.  */
5157 	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
5158 		  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
5159 		sprime = fold_convert (TREE_TYPE (lhs), sprime);
5160 	      else
5161 		gcc_unreachable ();
5162 	    }
5163 	  tree vdef = gimple_vdef (stmt);
5164 	  tree vuse = gimple_vuse (stmt);
5165 	  propagate_tree_value_into_stmt (gsi, sprime);
5166 	  stmt = gsi_stmt (*gsi);
5167 	  update_stmt (stmt);
5168 	  /* In case the VDEF on the original stmt was released, value-number
5169 	     it to the VUSE.  This is to make vuse_ssa_val able to skip
5170 	     released virtual operands.  */
5171 	  if (vdef != gimple_vdef (stmt))
5172 	    {
5173 	      gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
5174 	      VN_INFO (vdef)->valnum = vuse;
5175 	    }
5176 
5177 	  /* If we removed EH side-effects from the statement, clean
5178 	     its EH information.  */
5179 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5180 	    {
5181 	      bitmap_set_bit (need_eh_cleanup,
5182 			      gimple_bb (stmt)->index);
5183 	      if (dump_file && (dump_flags & TDF_DETAILS))
5184 		fprintf (dump_file, "  Removed EH side-effects.\n");
5185 	    }
5186 
5187 	  /* Likewise for AB side-effects.  */
5188 	  if (can_make_abnormal_goto
5189 	      && !stmt_can_make_abnormal_goto (stmt))
5190 	    {
5191 	      bitmap_set_bit (need_ab_cleanup,
5192 			      gimple_bb (stmt)->index);
5193 	      if (dump_file && (dump_flags & TDF_DETAILS))
5194 		fprintf (dump_file, "  Removed AB side-effects.\n");
5195 	    }
5196 
5197 	  return;
5198 	}
5199     }
5200 
5201   /* If the statement is a scalar store, see if the expression
5202      has the same value number as its rhs.  If so, the store is
5203      dead.  */
5204   if (gimple_assign_single_p (stmt)
5205       && !gimple_has_volatile_ops (stmt)
5206       && !is_gimple_reg (gimple_assign_lhs (stmt))
5207       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5208 	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5209     {
5210       tree val;
5211       tree rhs = gimple_assign_rhs1 (stmt);
5212       vn_reference_t vnresult;
5213       val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5214 				 &vnresult, false);
5215       if (TREE_CODE (rhs) == SSA_NAME)
5216 	rhs = VN_INFO (rhs)->valnum;
5217       if (val
5218 	  && operand_equal_p (val, rhs, 0))
5219 	{
5220 	  /* We can only remove the later store if the former aliases
5221 	     at least all accesses the later one does or if the store
5222 	     was to readonly memory storing the same value.  */
5223 	  alias_set_type set = get_alias_set (lhs);
5224 	  if (! vnresult
5225 	      || vnresult->set == set
5226 	      || alias_set_subset_of (set, vnresult->set))
5227 	    {
5228 	      if (dump_file && (dump_flags & TDF_DETAILS))
5229 		{
5230 		  fprintf (dump_file, "Deleted redundant store ");
5231 		  print_gimple_stmt (dump_file, stmt, 0);
5232 		}
5233 
5234 	      /* Queue stmt for removal.  */
5235 	      to_remove.safe_push (stmt);
5236 	      return;
5237 	    }
5238 	}
5239     }
5240 
5241   /* If this is a control statement and value numbering left some of
5242      its outgoing edges unexecuted, force the condition in a way
5243      consistent with that.  */
5244   if (gcond *cond = dyn_cast <gcond *> (stmt))
5245     {
5246       if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5247 	  ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5248 	{
5249 	  if (dump_file && (dump_flags & TDF_DETAILS))
5250 	    {
5251 	      fprintf (dump_file, "Removing unexecutable edge from ");
5252 	      print_gimple_stmt (dump_file, stmt, 0);
5253 	    }
5254 	  if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5255 	      == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5256 	    gimple_cond_make_true (cond);
5257 	  else
5258 	    gimple_cond_make_false (cond);
5259 	  update_stmt (cond);
5260 	  el_todo |= TODO_cleanup_cfg;
5261 	  return;
5262 	}
5263     }
5264 
5265   bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5266   bool was_noreturn = (is_gimple_call (stmt)
5267 		       && gimple_call_noreturn_p (stmt));
5268   tree vdef = gimple_vdef (stmt);
5269   tree vuse = gimple_vuse (stmt);
5270 
5271   /* If we didn't replace the whole stmt (or propagate the result
5272      into all uses), replace all uses on this stmt with their
5273      leaders.  */
5274   bool modified = false;
5275   use_operand_p use_p;
5276   ssa_op_iter iter;
5277   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5278     {
5279       tree use = USE_FROM_PTR (use_p);
5280       /* ???  The call code above leaves stmt operands un-updated.  */
5281       if (TREE_CODE (use) != SSA_NAME)
5282 	continue;
5283       tree sprime;
5284       if (SSA_NAME_IS_DEFAULT_DEF (use))
5285 	/* ???  For default defs BB shouldn't matter, but we have to
5286 	   solve the inconsistency between rpo eliminate and
5287 	   dom eliminate avail valueization first.  */
5288 	sprime = eliminate_avail (b, use);
5289       else
5290 	/* Look for something available at the definition block of the argument.
5291 	   This avoids inconsistencies between availability there which
5292 	   decides if the stmt can be removed and availability at the
5293 	   use site.  The SSA property ensures that things available
5294 	   at the definition are also available at uses.  */
5295 	sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5296       if (sprime && sprime != use
5297 	  && may_propagate_copy (use, sprime)
5298 	  /* We substitute into debug stmts to avoid excessive
5299 	     debug temporaries created by removed stmts, but we need
5300 	     to avoid doing so for inserted sprimes as we never want
5301 	     to create debug temporaries for them.  */
5302 	  && (!inserted_exprs
5303 	      || TREE_CODE (sprime) != SSA_NAME
5304 	      || !is_gimple_debug (stmt)
5305 	      || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5306 	{
5307 	  propagate_value (use_p, sprime);
5308 	  modified = true;
5309 	}
5310     }
5311 
5312   /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5313      into, which is a requirement for the IPA devirt machinery.  */
5314   gimple *old_stmt = stmt;
5315   if (modified)
5316     {
5317       /* If a formerly non-invariant ADDR_EXPR is turned into an
5318 	 invariant one it was on a separate stmt.  */
5319       if (gimple_assign_single_p (stmt)
5320 	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5321 	recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5322       gimple_stmt_iterator prev = *gsi;
5323       gsi_prev (&prev);
5324       if (fold_stmt (gsi))
5325 	{
5326 	  /* fold_stmt may have created new stmts in between
5327 	     the previous stmt and the folded stmt.  Mark
5328 	     all defs created there as varying to not confuse
5329 	     the SCCVN machinery as we're using that even during
5330 	     elimination.  */
5331 	  if (gsi_end_p (prev))
5332 	    prev = gsi_start_bb (b);
5333 	  else
5334 	    gsi_next (&prev);
5335 	  if (gsi_stmt (prev) != gsi_stmt (*gsi))
5336 	    do
5337 	      {
5338 		tree def;
5339 		ssa_op_iter dit;
5340 		FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5341 					   dit, SSA_OP_ALL_DEFS)
5342 		    /* As existing DEFs may move between stmts
5343 		       only process new ones.  */
5344 		    if (! has_VN_INFO (def))
5345 		      {
5346 			VN_INFO (def)->valnum = def;
5347 			VN_INFO (def)->visited = true;
5348 		      }
5349 		if (gsi_stmt (prev) == gsi_stmt (*gsi))
5350 		  break;
5351 		gsi_next (&prev);
5352 	      }
5353 	    while (1);
5354 	}
5355       stmt = gsi_stmt (*gsi);
5356       /* In case we folded the stmt away schedule the NOP for removal.  */
5357       if (gimple_nop_p (stmt))
5358 	to_remove.safe_push (stmt);
5359     }
5360 
5361   /* Visit indirect calls and turn them into direct calls if
5362      possible using the devirtualization machinery.  Do this before
5363      checking for required EH/abnormal/noreturn cleanup as devird
5364      checking for required EH/abnormal/noreturn cleanup as devirt
5365   if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5366     {
5367       tree fn = gimple_call_fn (call_stmt);
5368       if (fn
5369 	  && flag_devirtualize
5370 	  && virtual_method_call_p (fn))
5371 	{
5372 	  tree otr_type = obj_type_ref_class (fn);
5373 	  unsigned HOST_WIDE_INT otr_tok
5374 	      = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5375 	  tree instance;
5376 	  ipa_polymorphic_call_context context (current_function_decl,
5377 						fn, stmt, &instance);
5378 	  context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5379 				    otr_type, stmt, NULL);
5380 	  bool final;
5381 	  vec <cgraph_node *> targets
5382 	      = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5383 						   otr_tok, context, &final);
5384 	  if (dump_file)
5385 	    dump_possible_polymorphic_call_targets (dump_file,
5386 						    obj_type_ref_class (fn),
5387 						    otr_tok, context);
5388 	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
5389 	    {
5390 	      tree fn;
5391 	      if (targets.length () == 1)
5392 		fn = targets[0]->decl;
5393 	      else
5394 		fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5395 	      if (dump_enabled_p ())
5396 		{
5397 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5398 				   "converting indirect call to "
5399 				   "function %s\n",
5400 				   lang_hooks.decl_printable_name (fn, 2));
5401 		}
5402 	      gimple_call_set_fndecl (call_stmt, fn);
5403 	      /* If changing the call to __builtin_unreachable
5404 		 or similar noreturn function, adjust gimple_call_fntype
5405 		 too.  */
5406 	      if (gimple_call_noreturn_p (call_stmt)
5407 		  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5408 		  && TYPE_ARG_TYPES (TREE_TYPE (fn))
5409 		  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5410 		      == void_type_node))
5411 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5412 	      maybe_remove_unused_call_args (cfun, call_stmt);
5413 	      modified = true;
5414 	    }
5415 	}
5416     }
5417 
5418   if (modified)
5419     {
5420       /* When changing a call into a noreturn call, cfg cleanup
5421 	 is needed to fix up the noreturn call.  */
5422       if (!was_noreturn
5423 	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5424 	to_fixup.safe_push  (stmt);
5425       /* When changing a condition or switch into one where we know which
5426 	 edge will be executed, schedule a cfg cleanup.  */
5427       if ((gimple_code (stmt) == GIMPLE_COND
5428 	   && (gimple_cond_true_p (as_a <gcond *> (stmt))
5429 	       || gimple_cond_false_p (as_a <gcond *> (stmt))))
5430 	  || (gimple_code (stmt) == GIMPLE_SWITCH
5431 	      && TREE_CODE (gimple_switch_index
5432 			    (as_a <gswitch *> (stmt))) == INTEGER_CST))
5433 	el_todo |= TODO_cleanup_cfg;
5434       /* If we removed EH side-effects from the statement, clean
5435 	 its EH information.  */
5436       if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5437 	{
5438 	  bitmap_set_bit (need_eh_cleanup,
5439 			  gimple_bb (stmt)->index);
5440 	  if (dump_file && (dump_flags & TDF_DETAILS))
5441 	    fprintf (dump_file, "  Removed EH side-effects.\n");
5442 	}
5443       /* Likewise for AB side-effects.  */
5444       if (can_make_abnormal_goto
5445 	  && !stmt_can_make_abnormal_goto (stmt))
5446 	{
5447 	  bitmap_set_bit (need_ab_cleanup,
5448 			  gimple_bb (stmt)->index);
5449 	  if (dump_file && (dump_flags & TDF_DETAILS))
5450 	    fprintf (dump_file, "  Removed AB side-effects.\n");
5451 	}
5452       update_stmt (stmt);
5453       /* In case the VDEF on the original stmt was released, value-number
5454          it to the VUSE.  This is to make vuse_ssa_val able to skip
5455 	 released virtual operands.  */
5456       if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
5457 	VN_INFO (vdef)->valnum = vuse;
5458     }
5459 
5460   /* Make new values available - for fully redundant LHS we
5461      continue with the next stmt above and skip this.  */
5462   def_operand_p defp;
5463   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5464     eliminate_push_avail (b, DEF_FROM_PTR (defp));
5465 }
5466 
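/* A sketch of the full-redundancy case handled above: given
     _1 = a_2 + b_3;
     ...
     _4 = a_2 + b_3;
   value numbering records VN_INFO (_4)->valnum == _1, eliminate_avail
   returns _1 as leader, the second stmt is queued in to_remove and
   uses of _4 are rewritten to use _1.  */
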
5467 /* Perform elimination for the basic-block B during the domwalk.  */
5468 
5469 edge
5470 eliminate_dom_walker::before_dom_children (basic_block b)
5471 {
5472   /* Mark new bb.  */
5473   avail_stack.safe_push (NULL_TREE);
5474 
5475   /* Skip blocks marked unreachable during the SCCVN domwalk.  */
5476   if (!(b->flags & BB_EXECUTABLE))
5477     return NULL;
5478 
5479   vn_context_bb = b;
5480 
5481   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5482     {
5483       gphi *phi = gsi.phi ();
5484       tree res = PHI_RESULT (phi);
5485 
5486       if (virtual_operand_p (res))
5487 	{
5488 	  gsi_next (&gsi);
5489 	  continue;
5490 	}
5491 
5492       tree sprime = eliminate_avail (b, res);
5493       if (sprime
5494 	  && sprime != res)
5495 	{
5496 	  if (dump_file && (dump_flags & TDF_DETAILS))
5497 	    {
5498 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
5499 	      print_generic_expr (dump_file, res);
5500 	      fprintf (dump_file, " with ");
5501 	      print_generic_expr (dump_file, sprime);
5502 	      fprintf (dump_file, "\n");
5503 	    }
5504 
5505 	  /* If we inserted this PHI node ourselves, it's not an elimination.  */
5506 	  if (! inserted_exprs
5507 	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5508 	    eliminations++;
5509 
5510 	  /* If we will propagate into all uses don't bother to do
5511 	     anything.  */
5512 	  if (may_propagate_copy (res, sprime))
5513 	    {
5514 	      /* Mark the PHI for removal.  */
5515 	      to_remove.safe_push (phi);
5516 	      gsi_next (&gsi);
5517 	      continue;
5518 	    }
5519 
5520 	  remove_phi_node (&gsi, false);
5521 
5522 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5523 	    sprime = fold_convert (TREE_TYPE (res), sprime);
5524 	  gimple *stmt = gimple_build_assign (res, sprime);
5525 	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5526 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5527 	  continue;
5528 	}
5529 
5530       eliminate_push_avail (b, res);
5531       gsi_next (&gsi);
5532     }
5533 
5534   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5535        !gsi_end_p (gsi);
5536        gsi_next (&gsi))
5537     eliminate_stmt (b, &gsi);
5538 
5539   /* Replace destination PHI arguments.  */
5540   edge_iterator ei;
5541   edge e;
5542   FOR_EACH_EDGE (e, ei, b->succs)
5543     if (e->flags & EDGE_EXECUTABLE)
5544       for (gphi_iterator gsi = gsi_start_phis (e->dest);
5545 	   !gsi_end_p (gsi);
5546 	   gsi_next (&gsi))
5547 	{
5548 	  gphi *phi = gsi.phi ();
5549 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5550 	  tree arg = USE_FROM_PTR (use_p);
5551 	  if (TREE_CODE (arg) != SSA_NAME
5552 	      || virtual_operand_p (arg))
5553 	    continue;
5554 	  tree sprime = eliminate_avail (b, arg);
5555 	  if (sprime && may_propagate_copy (arg, sprime))
5556 	    propagate_value (use_p, sprime);
5557 	}
5558 
5559   vn_context_bb = NULL;
5560 
5561   return NULL;
5562 }
5563 
5564 /* Make leaders that went out of scope no longer available.  */
5565 
5566 void
5567 eliminate_dom_walker::after_dom_children (basic_block)
5568 {
5569   tree entry;
5570   while ((entry = avail_stack.pop ()) != NULL_TREE)
5571     {
5572       tree valnum = VN_INFO (entry)->valnum;
5573       tree old = avail[SSA_NAME_VERSION (valnum)];
5574       if (old == entry)
5575 	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5576       else
5577 	avail[SSA_NAME_VERSION (valnum)] = entry;
5578     }
5579 }
5580 
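/* Note before_dom_children pushes a NULL_TREE marker for each block,
   so the loop above pops exactly the leaders made available in the
   block being left, restoring any previous leader that
   eliminate_push_avail saved.  */
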
5581 /* Remove queued stmts and perform delayed cleanups.  */
5582 
5583 unsigned
5584 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5585 {
5586   statistics_counter_event (cfun, "Eliminated", eliminations);
5587   statistics_counter_event (cfun, "Insertions", insertions);
5588 
5589   /* We cannot remove stmts during the BB walk, especially not release
5590      SSA names there, as this confuses the VN machinery.  The stmts ending
5591      up in to_remove are either stores or simple copies.
5592      Remove stmts in reverse order to make debug stmt creation possible.  */
5593   while (!to_remove.is_empty ())
5594     {
5595       bool do_release_defs = true;
5596       gimple *stmt = to_remove.pop ();
5597 
5598       /* When we are value-numbering a region we do not require exit PHIs
5599 	 to be present, so we have to deal with out-of-region uses of stmts
5600 	 that we thought were eliminated.
5601 	 ??? Note we may be confused by uses in dead regions we didn't run
5602 	 elimination on.  Rather than checking individual uses we accept
5603 	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5604 	 contains such an example).  */
5605       if (region_p)
5606 	{
5607 	  if (gphi *phi = dyn_cast <gphi *> (stmt))
5608 	    {
5609 	      tree lhs = gimple_phi_result (phi);
5610 	      if (!has_zero_uses (lhs))
5611 		{
5612 		  if (dump_file && (dump_flags & TDF_DETAILS))
5613 		    fprintf (dump_file, "Keeping eliminated stmt live "
5614 			     "as copy because of out-of-region uses\n");
5615 		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5616 		  gimple *copy = gimple_build_assign (lhs, sprime);
5617 		  gimple_stmt_iterator gsi
5618 		    = gsi_after_labels (gimple_bb (stmt));
5619 		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5620 		  do_release_defs = false;
5621 		}
5622 	    }
5623 	  else if (tree lhs = gimple_get_lhs (stmt))
5624 	    if (TREE_CODE (lhs) == SSA_NAME
5625 		&& !has_zero_uses (lhs))
5626 	      {
5627 		if (dump_file && (dump_flags & TDF_DETAILS))
5628 		  fprintf (dump_file, "Keeping eliminated stmt live "
5629 			   "as copy because of out-of-region uses\n");
5630 		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5631 		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5632 		if (is_gimple_assign (stmt))
5633 		  {
5634 		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
5635 		    stmt = gsi_stmt (gsi);
5636 		    update_stmt (stmt);
5637 		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
5638 		      bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
5639 		    continue;
5640 		  }
5641 		else
5642 		  {
5643 		    gimple *copy = gimple_build_assign (lhs, sprime);
5644 		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5645 		    do_release_defs = false;
5646 		  }
5647 	      }
5648 	}
5649 
5650       if (dump_file && (dump_flags & TDF_DETAILS))
5651 	{
5652 	  fprintf (dump_file, "Removing dead stmt ");
5653 	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5654 	}
5655 
5656       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5657       if (gimple_code (stmt) == GIMPLE_PHI)
5658 	remove_phi_node (&gsi, do_release_defs);
5659       else
5660 	{
5661 	  basic_block bb = gimple_bb (stmt);
5662 	  unlink_stmt_vdef (stmt);
5663 	  if (gsi_remove (&gsi, true))
5664 	    bitmap_set_bit (need_eh_cleanup, bb->index);
5665 	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5666 	    bitmap_set_bit (need_ab_cleanup, bb->index);
5667 	  if (do_release_defs)
5668 	    release_defs (stmt);
5669 	}
5670 
5671       /* Removing a stmt may expose a forwarder block.  */
5672       el_todo |= TODO_cleanup_cfg;
5673     }
5674 
5675   /* Fix up stmts that became noreturn calls.  This may require splitting
5676      blocks and thus isn't possible during the dominator walk.  Do this
5677      in reverse order so we don't inadvertently remove a stmt we want to
5678      fixup by visiting a dominating now noreturn call first.  */
5679   while (!to_fixup.is_empty ())
5680     {
5681       gimple *stmt = to_fixup.pop ();
5682 
5683       if (dump_file && (dump_flags & TDF_DETAILS))
5684 	{
5685 	  fprintf (dump_file, "Fixing up noreturn call ");
5686 	  print_gimple_stmt (dump_file, stmt, 0);
5687 	}
5688 
5689       if (fixup_noreturn_call (stmt))
5690 	el_todo |= TODO_cleanup_cfg;
5691     }
5692 
5693   bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
5694   bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
5695 
5696   if (do_eh_cleanup)
5697     gimple_purge_all_dead_eh_edges (need_eh_cleanup);
5698 
5699   if (do_ab_cleanup)
5700     gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
5701 
5702   if (do_eh_cleanup || do_ab_cleanup)
5703     el_todo |= TODO_cleanup_cfg;
5704 
5705   return el_todo;
5706 }
5707 
5708 /* Eliminate fully redundant computations.  */
5709 
5710 unsigned
5711 eliminate_with_rpo_vn (bitmap inserted_exprs)
5712 {
5713   eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
5714 
5715   walker.walk (cfun->cfg->x_entry_block_ptr);
5716   return walker.eliminate_cleanup ();
5717 }
5718 
5719 static unsigned
5720 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
5721 	   bool iterate, bool eliminate);
5722 
5723 void
5724 run_rpo_vn (vn_lookup_kind kind)
5725 {
5726   default_vn_walk_kind = kind;
5727   do_rpo_vn (cfun, NULL, NULL, true, false);
5728 
5729   /* ???  Prune requirement of these.  */
5730   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
5731   constant_value_ids = BITMAP_ALLOC (NULL);
5732 
5733   /* Initialize the value ids and prune out remaining VN_TOPs
5734      from dead code.  */
5735   tree name;
5736   unsigned i;
5737   FOR_EACH_SSA_NAME (i, name, cfun)
5738     {
5739       vn_ssa_aux_t info = VN_INFO (name);
5740       if (!info->visited
5741 	  || info->valnum == VN_TOP)
5742 	info->valnum = name;
5743       if (info->valnum == name)
5744 	info->value_id = get_next_value_id ();
5745       else if (is_gimple_min_invariant (info->valnum))
5746 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
5747     }
5748 
5749   /* Propagate.  */
5750   FOR_EACH_SSA_NAME (i, name, cfun)
5751     {
5752       vn_ssa_aux_t info = VN_INFO (name);
5753       if (TREE_CODE (info->valnum) == SSA_NAME
5754 	  && info->valnum != name
5755 	  && info->value_id != VN_INFO (info->valnum)->value_id)
5756 	info->value_id = VN_INFO (info->valnum)->value_id;
5757     }
5758 
5759   set_hashtable_value_ids ();
5760 
5761   if (dump_file && (dump_flags & TDF_DETAILS))
5762     {
5763       fprintf (dump_file, "Value numbers:\n");
5764       FOR_EACH_SSA_NAME (i, name, cfun)
5765 	{
5766 	  if (VN_INFO (name)->visited
5767 	      && SSA_VAL (name) != name)
5768 	    {
5769 	      print_generic_expr (dump_file, name);
5770 	      fprintf (dump_file, " = ");
5771 	      print_generic_expr (dump_file, SSA_VAL (name));
5772 	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
5773 	    }
5774 	}
5775     }
5776 }
5777 
5778 /* Free VN associated data structures.  */
5779 
5780 void
5781 free_rpo_vn (void)
5782 {
5783   free_vn_table (valid_info);
5784   XDELETE (valid_info);
5785   obstack_free (&vn_tables_obstack, NULL);
5786   obstack_free (&vn_tables_insert_obstack, NULL);
5787 
5788   vn_ssa_aux_iterator_type it;
5789   vn_ssa_aux_t info;
5790   FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
5791     if (info->needs_insertion)
5792       release_ssa_name (info->name);
5793   obstack_free (&vn_ssa_aux_obstack, NULL);
5794   delete vn_ssa_aux_hash;
5795 
5796   delete constant_to_value_id;
5797   constant_to_value_id = NULL;
5798   BITMAP_FREE (constant_value_ids);
5799 }
5800 
5801 /* Hook for maybe_push_res_to_seq, look up the expression in the VN tables.  */
5802 
5803 static tree
5804 vn_lookup_simplify_result (gimple_match_op *res_op)
5805 {
5806   if (!res_op->code.is_tree_code ())
5807     return NULL_TREE;
5808   tree *ops = res_op->ops;
5809   unsigned int length = res_op->num_ops;
5810   if (res_op->code == CONSTRUCTOR
5811       /* ???  SCCVN's view here is the decomposed CONSTRUCTOR, while
5812          GIMPLE / match-and-simplify has the CONSTRUCTOR as a GENERIC tree.  */
5813       && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
5814     {
5815       length = CONSTRUCTOR_NELTS (res_op->ops[0]);
5816       ops = XALLOCAVEC (tree, length);
5817       for (unsigned i = 0; i < length; ++i)
5818 	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
5819     }
5820   vn_nary_op_t vnresult = NULL;
5821   tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
5822 				       res_op->type, ops, &vnresult);
5823   /* If this is used from expression simplification make sure to
5824      return an available expression.  */
5825   if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
5826     res = rpo_avail->eliminate_avail (vn_context_bb, res);
5827   return res;
5828 }
5829 
5830 rpo_elim::~rpo_elim ()
5831 {
5832   /* Release the avail vectors.  */
5833   for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
5834        i != m_rpo_avail.end (); ++i)
5835     (*i).second.release ();
5836 }
5837 
5838 /* Return a leader for OPs value that is valid at BB.  */
5839 
5840 tree
5841 rpo_elim::eliminate_avail (basic_block bb, tree op)
5842 {
5843   bool visited;
5844   tree valnum = SSA_VAL (op, &visited);
5845   /* If we didn't visit OP then it must be defined outside of the
5846      region we process and dominate it.  So OP is available.  */
5847   if (!visited)
5848     return op;
5849   if (TREE_CODE (valnum) == SSA_NAME)
5850     {
5851       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5852 	return valnum;
5853       vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
5854       if (!av || av->is_empty ())
5855 	return NULL_TREE;
5856       int i = av->length () - 1;
5857       if ((*av)[i].first == bb->index)
5858 	/* On tramp3d 90% of the cases are here.  */
5859 	return ssa_name ((*av)[i].second);
5860       do
5861 	{
5862 	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
5863 	  /* ???  During elimination we have to use availability at the
5864 	     definition site of a use we try to replace.  This
5865 	     is required to not run into inconsistencies because
5866 	     of dominated_by_p_w_unex behavior and removing a definition
5867 	     while not replacing all uses.
5868 	     ???  We could try to consistently walk dominators
5869 	     ignoring non-executable regions.  The nearest common
5870 	     dominator of bb and abb is where we can stop walking.  We
5871 	     may also be able to "pre-compute" (bits of) the next immediate
5872 	     (non-)dominator during the RPO walk when marking edges as
5873 	     executable.  */
5874 	  if (dominated_by_p_w_unex (bb, abb))
5875 	    {
5876 	      tree leader = ssa_name ((*av)[i].second);
5877 	      /* Prevent eliminations that break loop-closed SSA.  */
5878 	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
5879 		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
5880 		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
5881 							 (leader))->loop_father,
5882 					      bb))
5883 		return NULL_TREE;
5884 	      if (dump_file && (dump_flags & TDF_DETAILS))
5885 		{
5886 		  print_generic_expr (dump_file, leader);
5887 		  fprintf (dump_file, " is available for ");
5888 		  print_generic_expr (dump_file, valnum);
5889 		  fprintf (dump_file, "\n");
5890 		}
5891 	      /* On tramp3d 99% of the _remaining_ cases succeed at
5892 	         the first entry.  */
5893 	      return leader;
5894 	    }
5895 	  /* ???  Can we somehow skip to the immediate dominator
5896 	     RPO index (bb_to_rpo)?  Again, maybe not worth, on
5897 	     tramp3d the worst number of elements in the vector is 9.  */
5898 	}
5899       while (--i >= 0);
5900     }
5901   else if (valnum != VN_TOP)
5902     /* valnum is is_gimple_min_invariant.  */
5903     return valnum;
5904   return NULL_TREE;
5905 }
5906 
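/* Availability is tracked per value as a vector of (BB index, SSA
   version) pairs pushed in RPO order, so the walk above starts at the
   innermost (last) entry and takes the first one whose block
   dominates BB.  E.g. with entries for BB2 and BB5, a query from a
   block dominated by BB5 returns the BB5 leader without inspecting
   the BB2 entry.  */
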
5907 /* Make LEADER a leader for its value at BB.  */
5908 
5909 void
5910 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
5911 {
5912   tree valnum = VN_INFO (leader)->valnum;
5913   if (valnum == VN_TOP)
5914     return;
5915   if (dump_file && (dump_flags & TDF_DETAILS))
5916     {
5917       fprintf (dump_file, "Making available beyond BB%d ", bb->index);
5918       print_generic_expr (dump_file, leader);
5919       fprintf (dump_file, " for value ");
5920       print_generic_expr (dump_file, valnum);
5921       fprintf (dump_file, "\n");
5922     }
5923   bool existed;
5924   vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
5925   if (!existed)
5926     {
5927       new (&av) vec<std::pair<int, int> >;
5928       av = vNULL;
5929       av.reserve_exact (2);
5930     }
5931   av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
5932 }
5933 
5934 /* Valueization hook for RPO VN plus required state.  */
5935 
5936 tree
5937 rpo_vn_valueize (tree name)
5938 {
5939   if (TREE_CODE (name) == SSA_NAME)
5940     {
5941       vn_ssa_aux_t val = VN_INFO (name);
5942       if (val)
5943 	{
5944 	  tree tem = val->valnum;
5945 	  if (tem != VN_TOP && tem != name)
5946 	    {
5947 	      if (TREE_CODE (tem) != SSA_NAME)
5948 		return tem;
5949 	      /* For all values we only valueize to an available leader
5950 		 which means we can use SSA name info without restriction.  */
5951 	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
5952 	      if (tem)
5953 		return tem;
5954 	    }
5955 	}
5956     }
5957   return name;
5958 }
5959 
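/* For example, valueizing _1 with VN_INFO (_1)->valnum == _3 yields
   _3 only if _3 is available at vn_context_bb; otherwise _1 is
   returned unchanged, which keeps simplification results safe to
   substitute.  */
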
5960 /* Insert on PRED_E predicates derived from CODE applied to OPS being
5961    true, besides the inverted condition.  */
5962 
5963 static void
5964 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
5965 {
5966   switch (code)
5967     {
5968     case LT_EXPR:
5969       /* a < b -> a {!,<}= b */
5970       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5971 					   ops, boolean_true_node, 0, pred_e);
5972       vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
5973 					   ops, boolean_true_node, 0, pred_e);
5974       /* a < b -> ! a {>,=} b */
5975       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5976 					   ops, boolean_false_node, 0, pred_e);
5977       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5978 					   ops, boolean_false_node, 0, pred_e);
5979       break;
5980     case GT_EXPR:
5981       /* a > b -> a {!,>}= b */
5982       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5983 					   ops, boolean_true_node, 0, pred_e);
5984       vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
5985 					   ops, boolean_true_node, 0, pred_e);
5986       /* a > b -> ! a {<,=} b */
5987       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5988 					   ops, boolean_false_node, 0, pred_e);
5989       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5990 					   ops, boolean_false_node, 0, pred_e);
5991       break;
5992     case EQ_EXPR:
5993       /* a == b -> ! a {<,>} b */
5994       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5995 					   ops, boolean_false_node, 0, pred_e);
5996       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5997 					   ops, boolean_false_node, 0, pred_e);
5998       break;
5999     case LE_EXPR:
6000     case GE_EXPR:
6001     case NE_EXPR:
6002       /* Nothing besides inverted condition.  */
6003       break;
6004     default:;
6005     }
6006 }
6007 
6008 /* Main stmt worker for RPO VN, process BB.  */
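/* For example, for  if (a_1 < b_2)  the true edge gets, besides the
   condition itself, the derived records a_1 != b_2 == true,
   a_1 <= b_2 == true, a_1 > b_2 == false and a_1 == b_2 == false, so
   a later  if (a_1 <= b_2)  dominated by that edge can simplify via
   the predicated value lookup in process_bb.  */
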
6009 
6010 static unsigned
6011 process_bb (rpo_elim &avail, basic_block bb,
6012 	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
6013 	    bool do_region, bitmap exit_bbs, bool skip_phis)
6014 {
6015   unsigned todo = 0;
6016   edge_iterator ei;
6017   edge e;
6018 
6019   vn_context_bb = bb;
6020 
6021   /* If we are in loop-closed SSA preserve this state.  This is
6022      relevant when called on regions from outside of FRE/PRE.  */
6023   bool lc_phi_nodes = false;
6024   if (!skip_phis
6025       && loops_state_satisfies_p (LOOP_CLOSED_SSA))
6026     FOR_EACH_EDGE (e, ei, bb->preds)
6027       if (e->src->loop_father != e->dest->loop_father
6028 	  && flow_loop_nested_p (e->dest->loop_father,
6029 				 e->src->loop_father))
6030 	{
6031 	  lc_phi_nodes = true;
6032 	  break;
6033 	}
6034 
6035   /* When we visit a loop header, substitute into loop info.  */
6036   if (!iterate && eliminate && bb->loop_father->header == bb)
6037     {
6038       /* Keep fields in sync with substitute_in_loop_info.  */
6039       if (bb->loop_father->nb_iterations)
6040 	bb->loop_father->nb_iterations
6041 	  = simplify_replace_tree (bb->loop_father->nb_iterations,
6042 				   NULL_TREE, NULL_TREE, vn_valueize);
6043     }
6044 
6045   /* Value-number all defs in the basic-block.  */
6046   if (!skip_phis)
6047     for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6048 	 gsi_next (&gsi))
6049       {
6050 	gphi *phi = gsi.phi ();
6051 	tree res = PHI_RESULT (phi);
6052 	vn_ssa_aux_t res_info = VN_INFO (res);
6053 	if (!bb_visited)
6054 	  {
6055 	    gcc_assert (!res_info->visited);
6056 	    res_info->valnum = VN_TOP;
6057 	    res_info->visited = true;
6058 	  }
6059 
6060 	/* When not iterating, force backedge values to varying.  */
6061 	visit_stmt (phi, !iterate_phis);
6062 	if (virtual_operand_p (res))
6063 	  continue;
6064 
6065 	/* Eliminate.  */
6066 	/* For correctness of how we handle backedges and availability the
6067 	   interesting case is gcc.dg/tree-ssa/pr22230.c; for optimization
6068 	   see gcc.dg/tree-ssa/ssa-sccvn-2.c.  */
6069 	tree val = res_info->valnum;
6070 	if (res != val && !iterate && eliminate)
6071 	  {
6072 	    if (tree leader = avail.eliminate_avail (bb, res))
6073 	      {
6074 		if (leader != res
6075 		    /* Preserve loop-closed SSA form.  */
6076 		    && (! lc_phi_nodes
6077 			|| is_gimple_min_invariant (leader)))
6078 		  {
6079 		    if (dump_file && (dump_flags & TDF_DETAILS))
6080 		      {
6081 			fprintf (dump_file, "Replaced redundant PHI node "
6082 				 "defining ");
6083 			print_generic_expr (dump_file, res);
6084 			fprintf (dump_file, " with ");
6085 			print_generic_expr (dump_file, leader);
6086 			fprintf (dump_file, "\n");
6087 		      }
6088 		    avail.eliminations++;
6089 
6090 		    if (may_propagate_copy (res, leader))
6091 		      {
6092 			/* Schedule for removal.  */
6093 			avail.to_remove.safe_push (phi);
6094 			continue;
6095 		      }
6096 		    /* ???  Else generate a copy stmt.  */
6097 		  }
6098 	      }
6099 	  }
6100 	/* Only make defs available that are not already.  But make
6101 	   sure loop-closed SSA PHI node defs are picked up for
6102 	   downstream uses.  */
6103 	if (lc_phi_nodes
6104 	    || res == val
6105 	    || ! avail.eliminate_avail (bb, res))
6106 	  avail.eliminate_push_avail (bb, res);
6107       }
6108 
6109   /* For empty BBs mark outgoing edges executable.  For non-empty BBs
6110      we do this when processing the last stmt, as it has to happen
6111      before elimination, which otherwise forces GIMPLE_CONDs to
6112      if (1 != 0) style when seeing non-executable edges.  */
6113   if (gsi_end_p (gsi_start_bb (bb)))
6114     {
6115       FOR_EACH_EDGE (e, ei, bb->succs)
6116 	{
6117 	  if (!(e->flags & EDGE_EXECUTABLE))
6118 	    {
6119 	      if (dump_file && (dump_flags & TDF_DETAILS))
6120 		fprintf (dump_file,
6121 			 "marking outgoing edge %d -> %d executable\n",
6122 			 e->src->index, e->dest->index);
6123 	      e->flags |= EDGE_EXECUTABLE;
6124 	      e->dest->flags |= BB_EXECUTABLE;
6125 	    }
6126 	  else if (!(e->dest->flags & BB_EXECUTABLE))
6127 	    {
6128 	      if (dump_file && (dump_flags & TDF_DETAILS))
6129 		fprintf (dump_file,
6130 			 "marking destination block %d reachable\n",
6131 			 e->dest->index);
6132 	      e->dest->flags |= BB_EXECUTABLE;
6133 	    }
6134 	}
6135     }
6136   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6137        !gsi_end_p (gsi); gsi_next (&gsi))
6138     {
6139       ssa_op_iter i;
6140       tree op;
6141       if (!bb_visited)
6142 	{
6143 	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
6144 	    {
6145 	      vn_ssa_aux_t op_info = VN_INFO (op);
6146 	      gcc_assert (!op_info->visited);
6147 	      op_info->valnum = VN_TOP;
6148 	      op_info->visited = true;
6149 	    }
6150 
6151 	  /* We somehow have to deal with uses that are not defined
6152 	     in the processed region.  Forcing unvisited uses to
6153 	     varying here doesn't play well with def-use following during
6154 	     expression simplification, so we deal with this by checking
6155 	     the visited flag in SSA_VAL.  */
6156 	}
6157 
6158       visit_stmt (gsi_stmt (gsi));
6159 
6160       gimple *last = gsi_stmt (gsi);
6161       e = NULL;
6162       switch (gimple_code (last))
6163 	{
6164 	case GIMPLE_SWITCH:
6165 	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
6166 						(as_a <gswitch *> (last))));
6167 	  break;
6168 	case GIMPLE_COND:
6169 	  {
6170 	    tree lhs = vn_valueize (gimple_cond_lhs (last));
6171 	    tree rhs = vn_valueize (gimple_cond_rhs (last));
6172 	    tree val = gimple_simplify (gimple_cond_code (last),
6173 					boolean_type_node, lhs, rhs,
6174 					NULL, vn_valueize);
6175 	    /* If the condition didn't simplify, see if we have recorded
6176 	       an expression from the edges taken so far.  */
6177 	    if (! val || TREE_CODE (val) != INTEGER_CST)
6178 	      {
6179 		vn_nary_op_t vnresult;
6180 		tree ops[2];
6181 		ops[0] = lhs;
6182 		ops[1] = rhs;
6183 		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6184 						boolean_type_node, ops,
6185 						&vnresult);
6186 		/* Did we get a predicated value?  */
6187 		if (! val && vnresult && vnresult->predicated_values)
6188 		  {
6189 		    val = vn_nary_op_get_predicated_value (vnresult, bb);
6190 		    if (val && dump_file && (dump_flags & TDF_DETAILS))
6191 		      {
6192 			fprintf (dump_file, "Got predicated value ");
6193 			print_generic_expr (dump_file, val, TDF_NONE);
6194 			fprintf (dump_file, " for ");
6195 			print_gimple_stmt (dump_file, last, TDF_SLIM);
6196 		      }
6197 		  }
6198 	      }
6199 	    if (val)
6200 	      e = find_taken_edge (bb, val);
6201 	    if (! e)
6202 	      {
6203 		/* If we didn't manage to compute the taken edge then
6204 		   push predicated expressions for the condition itself
6205 		   and related conditions to the hashtables.  This allows
6206 		   simplification of redundant conditions which is
6207 		   important as early cleanup.  */
6208 		edge true_e, false_e;
6209 		extract_true_false_edges_from_block (bb, &true_e, &false_e);
6210 		enum tree_code code = gimple_cond_code (last);
6211 		enum tree_code icode
6212 		  = invert_tree_comparison (code, HONOR_NANS (lhs));
6213 		tree ops[2];
6214 		ops[0] = lhs;
6215 		ops[1] = rhs;
6216 		if (do_region
6217 		    && bitmap_bit_p (exit_bbs, true_e->dest->index))
6218 		  true_e = NULL;
6219 		if (do_region
6220 		    && bitmap_bit_p (exit_bbs, false_e->dest->index))
6221 		  false_e = NULL;
6222 		if (true_e)
6223 		  vn_nary_op_insert_pieces_predicated
6224 		    (2, code, boolean_type_node, ops,
6225 		     boolean_true_node, 0, true_e);
6226 		if (false_e)
6227 		  vn_nary_op_insert_pieces_predicated
6228 		    (2, code, boolean_type_node, ops,
6229 		     boolean_false_node, 0, false_e);
6230 		if (icode != ERROR_MARK)
6231 		  {
6232 		    if (true_e)
6233 		      vn_nary_op_insert_pieces_predicated
6234 			(2, icode, boolean_type_node, ops,
6235 			 boolean_false_node, 0, true_e);
6236 		    if (false_e)
6237 		      vn_nary_op_insert_pieces_predicated
6238 			(2, icode, boolean_type_node, ops,
6239 			 boolean_true_node, 0, false_e);
6240 		  }
6241 		/* Only derive related predicates for integers; the
6242 		   inverted condition was handled above.  */
6243 		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6244 		  {
6245 		    if (true_e)
6246 		      insert_related_predicates_on_edge (code, ops, true_e);
6247 		    if (false_e)
6248 		      insert_related_predicates_on_edge (icode, ops, false_e);
6249 		  }
6250 	      }
6251 	    break;
6252 	  }
6253 	case GIMPLE_GOTO:
6254 	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6255 	  break;
6256 	default:
6257 	  e = NULL;
6258 	}
6259       if (e)
6260 	{
6261 	  todo = TODO_cleanup_cfg;
6262 	  if (!(e->flags & EDGE_EXECUTABLE))
6263 	    {
6264 	      if (dump_file && (dump_flags & TDF_DETAILS))
6265 		fprintf (dump_file,
6266 			 "marking known outgoing %sedge %d -> %d executable\n",
6267 			 e->flags & EDGE_DFS_BACK ? "back-" : "",
6268 			 e->src->index, e->dest->index);
6269 	      e->flags |= EDGE_EXECUTABLE;
6270 	      e->dest->flags |= BB_EXECUTABLE;
6271 	    }
6272 	  else if (!(e->dest->flags & BB_EXECUTABLE))
6273 	    {
6274 	      if (dump_file && (dump_flags & TDF_DETAILS))
6275 		fprintf (dump_file,
6276 			 "marking destination block %d reachable\n",
6277 			 e->dest->index);
6278 	      e->dest->flags |= BB_EXECUTABLE;
6279 	    }
6280 	}
6281       else if (gsi_one_before_end_p (gsi))
6282 	{
6283 	  FOR_EACH_EDGE (e, ei, bb->succs)
6284 	    {
6285 	      if (!(e->flags & EDGE_EXECUTABLE))
6286 		{
6287 		  if (dump_file && (dump_flags & TDF_DETAILS))
6288 		    fprintf (dump_file,
6289 			     "marking outgoing edge %d -> %d executable\n",
6290 			     e->src->index, e->dest->index);
6291 		  e->flags |= EDGE_EXECUTABLE;
6292 		  e->dest->flags |= BB_EXECUTABLE;
6293 		}
6294 	      else if (!(e->dest->flags & BB_EXECUTABLE))
6295 		{
6296 		  if (dump_file && (dump_flags & TDF_DETAILS))
6297 		    fprintf (dump_file,
6298 			     "marking destination block %d reachable\n",
6299 			     e->dest->index);
6300 		  e->dest->flags |= BB_EXECUTABLE;
6301 		}
6302 	    }
6303 	}
6304 
6305       /* Eliminate.  That also pushes to avail.  */
6306       if (eliminate && ! iterate)
6307 	avail.eliminate_stmt (bb, &gsi);
6308       else
6309 	/* If not eliminating, make all not already available defs
6310 	   available.  */
6311 	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6312 	  if (! avail.eliminate_avail (bb, op))
6313 	    avail.eliminate_push_avail (bb, op);
6314     }
6315 
6316   /* Eliminate in destination PHI arguments.  Always substitute in dest
6317      PHIs, even for non-executable edges.  This handles region
6318      exit PHIs.  */
6319   if (!iterate && eliminate)
6320     FOR_EACH_EDGE (e, ei, bb->succs)
6321       for (gphi_iterator gsi = gsi_start_phis (e->dest);
6322 	   !gsi_end_p (gsi); gsi_next (&gsi))
6323 	{
6324 	  gphi *phi = gsi.phi ();
6325 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6326 	  tree arg = USE_FROM_PTR (use_p);
6327 	  if (TREE_CODE (arg) != SSA_NAME
6328 	      || virtual_operand_p (arg))
6329 	    continue;
6330 	  tree sprime;
6331 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
6332 	    {
6333 	      sprime = SSA_VAL (arg);
6334 	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
6335 			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
6336 	    }
6337 	  else
6338 	    /* Look for something available at the definition block of the argument.
6339 	       This avoids inconsistencies between availability there which
6340 	       decides if the stmt can be removed and availability at the
6341 	       use site.  The SSA property ensures that things available
6342 	       at the definition are also available at uses.  */
6343 	    sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6344 					    arg);
6345 	  if (sprime
6346 	      && sprime != arg
6347 	      && may_propagate_copy (arg, sprime))
6348 	    propagate_value (use_p, sprime);
6349 	}
6350 
6351   vn_context_bb = NULL;
6352   return todo;
6353 }
6354 
6355 /* Unwind state per basic-block.  */
6356 
6357 struct unwind_state
6358 {
6359   /* Times this block has been visited.  */
6360   unsigned visited;
6361   /* Whether to handle this as iteration point or whether to treat
6362      incoming backedge PHI values as varying.  */
6363   bool iterate;
6364   /* Maximum RPO index this block is reachable from.  */
6365   int max_rpo;
6366   /* Unwind state.  */
6367   void *ob_top;
6368   vn_reference_t ref_top;
6369   vn_phi_t phi_top;
6370   vn_nary_op_t nary_top;
6371 };

/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
	(last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
	*slot = (*slot)->unwind_to;
      else
	valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
	(last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
	(last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);

  /* Prune entries for blocks with RPO index rpo_idx and greater from
     avail.  */
  /* ???  This is O(number-of-values-in-region) which is
     O(region-size) rather than O(iteration-piece).  */
  for (rpo_elim::rpo_avail_t::iterator i
       = avail.m_rpo_avail.begin ();
       i != avail.m_rpo_avail.end (); ++i)
    {
      while (! (*i).second.is_empty ())
	{
	  if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
	    break;
	  (*i).second.pop ();
	}
    }
}
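
/* do_unwind is invoked from the iterating RPO walk below when a changed
   value flowed over an executable backedge: the tables are restored to
   the state saved for the backedge destination and the walk restarts
   from there.  */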

/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.  */

static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
	   bool iterate, bool eliminate)
{
  unsigned todo = 0;

  /* We currently do not support region-based iteration when
     elimination is requested.  */
  gcc_assert (!entry || !iterate || !eliminate);
  /* When iterating we need loop info up-to-date.  */
  gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));

  bool do_region = entry != NULL;
  if (!do_region)
    {
      entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
      exit_bbs = BITMAP_ALLOC (NULL);
      bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    }

  /* Clear EDGE_DFS_BACK on "all" entry edges, the RPO order computation
     will re-mark those that are contained in the region.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    e->flags &= ~EDGE_DFS_BACK;

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
  int n = rev_post_order_and_mark_dfs_back_seme
    (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
  /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order.  */
  for (int i = 0; i < n / 2; ++i)
    std::swap (rpo[i], rpo[n-i-1]);
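  /* After the swap rpo[0] is the region entry block and the array
     enumerates the region in reverse post-order: each block precedes
     all blocks reachable from it via forward (non-back) edges.  */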

  if (!do_region)
    BITMAP_FREE (exit_bbs);

  /* If there are any non-DFS_BACK edges into entry->dest skip
     processing PHI nodes for that block.  This supports
     value-numbering loop bodies without the actual loop.  */
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    if (e != entry
	&& !(e->flags & EDGE_DFS_BACK))
      break;
  bool skip_entry_phis = e != NULL;
  if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Region does not contain all edges into "
	     "the entry block, skipping its PHIs.\n");

  int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  for (int i = 0; i < n; ++i)
    bb_to_rpo[rpo[i]] = i;
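  /* bb_to_rpo is the inverse mapping of rpo: it maps a basic-block
     index to that block's position in the RPO order for O(1) lookup.  */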

  unwind_state *rpo_state = XNEWVEC (unwind_state, n);

  rpo_elim avail (entry->dest);
  rpo_avail = &avail;

  /* Verify we have no extra entries into the region.  */
  if (flag_checking && do_region)
    {
      auto_bb_flag bb_in_region (fn);
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  bb->flags |= bb_in_region;
	}
      /* We can't merge the first two loops because we cannot rely
	 on EDGE_DFS_BACK for edges not within the region.  But if
	 we decide to always have the bb_in_region flag we can
	 do the checking during the RPO walk itself (but then it's
	 also easy to handle MEME conservatively).  */
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  edge e;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (e == entry
			|| (skip_entry_phis && bb == entry->dest)
			|| (e->src->flags & bb_in_region));
	}
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  bb->flags &= ~bb_in_region;
	}
    }

  /* Create the VN state.  For the initial size of the various hashtables
     use a heuristic that scales the number of SSA names by the fraction
     of the function's blocks contained in the region.  */
  unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
			  / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
  next_value_id = 1;

  vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info, region_size);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;
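  /* The last_inserted_* lists thread every new hashtable entry, newest
     first, so do_unwind can pop exactly the entries added after a saved
     unwind_state watermark.  VN_TOP serves as the optimistic "not yet
     computed" lattice value.  */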

  vn_valueize = rpo_vn_valueize;

  /* Initialize the unwind state and edge/BB executable state.  */
  bool need_max_rpo_iterate = false;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      rpo_state[i].visited = 0;
      rpo_state[i].max_rpo = i;
      bb->flags &= ~BB_EXECUTABLE;
      bool has_backedges = false;
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    has_backedges = true;
	  e->flags &= ~EDGE_EXECUTABLE;
	  if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
	    continue;
	  if (bb_to_rpo[e->src->index] > i)
	    {
	      rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
					  bb_to_rpo[e->src->index]);
	      need_max_rpo_iterate = true;
	    }
	  else
	    rpo_state[i].max_rpo
	      = MAX (rpo_state[i].max_rpo,
		     rpo_state[bb_to_rpo[e->src->index]].max_rpo);
	}
      rpo_state[i].iterate = iterate && has_backedges;
    }
  entry->flags |= EDGE_EXECUTABLE;
  entry->dest->flags |= BB_EXECUTABLE;
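  /* Only the region entry is known executable up front; further edges
     and blocks are marked executable as the walk discovers them, which
     is what keeps unreachable code from influencing the value
     numbering.  */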

  /* When there are irreducible regions the simplistic max_rpo computation
     above for the case of backedges doesn't work and we need to iterate
     until there are no more changes.  */
  unsigned nit = 0;
  while (need_max_rpo_iterate)
    {
      nit++;
      need_max_rpo_iterate = false;
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  edge e;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      if (e == entry || (skip_entry_phis && bb == entry->dest))
		continue;
	      int max_rpo = MAX (rpo_state[i].max_rpo,
				 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
	      if (rpo_state[i].max_rpo != max_rpo)
		{
		  rpo_state[i].max_rpo = max_rpo;
		  need_max_rpo_iterate = true;
		}
	    }
	}
    }
  statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
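  /* At this point max_rpo approximates, for each block, the largest RPO
     index of any block it is reachable from.  The non-iterating walk
     below uses it to decide whether a not-yet-executable predecessor
     edge may still become executable and thus cannot be trusted.  */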

  /* As a heuristic to improve compile time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      loop_p loop;
      unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
      FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
	if (loop_depth (loop) > max_depth)
	  for (unsigned i = 2;
	       i < loop_depth (loop) - max_depth; ++i)
	    {
	      basic_block header = superloop_at_depth (loop, i)->header;
	      bool non_latch_backedge = false;
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, header->preds)
		if (e->flags & EDGE_DFS_BACK)
		  {
		    /* There can be a non-latch backedge into the header
		       which is part of an outer irreducible region.  We
		       cannot avoid iterating this block then.  */
		    if (!dominated_by_p (CDI_DOMINATORS,
					 e->src, e->dest))
		      {
			if (dump_file && (dump_flags & TDF_DETAILS))
			  fprintf (dump_file, "non-latch backedge %d -> %d "
				   "forces iteration of loop %d\n",
				   e->src->index, e->dest->index, loop->num);
			non_latch_backedge = true;
		      }
		    else
		      e->flags |= EDGE_EXECUTABLE;
		  }
	      rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
	    }
    }
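  /* The effect: the outermost level and roughly the
     PARAM_RPO_VN_MAX_LOOP_DEPTH innermost levels keep iterating, while
     headers of the loop levels in between get their latch backedges
     pre-marked executable so their PHIs are handled pessimistically
     instead of being re-visited.  */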

  uint64_t nblk = 0;
  int idx = 0;
  if (iterate)
    /* Go and process all blocks, iterating as necessary.  */
    do
      {
	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

	/* If the block has incoming backedges remember unwind state.  This
	   is required even for non-executable blocks since in irreducible
	   regions we might reach them via the backedge and re-start iterating
	   from there.
	   Note we can individually mark blocks with incoming backedges as
	   not iterating, in which case we handle their PHIs conservatively.
	   We do that heuristically to reduce compile time for degenerate
	   cases.  */
	if (rpo_state[idx].iterate)
	  {
	    rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
	    rpo_state[idx].ref_top = last_inserted_ref;
	    rpo_state[idx].phi_top = last_inserted_phi;
	    rpo_state[idx].nary_top = last_inserted_nary;
	  }

	if (!(bb->flags & BB_EXECUTABLE))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Block %d: BB%d found not executable\n",
		       idx, bb->index);
	    idx++;
	    continue;
	  }

	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
	nblk++;
	todo |= process_bb (avail, bb,
			    rpo_state[idx].visited != 0,
			    rpo_state[idx].iterate,
			    iterate, eliminate, do_region, exit_bbs, false);
	rpo_state[idx].visited++;

	/* Verify whether changed values flow over executable outgoing
	   backedges and whether they change destination PHI values
	   (that is what we can easily verify).  Reduce over all such
	   edges to the destination that is earliest in RPO order.  */
	int iterate_to = -1;
	edge_iterator ei;
	edge e;
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
	      == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
	      && rpo_state[bb_to_rpo[e->dest->index]].iterate)
	    {
	      int destidx = bb_to_rpo[e->dest->index];
	      if (!rpo_state[destidx].visited)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Unvisited destination %d\n",
			     e->dest->index);
		  if (iterate_to == -1 || destidx < iterate_to)
		    iterate_to = destidx;
		  continue;
		}
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Looking for changed values of backedge"
			 " %d->%d destination PHIs\n",
			 e->src->index, e->dest->index);
	      vn_context_bb = e->dest;
	      gphi_iterator gsi;
	      for (gsi = gsi_start_phis (e->dest);
		   !gsi_end_p (gsi); gsi_next (&gsi))
		{
		  bool inserted = false;
		  /* While we'd ideally just iterate on value changes,
		     we CSE PHIs and do that even across basic-block
		     boundaries.  So even hashtable state changes can
		     be important (which is roughly equivalent to
		     PHI argument value changes).  To avoid excessive
		     iteration because of that we track whether a PHI
		     was CSEd to with GF_PLF_1.  */
		  bool phival_changed;
		  if ((phival_changed = visit_phi (gsi.phi (),
						   &inserted, false))
		      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
		    {
		      if (!phival_changed
			  && dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "PHI was CSEd and hashtable "
				 "state (changed)\n");
		      if (iterate_to == -1 || destidx < iterate_to)
			iterate_to = destidx;
		      break;
		    }
		}
	      vn_context_bb = NULL;
	    }
	if (iterate_to != -1)
	  {
	    do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
	    idx = iterate_to;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Iterating to %d BB%d\n",
		       iterate_to, rpo[iterate_to]);
	    continue;
	  }

	idx++;
      }
    while (idx < n);

  else /* !iterate */
    {
      /* Process all blocks greedily with a worklist that enforces RPO
	 processing of reachable blocks.  */
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
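      /* RPO index 0 is the region entry block, the only block known to
	 be executable at this point; bitmap_first_set_bit always yields
	 the smallest queued index, so blocks come off the worklist in
	 RPO order.  */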
      while (!bitmap_empty_p (worklist))
	{
	  int idx = bitmap_first_set_bit (worklist);
	  bitmap_clear_bit (worklist, idx);
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
	  gcc_assert ((bb->flags & BB_EXECUTABLE)
		      && !rpo_state[idx].visited);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);

	  /* When we run into predecessor edges whose executable state we
	     cannot trust, mark them executable so PHI processing will
	     be conservative.
	     ???  Do we need to force arguments flowing over that edge
	     to be varying or will they even always be?  */
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (!(e->flags & EDGE_EXECUTABLE)
		&& (bb == entry->dest
		    || (!rpo_state[bb_to_rpo[e->src->index]].visited
			&& (rpo_state[bb_to_rpo[e->src->index]].max_rpo
			    >= (int)idx))))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Cannot trust state of predecessor "
			   "edge %d -> %d, marking executable\n",
			   e->src->index, e->dest->index);
		e->flags |= EDGE_EXECUTABLE;
	      }

	  nblk++;
	  todo |= process_bb (avail, bb, false, false, false, eliminate,
			      do_region, exit_bbs,
			      skip_entry_phis && bb == entry->dest);
	  rpo_state[idx].visited++;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if ((e->flags & EDGE_EXECUTABLE)
		&& e->dest->index != EXIT_BLOCK
		&& (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
		&& !rpo_state[bb_to_rpo[e->dest->index]].visited)
	      bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
	}
    }

  /* Gather statistics on the walk; they are reported when statistics
     counters or a dump file are active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
	nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
				  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
	max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
       i != avail.m_rpo_avail.end (); ++i)
    {
      nvalues++;
      navail += (*i).second.length ();
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
			      vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
	       " blocks in total discovering %d executable blocks iterating "
	       "%d.%d times, a block was visited max. %u times\n",
	       n, nblk, nex,
	       (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
	       max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
	       "and %" PRIu64 " lattice elements\n",
	       nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }

  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
	 walk.  */
      if (iterate)
	{
	  /* Elimination for region-based VN needs to be done within the
	     RPO walk.  */
	  gcc_assert (! do_region);
	  /* Note we can't use avail.walk here because that gets confused
	     by the existing availability and it will be less efficient
	     as well.  */
	  todo |= eliminate_with_rpo_vn (NULL);
	}
      else
	todo |= avail.eliminate_cleanup (do_region);
    }

  vn_valueize = NULL;
  rpo_avail = NULL;

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}

/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region at ENTRY->dest, PHI nodes in ENTRY->dest
   are not considered.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}
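
/* A sketch of how a caller might use this region-based entry, assuming
   it wants to value-number just the body of a loop with a single exit
   (illustrative only, not a call site taken from this file):

     bitmap exit_bbs = BITMAP_ALLOC (NULL);
     edge exit = single_exit (loop);
     bitmap_set_bit (exit_bbs, exit->dest->index);
     unsigned todo = do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
     BITMAP_FREE (exit_bbs);

   Blocks whose indexes are in EXIT_BBS bound the SEME region and are
   not themselves processed.  */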


namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre

unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  calculate_dominance_info (CDI_DOMINATORS);
  if (optimize > 1)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
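  /* Loop info is only initialized at -O2 and above because the iterating
     mode selected below (optimize > 1) relies on up-to-date loop
     structures; see the assertion and the loop-depth heuristic in
     do_rpo_vn.  */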

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, optimize > 1, true);
  free_rpo_vn ();

  if (optimize > 1)
    loop_optimizer_finalize ();

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE