xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-ssa-sccvn.c (revision d90047b5d07facf36e6c01dcc0bded8997ce9cc2)
1 /* SCC value numbering for trees
2    Copyright (C) 2006-2017 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dan@dberlin.org>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-ssa-sccvn.h"
59 #include "tree-cfg.h"
60 #include "domwalk.h"
61 #include "gimple-iterator.h"
62 #include "gimple-match.h"
63 
64 /* This algorithm is based on the SCC algorithm presented by Keith
65    Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
66    (http://citeseer.ist.psu.edu/41805.html).  In
67    straight-line code, it is equivalent to a regular hash-based value
68    numbering performed in reverse postorder.
69 
70    For code with cycles, there are two alternatives, both of which
71    require keeping the hashtables separate from the actual list of
72    value numbers for SSA names.
73 
74    1. Iterate value numbering in an RPO walk of the blocks, removing
75    all the entries from the hashtable after each iteration (but
76    keeping the SSA name->value number mapping between iterations).
77    Iterate until it does not change.
78 
79    2. Perform value numbering as part of an SCC walk on the SSA graph,
80    iterating only the cycles in the SSA graph until they do not change
81    (using a separate, optimistic hashtable for value numbering the SCC
82    operands).
83 
84    The second is not just faster in practice (because most SSA graph
85    cycles do not involve all the variables in the graph), it also has
86    some nice properties.
87 
88    One of these nice properties is that when we pop an SCC off the
89    stack, we are guaranteed to have processed all the operands coming from
90    *outside of that SCC*, so we do not need to do anything special to
91    ensure they have value numbers.
92 
93    Another nice property is that the SCC walk is done as part of a DFS
94    of the SSA graph, which makes it easy to perform combining and
95    simplifying operations at the same time.
96 
97    The code below is deliberately written in a way that makes it easy
98    to separate the SCC walk from the other work it does.
99 
100    In order to propagate constants through the code, we track which
101    expressions contain constants, and use those while folding.  In
102    theory, we could also track expressions whose value numbers are
103    replaced, in case we end up folding based on expression
104    identities.
105 
106    In order to value number memory, we assign value numbers to vuses.
107    This enables us to note that, for example, stores of the same
108    value to the same address from the same starting memory state are
109    equivalent.
110    TODO:
111 
112    1. We can iterate only the changing portions of the SCCs, but
113    I have not seen an SCC big enough for this to be a win.
114    2. If you differentiate between phi nodes for loops and phi nodes
115    for if-then-else, you can properly consider phi nodes in different
116    blocks for equivalence.
117    3. We could value number vuses in more cases, particularly, whole
118    structure copies.
119 */
120 
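/* As a small illustration (this example is not part of the original
   sources), consider value numbering a loop whose SSA graph contains
   the cycle

     i_1 = PHI <0(entry), i_3(latch)>
     j_1 = PHI <0(entry), j_3(latch)>
     i_3 = i_1 + 1;
     j_3 = j_1 + 1;

   The four names form a single SCC.  The optimistic iteration first
   assumes the two PHIs are equivalent (their not-yet-visited latch
   arguments are VN_TOP and ignored), then hashes i_3 and j_3 to the
   same expression, which keeps the PHIs congruent on the next
   iteration; the result stabilizes with i_1 == j_1 and i_3 == j_3
   after iterating only this SCC rather than the whole function.  */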
121 
122 static tree *last_vuse_ptr;
123 static vn_lookup_kind vn_walk_kind;
124 static vn_lookup_kind default_vn_walk_kind;
125 bitmap const_parms;
126 
127 /* vn_nary_op hashtable helpers.  */
128 
129 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 {
131   typedef vn_nary_op_s *compare_type;
132   static inline hashval_t hash (const vn_nary_op_s *);
133   static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
134 };
135 
136 /* Return the computed hashcode for nary operation VNO1.  */
137 
138 inline hashval_t
139 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 {
141   return vno1->hashcode;
142 }
143 
144 /* Compare nary operations VNO1 and VNO2 and return true if they are
145    equivalent.  */
146 
147 inline bool
148 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 {
150   return vn_nary_op_eq (vno1, vno2);
151 }
152 
153 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
154 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
155 
156 
157 /* vn_phi hashtable helpers.  */
158 
159 static int
160 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 
162 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 {
164   static inline hashval_t hash (const vn_phi_s *);
165   static inline bool equal (const vn_phi_s *, const vn_phi_s *);
166   static inline void remove (vn_phi_s *);
167 };
168 
169 /* Return the computed hashcode for phi operation VP1.  */
170 
171 inline hashval_t
172 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 {
174   return vp1->hashcode;
175 }
176 
177 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
178 
179 inline bool
180 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 {
182   return vn_phi_eq (vp1, vp2);
183 }
184 
185 /* Free a phi operation structure PHI.  */
186 
187 inline void
188 vn_phi_hasher::remove (vn_phi_s *phi)
189 {
190   phi->phiargs.release ();
191 }
192 
193 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
194 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
195 
196 
197 /* Compare two reference operands P1 and P2 for equality.  Return true if
198    they are equal, and false otherwise.  */
199 
200 static int
201 vn_reference_op_eq (const void *p1, const void *p2)
202 {
203   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
204   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 
206   return (vro1->opcode == vro2->opcode
207 	  /* We do not care for differences in type qualification.  */
208 	  && (vro1->type == vro2->type
209 	      || (vro1->type && vro2->type
210 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
211 					 TYPE_MAIN_VARIANT (vro2->type))))
212 	  && expressions_equal_p (vro1->op0, vro2->op0)
213 	  && expressions_equal_p (vro1->op1, vro2->op1)
214 	  && expressions_equal_p (vro1->op2, vro2->op2));
215 }
216 
217 /* Free a reference operation structure VR.  */
218 
219 static inline void
220 free_reference (vn_reference_s *vr)
221 {
222   vr->operands.release ();
223 }
224 
225 
226 /* vn_reference hashtable helpers.  */
227 
228 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 {
230   static inline hashval_t hash (const vn_reference_s *);
231   static inline bool equal (const vn_reference_s *, const vn_reference_s *);
232   static inline void remove (vn_reference_s *);
233 };
234 
235 /* Return the hashcode for a given reference operation VR1.  */
236 
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 {
240   return vr1->hashcode;
241 }
242 
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 {
246   return vn_reference_eq (v, c);
247 }
248 
249 inline void
250 vn_reference_hasher::remove (vn_reference_s *v)
251 {
252   free_reference (v);
253 }
254 
255 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
256 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
257 
258 
259 /* The set of hashtables and alloc_pool's for their items.  */
260 
261 typedef struct vn_tables_s
262 {
263   vn_nary_op_table_type *nary;
264   vn_phi_table_type *phis;
265   vn_reference_table_type *references;
266   struct obstack nary_obstack;
267   object_allocator<vn_phi_s> *phis_pool;
268   object_allocator<vn_reference_s> *references_pool;
269 } *vn_tables_t;
270 
271 
272 /* vn_constant hashtable helpers.  */
273 
274 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 {
276   static inline hashval_t hash (const vn_constant_s *);
277   static inline bool equal (const vn_constant_s *, const vn_constant_s *);
278 };
279 
280 /* Hash table hash function for vn_constant_t.  */
281 
282 inline hashval_t
283 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 {
285   return vc1->hashcode;
286 }
287 
288 /* Hash table equality function for vn_constant_t.  */
289 
290 inline bool
291 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 {
293   if (vc1->hashcode != vc2->hashcode)
294     return false;
295 
296   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
297 }
298 
299 static hash_table<vn_constant_hasher> *constant_to_value_id;
300 static bitmap constant_value_ids;
301 
302 
303 /* Valid hashtables storing information we have proven to be
304    correct.  */
305 
306 static vn_tables_t valid_info;
307 
308 /* Optimistic hashtables storing information we are making assumptions about
309    during iterations.  */
310 
311 static vn_tables_t optimistic_info;
312 
313 /* Pointer to the set of hashtables that is currently being used.
314    Should always point to either the optimistic_info, or the
315    valid_info.  */
316 
317 static vn_tables_t current_info;
318 
319 
320 /* Reverse post order index for each basic block.  */
321 
322 static int *rpo_numbers;
323 
324 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 
326 /* Return the SSA value of the VUSE X, supporting released VDEFs
327    during elimination, which value-numbers the VDEF to the
328    associated VUSE (but does not substitute in the whole lattice).  */
329 
330 static inline tree
331 vuse_ssa_val (tree x)
332 {
333   if (!x)
334     return NULL_TREE;
335 
336   do
337     {
338       x = SSA_VAL (x);
339     }
340   while (SSA_NAME_IN_FREE_LIST (x));
341 
342   return x;
343 }
344 
345 /* This represents the top of the VN lattice, which is the universal
346    value.  */
347 
348 tree VN_TOP;
349 
350 /* Unique counter for our value ids.  */
351 
352 static unsigned int next_value_id;
353 
354 /* Next DFS number and the stack for strongly connected component
355    detection. */
356 
357 static unsigned int next_dfs_num;
358 static vec<tree> sccstack;
359 
360 
361 
362 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
363    are allocated on an obstack for locality reasons, and so they can be
364    freed without looping over the vec.  */
365 
366 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
367 static struct obstack vn_ssa_aux_obstack;
368 
369 /* Return whether there is value numbering information for a given SSA name.  */
370 
371 bool
372 has_VN_INFO (tree name)
373 {
374   if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
375     return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
376   return false;
377 }
378 
379 /* Return the value numbering information for a given SSA name.  */
380 
381 vn_ssa_aux_t
382 VN_INFO (tree name)
383 {
384   vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
385   gcc_checking_assert (res);
386   return res;
387 }
388 
389 /* Set the value numbering info for a given SSA name to a given
390    value.  */
391 
392 static inline void
393 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 {
395   vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
396 }
397 
398 /* Initialize the value numbering info for a given SSA name.
399    This should be called just once for every SSA name.  */
400 
401 vn_ssa_aux_t
402 VN_INFO_GET (tree name)
403 {
404   vn_ssa_aux_t newinfo;
405 
406   gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
407 	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
408   newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
409   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
410   if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
411     vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
412   vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
413   return newinfo;
414 }
415 
416 
417 /* Return the vn_kind the expression computed by the stmt should be
418    associated with.  */
419 
420 enum vn_kind
421 vn_get_stmt_kind (gimple *stmt)
422 {
423   switch (gimple_code (stmt))
424     {
425     case GIMPLE_CALL:
426       return VN_REFERENCE;
427     case GIMPLE_PHI:
428       return VN_PHI;
429     case GIMPLE_ASSIGN:
430       {
431 	enum tree_code code = gimple_assign_rhs_code (stmt);
432 	tree rhs1 = gimple_assign_rhs1 (stmt);
433 	switch (get_gimple_rhs_class (code))
434 	  {
435 	  case GIMPLE_UNARY_RHS:
436 	  case GIMPLE_BINARY_RHS:
437 	  case GIMPLE_TERNARY_RHS:
438 	    return VN_NARY;
439 	  case GIMPLE_SINGLE_RHS:
440 	    switch (TREE_CODE_CLASS (code))
441 	      {
442 	      case tcc_reference:
443 		/* VOP-less references can go through the unary case.  */
444 		if ((code == REALPART_EXPR
445 		     || code == IMAGPART_EXPR
446 		     || code == VIEW_CONVERT_EXPR
447 		     || code == BIT_FIELD_REF)
448 		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
449 		  return VN_NARY;
450 
451 		/* Fallthrough.  */
452 	      case tcc_declaration:
453 		return VN_REFERENCE;
454 
455 	      case tcc_constant:
456 		return VN_CONSTANT;
457 
458 	      default:
459 		if (code == ADDR_EXPR)
460 		  return (is_gimple_min_invariant (rhs1)
461 			  ? VN_CONSTANT : VN_REFERENCE);
462 		else if (code == CONSTRUCTOR)
463 		  return VN_NARY;
464 		return VN_NONE;
465 	      }
466 	  default:
467 	    return VN_NONE;
468 	  }
469       }
470     default:
471       return VN_NONE;
472     }
473 }
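
/* For example (illustrative only, not from the original sources),
   statements are classified as follows:

     x_1 = y_2 + z_3;       -> VN_NARY
     x_1 = a.b;             -> VN_REFERENCE
     x_1 = foo (y_2);       -> VN_REFERENCE  (GIMPLE_CALL)
     x_1 = PHI <y_2, z_3>;  -> VN_PHI
     x_1 = 42;              -> VN_CONSTANT
     x_1 = &a;              -> VN_CONSTANT   (invariant address)
     x_1 = &a[i_2];         -> VN_REFERENCE  (non-invariant address)  */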
474 
475 /* Lookup a value id for CONSTANT and return it.  If it does not
476    exist returns 0.  */
477 
478 unsigned int
479 get_constant_value_id (tree constant)
480 {
481   vn_constant_s **slot;
482   struct vn_constant_s vc;
483 
484   vc.hashcode = vn_hash_constant_with_type (constant);
485   vc.constant = constant;
486   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
487   if (slot)
488     return (*slot)->value_id;
489   return 0;
490 }
491 
492 /* Lookup a value id for CONSTANT, and if it does not exist, create a
493    new one and return it.  If it does exist, return it.  */
494 
495 unsigned int
496 get_or_alloc_constant_value_id (tree constant)
497 {
498   vn_constant_s **slot;
499   struct vn_constant_s vc;
500   vn_constant_t vcp;
501 
502   vc.hashcode = vn_hash_constant_with_type (constant);
503   vc.constant = constant;
504   slot = constant_to_value_id->find_slot (&vc, INSERT);
505   if (*slot)
506     return (*slot)->value_id;
507 
508   vcp = XNEW (struct vn_constant_s);
509   vcp->hashcode = vc.hashcode;
510   vcp->constant = constant;
511   vcp->value_id = get_next_value_id ();
512   *slot = vcp;
513   bitmap_set_bit (constant_value_ids, vcp->value_id);
514   return vcp->value_id;
515 }
516 
517 /* Return true if V is a value id for a constant.  */
518 
519 bool
520 value_id_constant_p (unsigned int v)
521 {
522   return bitmap_bit_p (constant_value_ids, v);
523 }
524 
525 /* Compute the hash for a reference operand VRO1.  */
526 
527 static void
528 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 {
530   hstate.add_int (vro1->opcode);
531   if (vro1->op0)
532     inchash::add_expr (vro1->op0, hstate);
533   if (vro1->op1)
534     inchash::add_expr (vro1->op1, hstate);
535   if (vro1->op2)
536     inchash::add_expr (vro1->op2, hstate);
537 }
538 
539 /* Compute a hash for the reference operation VR1 and return it.  */
540 
541 static hashval_t
542 vn_reference_compute_hash (const vn_reference_t vr1)
543 {
544   inchash::hash hstate;
545   hashval_t result;
546   int i;
547   vn_reference_op_t vro;
548   HOST_WIDE_INT off = -1;
549   bool deref = false;
550 
551   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552     {
553       if (vro->opcode == MEM_REF)
554 	deref = true;
555       else if (vro->opcode != ADDR_EXPR)
556 	deref = false;
557       if (vro->off != -1)
558 	{
559 	  if (off == -1)
560 	    off = 0;
561 	  off += vro->off;
562 	}
563       else
564 	{
565 	  if (off != -1
566 	      && off != 0)
567 	    hstate.add_int (off);
568 	  off = -1;
569 	  if (deref
570 	      && vro->opcode == ADDR_EXPR)
571 	    {
572 	      if (vro->op0)
573 		{
574 		  tree op = TREE_OPERAND (vro->op0, 0);
575 		  hstate.add_int (TREE_CODE (op));
576 		  inchash::add_expr (op, hstate);
577 		}
578 	    }
579 	  else
580 	    vn_reference_op_compute_hash (vro, hstate);
581 	}
582     }
583   result = hstate.end ();
584   /* ??? We would ICE later if we hash instead of adding that in. */
585   if (vr1->vuse)
586     result += SSA_NAME_VERSION (vr1->vuse);
587 
588   return result;
589 }
590 
591 /* Return true if reference operations VR1 and VR2 are equivalent.  This
592    means they have the same set of operands and vuses.  */
593 
594 bool
595 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 {
597   unsigned i, j;
598 
599   /* Early out if this is not a hash collision.  */
600   if (vr1->hashcode != vr2->hashcode)
601     return false;
602 
603   /* The VOP needs to be the same.  */
604   if (vr1->vuse != vr2->vuse)
605     return false;
606 
607   /* If the operands are the same we are done.  */
608   if (vr1->operands == vr2->operands)
609     return true;
610 
611   if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
612     return false;
613 
614   if (INTEGRAL_TYPE_P (vr1->type)
615       && INTEGRAL_TYPE_P (vr2->type))
616     {
617       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
618 	return false;
619     }
620   else if (INTEGRAL_TYPE_P (vr1->type)
621 	   && (TYPE_PRECISION (vr1->type)
622 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
623     return false;
624   else if (INTEGRAL_TYPE_P (vr2->type)
625 	   && (TYPE_PRECISION (vr2->type)
626 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
627     return false;
628 
629   i = 0;
630   j = 0;
631   do
632     {
633       HOST_WIDE_INT off1 = 0, off2 = 0;
634       vn_reference_op_t vro1, vro2;
635       vn_reference_op_s tem1, tem2;
636       bool deref1 = false, deref2 = false;
637       for (; vr1->operands.iterate (i, &vro1); i++)
638 	{
639 	  if (vro1->opcode == MEM_REF)
640 	    deref1 = true;
641 	  /* Do not look through a storage order barrier.  */
642 	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
643 	    return false;
644 	  if (vro1->off == -1)
645 	    break;
646 	  off1 += vro1->off;
647 	}
648       for (; vr2->operands.iterate (j, &vro2); j++)
649 	{
650 	  if (vro2->opcode == MEM_REF)
651 	    deref2 = true;
652 	  /* Do not look through a storage order barrier.  */
653 	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
654 	    return false;
655 	  if (vro2->off == -1)
656 	    break;
657 	  off2 += vro2->off;
658 	}
659       if (off1 != off2)
660 	return false;
661       if (deref1 && vro1->opcode == ADDR_EXPR)
662 	{
663 	  memset (&tem1, 0, sizeof (tem1));
664 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
665 	  tem1.type = TREE_TYPE (tem1.op0);
666 	  tem1.opcode = TREE_CODE (tem1.op0);
667 	  vro1 = &tem1;
668 	  deref1 = false;
669 	}
670       if (deref2 && vro2->opcode == ADDR_EXPR)
671 	{
672 	  memset (&tem2, 0, sizeof (tem2));
673 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
674 	  tem2.type = TREE_TYPE (tem2.op0);
675 	  tem2.opcode = TREE_CODE (tem2.op0);
676 	  vro2 = &tem2;
677 	  deref2 = false;
678 	}
679       if (deref1 != deref2)
680 	return false;
681       if (!vn_reference_op_eq (vro1, vro2))
682 	return false;
683       ++j;
684       ++i;
685     }
686   while (vr1->operands.length () != i
687 	 || vr2->operands.length () != j);
688 
689   return true;
690 }
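
/* To illustrate the offset accumulation above (this example is not
   part of the original sources): given

     struct S { int a; int b; } s;    (4-byte int; b at byte offset 4)

   the access  s.b  and the access  MEM[(int *)&s + 4B]  decompose
   into different operand sequences, but both the hash function and
   vn_reference_eq sum the constant per-operand offsets and look
   through the final MEM_REF/ADDR_EXPR pair down to the underlying
   decl s, so the two references hash and compare equal.  */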
691 
692 /* Copy the operations present in load/store REF into RESULT, a vector of
693    vn_reference_op_s's.  */
694 
695 static void
696 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 {
698   if (TREE_CODE (ref) == TARGET_MEM_REF)
699     {
700       vn_reference_op_s temp;
701 
702       result->reserve (3);
703 
704       memset (&temp, 0, sizeof (temp));
705       temp.type = TREE_TYPE (ref);
706       temp.opcode = TREE_CODE (ref);
707       temp.op0 = TMR_INDEX (ref);
708       temp.op1 = TMR_STEP (ref);
709       temp.op2 = TMR_OFFSET (ref);
710       temp.off = -1;
711       temp.clique = MR_DEPENDENCE_CLIQUE (ref);
712       temp.base = MR_DEPENDENCE_BASE (ref);
713       result->quick_push (temp);
714 
715       memset (&temp, 0, sizeof (temp));
716       temp.type = NULL_TREE;
717       temp.opcode = ERROR_MARK;
718       temp.op0 = TMR_INDEX2 (ref);
719       temp.off = -1;
720       result->quick_push (temp);
721 
722       memset (&temp, 0, sizeof (temp));
723       temp.type = NULL_TREE;
724       temp.opcode = TREE_CODE (TMR_BASE (ref));
725       temp.op0 = TMR_BASE (ref);
726       temp.off = -1;
727       result->quick_push (temp);
728       return;
729     }
730 
731   /* For non-calls, store the information that makes up the address.  */
732   tree orig = ref;
733   while (ref)
734     {
735       vn_reference_op_s temp;
736 
737       memset (&temp, 0, sizeof (temp));
738       temp.type = TREE_TYPE (ref);
739       temp.opcode = TREE_CODE (ref);
740       temp.off = -1;
741 
742       switch (temp.opcode)
743 	{
744 	case MODIFY_EXPR:
745 	  temp.op0 = TREE_OPERAND (ref, 1);
746 	  break;
747 	case WITH_SIZE_EXPR:
748 	  temp.op0 = TREE_OPERAND (ref, 1);
749 	  temp.off = 0;
750 	  break;
751 	case MEM_REF:
752 	  /* The base address gets its own vn_reference_op_s structure.  */
753 	  temp.op0 = TREE_OPERAND (ref, 1);
754 	    {
755 	      offset_int off = mem_ref_offset (ref);
756 	      if (wi::fits_shwi_p (off))
757 		temp.off = off.to_shwi ();
758 	    }
759 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
760 	  temp.base = MR_DEPENDENCE_BASE (ref);
761 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
762 	  break;
763 	case BIT_FIELD_REF:
764 	  /* Record bits, position and storage order.  */
765 	  temp.op0 = TREE_OPERAND (ref, 1);
766 	  temp.op1 = TREE_OPERAND (ref, 2);
767 	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
768 	    {
769 	      HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
770 	      if (off % BITS_PER_UNIT == 0)
771 		temp.off = off / BITS_PER_UNIT;
772 	    }
773 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 	  break;
775 	case COMPONENT_REF:
776 	  /* The field decl is enough to unambiguously specify the field,
777 	     a matching type is not necessary and a mismatching type
778 	     is always a spurious difference.  */
779 	  temp.type = NULL_TREE;
780 	  temp.op0 = TREE_OPERAND (ref, 1);
781 	  temp.op1 = TREE_OPERAND (ref, 2);
782 	  {
783 	    tree this_offset = component_ref_field_offset (ref);
784 	    if (this_offset
785 		&& TREE_CODE (this_offset) == INTEGER_CST)
786 	      {
787 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
788 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
789 		  {
790 		    offset_int off
791 		      = (wi::to_offset (this_offset)
792 			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
793 		    if (wi::fits_shwi_p (off)
794 			/* Prohibit value-numbering zero offset components
795 			   of addresses the same before the pass folding
796 			   __builtin_object_size has had a chance to run
797 			   (checking cfun->after_inlining does the
798 			   trick here).  */
799 			&& (TREE_CODE (orig) != ADDR_EXPR
800 			    || off != 0
801 			    || cfun->after_inlining))
802 		      temp.off = off.to_shwi ();
803 		  }
804 	      }
805 	  }
806 	  break;
807 	case ARRAY_RANGE_REF:
808 	case ARRAY_REF:
809 	  {
810 	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
811 	    /* Record index as operand.  */
812 	    temp.op0 = TREE_OPERAND (ref, 1);
813 	    /* Always record lower bounds and element size.  */
814 	    temp.op1 = array_ref_low_bound (ref);
815 	    /* But record element size in units of the type alignment.  */
816 	    temp.op2 = TREE_OPERAND (ref, 3);
817 	    temp.align = eltype->type_common.align;
818 	    if (! temp.op2)
819 	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
820 				     size_int (TYPE_ALIGN_UNIT (eltype)));
821 	    if (TREE_CODE (temp.op0) == INTEGER_CST
822 		&& TREE_CODE (temp.op1) == INTEGER_CST
823 		&& TREE_CODE (temp.op2) == INTEGER_CST)
824 	      {
825 		offset_int off = ((wi::to_offset (temp.op0)
826 				   - wi::to_offset (temp.op1))
827 				  * wi::to_offset (temp.op2)
828 				  * vn_ref_op_align_unit (&temp));
829 		if (wi::fits_shwi_p (off))
830 		  temp.off = off.to_shwi();
831 	      }
832 	  }
833 	  break;
834 	case VAR_DECL:
835 	  if (DECL_HARD_REGISTER (ref))
836 	    {
837 	      temp.op0 = ref;
838 	      break;
839 	    }
840 	  /* Fallthru.  */
841 	case PARM_DECL:
842 	case CONST_DECL:
843 	case RESULT_DECL:
844 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
845 	     when valueizing MEM[ptr] with ptr = &decl.  */
846 	  temp.opcode = MEM_REF;
847 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
848 	  temp.off = 0;
849 	  result->safe_push (temp);
850 	  temp.opcode = ADDR_EXPR;
851 	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
852 	  temp.type = TREE_TYPE (temp.op0);
853 	  temp.off = -1;
854 	  break;
855 	case STRING_CST:
856 	case INTEGER_CST:
857 	case COMPLEX_CST:
858 	case VECTOR_CST:
859 	case REAL_CST:
860 	case FIXED_CST:
861 	case CONSTRUCTOR:
862 	case SSA_NAME:
863 	  temp.op0 = ref;
864 	  break;
865 	case ADDR_EXPR:
866 	  if (is_gimple_min_invariant (ref))
867 	    {
868 	      temp.op0 = ref;
869 	      break;
870 	    }
871 	  break;
872 	  /* These are only interesting for their operands, their
873 	     existence, and their type.  They will never be the last
874 	     ref in the chain of references (i.e. they require an
875 	     operand), so we don't have to put anything
876 	     for op* as it will be handled by the iteration.  */
877 	case REALPART_EXPR:
878 	  temp.off = 0;
879 	  break;
880 	case VIEW_CONVERT_EXPR:
881 	  temp.off = 0;
882 	  temp.reverse = storage_order_barrier_p (ref);
883 	  break;
884 	case IMAGPART_EXPR:
885 	  /* This is only interesting for its constant offset.  */
886 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
887 	  break;
888 	default:
889 	  gcc_unreachable ();
890 	}
891       result->safe_push (temp);
892 
893       if (REFERENCE_CLASS_P (ref)
894 	  || TREE_CODE (ref) == MODIFY_EXPR
895 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
896 	  || (TREE_CODE (ref) == ADDR_EXPR
897 	      && !is_gimple_min_invariant (ref)))
898 	ref = TREE_OPERAND (ref, 0);
899       else
900 	ref = NULL_TREE;
901     }
902 }
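
/* For illustration (not part of the original sources): the pointer
   dereference  p_1->f,  i.e. COMPONENT_REF <MEM_REF <p_1, 0>, f>,
   is copied as the operand sequence

     { COMPONENT_REF f } { MEM_REF 0 } { SSA_NAME p_1 }

   while the direct access  s.f  of a variable is canonicalized as

     { COMPONENT_REF f } { MEM_REF 0 } { ADDR_EXPR &s }

   so that, once p_1 is known to have the value &s, the two forms
   valueize to the same operand sequence and can be unified.  */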
903 
904 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
905    operands in OPS, the reference alias set SET and the reference type TYPE.
906    Return true if something useful was produced.  */
907 
908 bool
909 ao_ref_init_from_vn_reference (ao_ref *ref,
910 			       alias_set_type set, tree type,
911 			       vec<vn_reference_op_s> ops)
912 {
913   vn_reference_op_t op;
914   unsigned i;
915   tree base = NULL_TREE;
916   tree *op0_p = &base;
917   offset_int offset = 0;
918   offset_int max_size;
919   offset_int size = -1;
920   tree size_tree = NULL_TREE;
921   alias_set_type base_alias_set = -1;
922 
923   /* First get the final access size from just the outermost expression.  */
924   op = &ops[0];
925   if (op->opcode == COMPONENT_REF)
926     size_tree = DECL_SIZE (op->op0);
927   else if (op->opcode == BIT_FIELD_REF)
928     size_tree = op->op0;
929   else
930     {
931       machine_mode mode = TYPE_MODE (type);
932       if (mode == BLKmode)
933 	size_tree = TYPE_SIZE (type);
934       else
935 	size = int (GET_MODE_BITSIZE (mode));
936     }
937   if (size_tree != NULL_TREE
938       && TREE_CODE (size_tree) == INTEGER_CST)
939     size = wi::to_offset (size_tree);
940 
941   /* Initially, maxsize is the same as the accessed element size.
942      In the following it will only grow (or become -1).  */
943   max_size = size;
944 
945   /* Compute cumulative bit-offset for nested component-refs and array-refs,
946      and find the ultimate containing object.  */
947   FOR_EACH_VEC_ELT (ops, i, op)
948     {
949       switch (op->opcode)
950 	{
951 	/* These may be in the reference ops, but we cannot do anything
952 	   sensible with them here.  */
953 	case ADDR_EXPR:
954 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
955 	  if (base != NULL_TREE
956 	      && TREE_CODE (base) == MEM_REF
957 	      && op->op0
958 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
959 	    {
960 	      vn_reference_op_t pop = &ops[i-1];
961 	      base = TREE_OPERAND (op->op0, 0);
962 	      if (pop->off == -1)
963 		{
964 		  max_size = -1;
965 		  offset = 0;
966 		}
967 	      else
968 		offset += pop->off * BITS_PER_UNIT;
969 	      op0_p = NULL;
970 	      break;
971 	    }
972 	  /* Fallthru.  */
973 	case CALL_EXPR:
974 	  return false;
975 
976 	/* Record the base objects.  */
977 	case MEM_REF:
978 	  base_alias_set = get_deref_alias_set (op->op0);
979 	  *op0_p = build2 (MEM_REF, op->type,
980 			   NULL_TREE, op->op0);
981 	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
982 	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
983 	  op0_p = &TREE_OPERAND (*op0_p, 0);
984 	  break;
985 
986 	case VAR_DECL:
987 	case PARM_DECL:
988 	case RESULT_DECL:
989 	case SSA_NAME:
990 	  *op0_p = op->op0;
991 	  op0_p = NULL;
992 	  break;
993 
994 	/* And now the usual component-reference style ops.  */
995 	case BIT_FIELD_REF:
996 	  offset += wi::to_offset (op->op1);
997 	  break;
998 
999 	case COMPONENT_REF:
1000 	  {
1001 	    tree field = op->op0;
1002 	    /* We do not have a complete COMPONENT_REF tree here so we
1003 	       cannot use component_ref_field_offset.  Do the interesting
1004 	       parts manually.  */
1005 	    tree this_offset = DECL_FIELD_OFFSET (field);
1006 
1007 	    if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1008 	      max_size = -1;
1009 	    else
1010 	      {
1011 		offset_int woffset = (wi::to_offset (this_offset)
1012 				      << LOG2_BITS_PER_UNIT);
1013 		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1014 		offset += woffset;
1015 	      }
1016 	    break;
1017 	  }
1018 
1019 	case ARRAY_RANGE_REF:
1020 	case ARRAY_REF:
1021 	  /* We recorded the lower bound and the element size.  */
1022 	  if (TREE_CODE (op->op0) != INTEGER_CST
1023 	      || TREE_CODE (op->op1) != INTEGER_CST
1024 	      || TREE_CODE (op->op2) != INTEGER_CST)
1025 	    max_size = -1;
1026 	  else
1027 	    {
1028 	      offset_int woffset
1029 		= wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1030 			    TYPE_PRECISION (TREE_TYPE (op->op0)));
1031 	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1032 	      woffset <<= LOG2_BITS_PER_UNIT;
1033 	      offset += woffset;
1034 	    }
1035 	  break;
1036 
1037 	case REALPART_EXPR:
1038 	  break;
1039 
1040 	case IMAGPART_EXPR:
1041 	  offset += size;
1042 	  break;
1043 
1044 	case VIEW_CONVERT_EXPR:
1045 	  break;
1046 
1047 	case STRING_CST:
1048 	case INTEGER_CST:
1049 	case COMPLEX_CST:
1050 	case VECTOR_CST:
1051 	case REAL_CST:
1052 	case CONSTRUCTOR:
1053 	case CONST_DECL:
1054 	  return false;
1055 
1056 	default:
1057 	  return false;
1058 	}
1059     }
1060 
1061   if (base == NULL_TREE)
1062     return false;
1063 
1064   ref->ref = NULL_TREE;
1065   ref->base = base;
1066   ref->ref_alias_set = set;
1067   if (base_alias_set != -1)
1068     ref->base_alias_set = base_alias_set;
1069   else
1070     ref->base_alias_set = get_alias_set (base);
1071   /* We discount volatiles from value-numbering elsewhere.  */
1072   ref->volatile_p = false;
1073 
1074   if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1075     {
1076       ref->offset = 0;
1077       ref->size = -1;
1078       ref->max_size = -1;
1079       return true;
1080     }
1081 
1082   ref->size = size.to_shwi ();
1083 
1084   if (!wi::fits_shwi_p (offset))
1085     {
1086       ref->offset = 0;
1087       ref->max_size = -1;
1088       return true;
1089     }
1090 
1091   ref->offset = offset.to_shwi ();
1092 
1093   if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1094     ref->max_size = -1;
1095   else
1096     ref->max_size = max_size.to_shwi ();
1097 
1098   return true;
1099 }
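
/* For example (illustration only, not from the original sources):
   for the operand sequence of an access like  s.b,  with a 4-byte int
   member b at byte offset 4, the resulting ao_ref has base s, offset
   32 bits, and size == max_size == 32 bits, which is what the alias
   oracle needs to disambiguate it against other references.  */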
1100 
1101 /* Copy the operations present in the call statement CALL into RESULT,
1102    a vector of vn_reference_op_s's.  */
1103 
1104 static void
1105 copy_reference_ops_from_call (gcall *call,
1106 			      vec<vn_reference_op_s> *result)
1107 {
1108   vn_reference_op_s temp;
1109   unsigned i;
1110   tree lhs = gimple_call_lhs (call);
1111   int lr;
1112 
1113   /* If two calls have different non-SSA lhs, their vdef value numbers
1114      should be different.  By adding the lhs to the vector here, we ensure
1115      that the hashcode is different, guaranteeing a different value number.  */
1116   if (lhs && TREE_CODE (lhs) != SSA_NAME)
1117     {
1118       memset (&temp, 0, sizeof (temp));
1119       temp.opcode = MODIFY_EXPR;
1120       temp.type = TREE_TYPE (lhs);
1121       temp.op0 = lhs;
1122       temp.off = -1;
1123       result->safe_push (temp);
1124     }
1125 
1126   /* Copy the type, opcode, function, static chain and EH region, if any.  */
1127   memset (&temp, 0, sizeof (temp));
1128   temp.type = gimple_call_return_type (call);
1129   temp.opcode = CALL_EXPR;
1130   temp.op0 = gimple_call_fn (call);
1131   temp.op1 = gimple_call_chain (call);
1132   if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1133     temp.op2 = size_int (lr);
1134   temp.off = -1;
1135   if (gimple_call_with_bounds_p (call))
1136     temp.with_bounds = 1;
1137   result->safe_push (temp);
1138 
1139   /* Copy the call arguments.  As they can be references as well,
1140      just chain them together.  */
1141   for (i = 0; i < gimple_call_num_args (call); ++i)
1142     {
1143       tree callarg = gimple_call_arg (call, i);
1144       copy_reference_ops_from_ref (callarg, result);
1145     }
1146 }
1147 
1148 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1149    *I_P to point to the last element of the replacement.  */
1150 static bool
1151 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1152 			    unsigned int *i_p)
1153 {
1154   unsigned int i = *i_p;
1155   vn_reference_op_t op = &(*ops)[i];
1156   vn_reference_op_t mem_op = &(*ops)[i - 1];
1157   tree addr_base;
1158   HOST_WIDE_INT addr_offset = 0;
1159 
1160   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1161      from .foo.bar to the preceding MEM_REF offset and replace the
1162      address with &OBJ.  */
1163   addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1164 					     &addr_offset);
1165   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1166   if (addr_base != TREE_OPERAND (op->op0, 0))
1167     {
1168       offset_int off = offset_int::from (mem_op->op0, SIGNED);
1169       off += addr_offset;
1170       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1171       op->op0 = build_fold_addr_expr (addr_base);
1172       if (tree_fits_shwi_p (mem_op->op0))
1173 	mem_op->off = tree_to_shwi (mem_op->op0);
1174       else
1175 	mem_op->off = -1;
1176       return true;
1177     }
1178   return false;
1179 }
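
/* For example (illustration only): with  struct S { int a; int b; } s
   and a 4-byte int, the operand pair

     { MEM_REF 0 } { ADDR_EXPR &s.b }

   is rewritten to

     { MEM_REF 4 } { ADDR_EXPR &s }

   i.e. the constant byte offset of .b is folded into the preceding
   MEM_REF and the address operand is reduced to the base object.  */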
1180 
1181 /* Forward-propagate the definition of the SSA name address at position
1182    *I_P in vector *OPS.  Updates *I_P to point to the last element of the replacement.  */
1183 static bool
1184 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1185 				     unsigned int *i_p)
1186 {
1187   unsigned int i = *i_p;
1188   vn_reference_op_t op = &(*ops)[i];
1189   vn_reference_op_t mem_op = &(*ops)[i - 1];
1190   gimple *def_stmt;
1191   enum tree_code code;
1192   offset_int off;
1193 
1194   def_stmt = SSA_NAME_DEF_STMT (op->op0);
1195   if (!is_gimple_assign (def_stmt))
1196     return false;
1197 
1198   code = gimple_assign_rhs_code (def_stmt);
1199   if (code != ADDR_EXPR
1200       && code != POINTER_PLUS_EXPR)
1201     return false;
1202 
1203   off = offset_int::from (mem_op->op0, SIGNED);
1204 
1205   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1206      from .foo.bar to the preceding MEM_REF offset and replace the
1207      address with &OBJ.  */
1208   if (code == ADDR_EXPR)
1209     {
1210       tree addr, addr_base;
1211       HOST_WIDE_INT addr_offset;
1212 
1213       addr = gimple_assign_rhs1 (def_stmt);
1214       addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1215 						 &addr_offset);
1216       /* If that didn't work because the address isn't invariant, propagate
1217          the reference tree from the address operation in case the current
1218 	 dereference has no offset.  */
1219       if (!addr_base
1220 	  && *i_p == ops->length () - 1
1221 	  && off == 0
1222 	  /* This makes us disable this transform for PRE where the
1223 	     reference ops might also be used for code insertion, which
1224 	     is invalid.  */
1225 	  && default_vn_walk_kind == VN_WALKREWRITE)
1226 	{
1227 	  auto_vec<vn_reference_op_s, 32> tem;
1228 	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1229 	  /* Make sure to preserve TBAA info.  The only objects not
1230 	     wrapped in MEM_REFs that can have their address taken are
1231 	     STRING_CSTs.  */
1232 	  if (tem.length () >= 2
1233 	      && tem[tem.length () - 2].opcode == MEM_REF)
1234 	    {
1235 	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1236 	      new_mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1237 						  new_mem_op->op0);
1238 	    }
1239 	  else
1240 	    gcc_assert (tem.last ().opcode == STRING_CST);
1241 	  ops->pop ();
1242 	  ops->pop ();
1243 	  ops->safe_splice (tem);
1244 	  --*i_p;
1245 	  return true;
1246 	}
1247       if (!addr_base
1248 	  || TREE_CODE (addr_base) != MEM_REF
1249 	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1250 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1251 	return false;
1252 
1253       off += addr_offset;
1254       off += mem_ref_offset (addr_base);
1255       op->op0 = TREE_OPERAND (addr_base, 0);
1256     }
1257   else
1258     {
1259       tree ptr, ptroff;
1260       ptr = gimple_assign_rhs1 (def_stmt);
1261       ptroff = gimple_assign_rhs2 (def_stmt);
1262       if (TREE_CODE (ptr) != SSA_NAME
1263 	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1264 	  || TREE_CODE (ptroff) != INTEGER_CST)
1265 	return false;
1266 
1267       off += wi::to_offset (ptroff);
1268       op->op0 = ptr;
1269     }
1270 
1271   mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1272   if (tree_fits_shwi_p (mem_op->op0))
1273     mem_op->off = tree_to_shwi (mem_op->op0);
1274   else
1275     mem_op->off = -1;
1276   if (TREE_CODE (op->op0) == SSA_NAME)
1277     op->op0 = SSA_VAL (op->op0);
1278   if (TREE_CODE (op->op0) != SSA_NAME)
1279     op->opcode = TREE_CODE (op->op0);
1280 
1281   /* And recurse.  */
1282   if (TREE_CODE (op->op0) == SSA_NAME)
1283     vn_reference_maybe_forwprop_address (ops, i_p);
1284   else if (TREE_CODE (op->op0) == ADDR_EXPR)
1285     vn_reference_fold_indirect (ops, i_p);
1286   return true;
1287 }
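
/* For example (illustration only; the names are made up): if the
   address in the pair  { MEM_REF 0 } { SSA_NAME ptr_2 }  is defined by

     ptr_2 = ptr_1 + 8;

   the pair becomes  { MEM_REF 8 } { SSA_NAME ptr_1 },  and if it is
   defined by

     ptr_2 = &p_1->b;    (b at byte offset 4)

   it becomes  { MEM_REF 4 } { SSA_NAME p_1 };  in both cases the
   function then recurses on the new base.  */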
1288 
1289 /* Optimize the reference REF to a constant if possible or return
1290    NULL_TREE if not.  */
1291 
1292 tree
1293 fully_constant_vn_reference_p (vn_reference_t ref)
1294 {
1295   vec<vn_reference_op_s> operands = ref->operands;
1296   vn_reference_op_t op;
1297 
1298   /* Try to simplify the translated expression if it is
1299      a call to a builtin function with at most two arguments.  */
1300   op = &operands[0];
1301   if (op->opcode == CALL_EXPR
1302       && TREE_CODE (op->op0) == ADDR_EXPR
1303       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1304       && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1305       && operands.length () >= 2
1306       && operands.length () <= 3)
1307     {
1308       vn_reference_op_t arg0, arg1 = NULL;
1309       bool anyconst = false;
1310       arg0 = &operands[1];
1311       if (operands.length () > 2)
1312 	arg1 = &operands[2];
1313       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1314 	  || (arg0->opcode == ADDR_EXPR
1315 	      && is_gimple_min_invariant (arg0->op0)))
1316 	anyconst = true;
1317       if (arg1
1318 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1319 	      || (arg1->opcode == ADDR_EXPR
1320 		  && is_gimple_min_invariant (arg1->op0))))
1321 	anyconst = true;
1322       if (anyconst)
1323 	{
1324 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1325 					 arg1 ? 2 : 1,
1326 					 arg0->op0,
1327 					 arg1 ? arg1->op0 : NULL);
1328 	  if (folded
1329 	      && TREE_CODE (folded) == NOP_EXPR)
1330 	    folded = TREE_OPERAND (folded, 0);
1331 	  if (folded
1332 	      && is_gimple_min_invariant (folded))
1333 	    return folded;
1334 	}
1335     }
1336 
1337   /* Simplify reads from constants or constant initializers.  */
1338   else if (BITS_PER_UNIT == 8
1339 	   && is_gimple_reg_type (ref->type)
1340 	   && (!INTEGRAL_TYPE_P (ref->type)
1341 	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1342     {
1343       HOST_WIDE_INT off = 0;
1344       HOST_WIDE_INT size;
1345       if (INTEGRAL_TYPE_P (ref->type))
1346 	size = TYPE_PRECISION (ref->type);
1347       else
1348 	size = tree_to_shwi (TYPE_SIZE (ref->type));
1349       if (size % BITS_PER_UNIT != 0
1350 	  || size > MAX_BITSIZE_MODE_ANY_MODE)
1351 	return NULL_TREE;
1352       size /= BITS_PER_UNIT;
1353       unsigned i;
1354       for (i = 0; i < operands.length (); ++i)
1355 	{
1356 	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1357 	    {
1358 	      ++i;
1359 	      break;
1360 	    }
1361 	  if (operands[i].off == -1)
1362 	    return NULL_TREE;
1363 	  off += operands[i].off;
1364 	  if (operands[i].opcode == MEM_REF)
1365 	    {
1366 	      ++i;
1367 	      break;
1368 	    }
1369 	}
1370       vn_reference_op_t base = &operands[--i];
1371       tree ctor = error_mark_node;
1372       tree decl = NULL_TREE;
1373       if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1374 	ctor = base->op0;
1375       else if (base->opcode == MEM_REF
1376 	       && base[1].opcode == ADDR_EXPR
1377 	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1378 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1379 	{
1380 	  decl = TREE_OPERAND (base[1].op0, 0);
1381 	  ctor = ctor_for_folding (decl);
1382 	}
1383       if (ctor == NULL_TREE)
1384 	return build_zero_cst (ref->type);
1385       else if (ctor != error_mark_node)
1386 	{
1387 	  if (decl)
1388 	    {
1389 	      tree res = fold_ctor_reference (ref->type, ctor,
1390 					      off * BITS_PER_UNIT,
1391 					      size * BITS_PER_UNIT, decl);
1392 	      if (res)
1393 		{
1394 		  STRIP_USELESS_TYPE_CONVERSION (res);
1395 		  if (is_gimple_min_invariant (res))
1396 		    return res;
1397 		}
1398 	    }
1399 	  else
1400 	    {
1401 	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1402 	      int len = native_encode_expr (ctor, buf, size, off);
1403 	      if (len > 0)
1404 		return native_interpret_expr (ref->type, buf, len);
1405 	    }
1406 	}
1407     }
1408 
1409   return NULL_TREE;
1410 }
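
/* For example (illustration only): a load from a constant initializer
   such as

     static const int tab[4] = { 10, 20, 30, 40 };
     ... = tab[2];

   ends up with a base decl tab at a constant byte offset of 8, so
   ctor_for_folding returns the initializer and the read folds to the
   constant 30.  Likewise, a call reference for  strlen ("foo")  with
   a constant argument folds to 3 through build_call_expr.  */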
1411 
1412 /* Return true if OPS contain a storage order barrier.  */
1413 
1414 static bool
1415 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1416 {
1417   vn_reference_op_t op;
1418   unsigned i;
1419 
1420   FOR_EACH_VEC_ELT (ops, i, op)
1421     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1422       return true;
1423 
1424   return false;
1425 }
1426 
1427 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1428    structures into their value numbers.  This is done in-place, and
1429    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
1430    whether any operands were valueized.  */
1431 
1432 static vec<vn_reference_op_s>
1433 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1434 {
1435   vn_reference_op_t vro;
1436   unsigned int i;
1437 
1438   *valueized_anything = false;
1439 
1440   FOR_EACH_VEC_ELT (orig, i, vro)
1441     {
1442       if (vro->opcode == SSA_NAME
1443 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1444 	{
1445 	  tree tem = SSA_VAL (vro->op0);
1446 	  if (tem != vro->op0)
1447 	    {
1448 	      *valueized_anything = true;
1449 	      vro->op0 = tem;
1450 	    }
1451 	  /* If it transforms from an SSA_NAME to a constant, update
1452 	     the opcode.  */
1453 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1454 	    vro->opcode = TREE_CODE (vro->op0);
1455 	}
1456       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1457 	{
1458 	  tree tem = SSA_VAL (vro->op1);
1459 	  if (tem != vro->op1)
1460 	    {
1461 	      *valueized_anything = true;
1462 	      vro->op1 = tem;
1463 	    }
1464 	}
1465       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1466 	{
1467 	  tree tem = SSA_VAL (vro->op2);
1468 	  if (tem != vro->op2)
1469 	    {
1470 	      *valueized_anything = true;
1471 	      vro->op2 = tem;
1472 	    }
1473 	}
1474       /* If it transforms from an SSA_NAME to an address, fold with
1475 	 a preceding indirect reference.  */
1476       if (i > 0
1477 	  && vro->op0
1478 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1479 	  && orig[i - 1].opcode == MEM_REF)
1480 	{
1481 	  if (vn_reference_fold_indirect (&orig, &i))
1482 	    *valueized_anything = true;
1483 	}
1484       else if (i > 0
1485 	       && vro->opcode == SSA_NAME
1486 	       && orig[i - 1].opcode == MEM_REF)
1487 	{
1488 	  if (vn_reference_maybe_forwprop_address (&orig, &i))
1489 	    *valueized_anything = true;
1490 	}
1491       /* If it transforms a non-constant ARRAY_REF into a constant
1492 	 one, adjust the constant offset.  */
1493       else if (vro->opcode == ARRAY_REF
1494 	       && vro->off == -1
1495 	       && TREE_CODE (vro->op0) == INTEGER_CST
1496 	       && TREE_CODE (vro->op1) == INTEGER_CST
1497 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1498 	{
1499 	  offset_int off = ((wi::to_offset (vro->op0)
1500 			     - wi::to_offset (vro->op1))
1501 			    * wi::to_offset (vro->op2)
1502 			    * vn_ref_op_align_unit (vro));
1503 	  if (wi::fits_shwi_p (off))
1504 	    vro->off = off.to_shwi ();
1505 	}
1506     }
1507 
1508   return orig;
1509 }
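
/* For example (illustration only): if i_2 has been value numbered to
   the constant 3, the operand  { ARRAY_REF i_2, off -1 }  produced
   for  a[i_2]  (with a 4-byte element type and a zero lower bound)
   is valueized to  { ARRAY_REF 3, off 12 },  turning a variable
   access into one with a known constant offset.  */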
1510 
1511 static vec<vn_reference_op_s>
1512 valueize_refs (vec<vn_reference_op_s> orig)
1513 {
1514   bool tem;
1515   return valueize_refs_1 (orig, &tem);
1516 }
1517 
1518 static vec<vn_reference_op_s> shared_lookup_references;
1519 
1520 /* Create a vector of vn_reference_op_s structures from REF, a
1521    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1522    this function.  *VALUEIZED_ANYTHING will specify whether any
1523    operands were valueized.  */
1524 
1525 static vec<vn_reference_op_s>
1526 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1527 {
1528   if (!ref)
1529     return vNULL;
1530   shared_lookup_references.truncate (0);
1531   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1532   shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1533 					      valueized_anything);
1534   return shared_lookup_references;
1535 }
1536 
1537 /* Create a vector of vn_reference_op_s structures from CALL, a
1538    call statement.  The vector is shared among all callers of
1539    this function.  */
1540 
1541 static vec<vn_reference_op_s>
1542 valueize_shared_reference_ops_from_call (gcall *call)
1543 {
1544   if (!call)
1545     return vNULL;
1546   shared_lookup_references.truncate (0);
1547   copy_reference_ops_from_call (call, &shared_lookup_references);
1548   shared_lookup_references = valueize_refs (shared_lookup_references);
1549   return shared_lookup_references;
1550 }
1551 
1552 /* Lookup a SCCVN reference operation VR in the current hash table.
1553    Returns the resulting value number if it exists in the hash table,
1554    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1555    vn_reference_t stored in the hashtable if something is found.  */
1556 
1557 static tree
1558 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1559 {
1560   vn_reference_s **slot;
1561   hashval_t hash;
1562 
1563   hash = vr->hashcode;
1564   slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1565   if (!slot && current_info == optimistic_info)
1566     slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1567   if (slot)
1568     {
1569       if (vnresult)
1570 	*vnresult = (vn_reference_t)*slot;
1571       return ((vn_reference_t)*slot)->result;
1572     }
1573 
1574   return NULL_TREE;
1575 }
1576 
1577 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
1578    with the current VUSE and performs the expression lookup.  */
1579 
1580 static void *
1581 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1582 		       unsigned int cnt, void *vr_)
1583 {
1584   vn_reference_t vr = (vn_reference_t)vr_;
1585   vn_reference_s **slot;
1586   hashval_t hash;
1587 
1588   /* This bounds the stmt walks we perform on reference lookups
1589      to O(1) instead of O(N) where N is the number of dominating
1590      stores.  */
1591   if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1592     return (void *)-1;
1593 
1594   if (last_vuse_ptr)
1595     *last_vuse_ptr = vuse;
1596 
1597   /* Fixup vuse and hash.  */
1598   if (vr->vuse)
1599     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1600   vr->vuse = vuse_ssa_val (vuse);
1601   if (vr->vuse)
1602     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1603 
1604   hash = vr->hashcode;
1605   slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1606   if (!slot && current_info == optimistic_info)
1607     slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1608   if (slot)
1609     return *slot;
1610 
1611   return NULL;
1612 }
1613 
1614 /* Lookup an existing or insert a new vn_reference entry into the
1615    value table for the VUSE, SET, TYPE, OPERANDS reference which
1616    has the value VALUE which is either a constant or an SSA name.  */
1617 
1618 static vn_reference_t
1619 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1620 					  alias_set_type set,
1621 					  tree type,
1622 					  vec<vn_reference_op_s,
1623 					        va_heap> operands,
1624 					  tree value)
1625 {
1626   vn_reference_s vr1;
1627   vn_reference_t result;
1628   unsigned value_id;
1629   vr1.vuse = vuse;
1630   vr1.operands = operands;
1631   vr1.type = type;
1632   vr1.set = set;
1633   vr1.hashcode = vn_reference_compute_hash (&vr1);
1634   if (vn_reference_lookup_1 (&vr1, &result))
1635     return result;
1636   if (TREE_CODE (value) == SSA_NAME)
1637     value_id = VN_INFO (value)->value_id;
1638   else
1639     value_id = get_or_alloc_constant_value_id (value);
1640   return vn_reference_insert_pieces (vuse, set, type,
1641 				     operands.copy (), value, value_id);
1642 }
1643 
1644 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1645 
1646 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
1647 
1648 static tree
1649 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1650 {
1651   if (!rcode.is_tree_code ())
1652     return NULL_TREE;
1653   tree *ops = ops_;
1654   unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1655   if (rcode == CONSTRUCTOR
1656       /* ???  We arrive here both with SCCVN's view (a decomposed CONSTRUCTOR)
1657 	 and with GIMPLE's / match-and-simplify's view (CONSTRUCTOR as a GENERIC tree).  */
1658       && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1659     {
1660       length = CONSTRUCTOR_NELTS (ops_[0]);
1661       ops = XALLOCAVEC (tree, length);
1662       for (unsigned i = 0; i < length; ++i)
1663 	ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1664     }
1665   vn_nary_op_t vnresult = NULL;
1666   return vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1667 				   type, ops, &vnresult);
1668 }
1669 
1670 /* Return a value-number for RCODE OPS... either by looking up an existing
1671    value-number for the simplified result or by inserting the operation if
1672    INSERT is true.  */
1673 
1674 static tree
1675 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1676 			   bool insert)
1677 {
1678   tree result = NULL_TREE;
1679   /* We will be creating a value number for
1680        RCODE (OPS...).
1681      So first simplify and lookup this expression to see if it
1682      is already available.  */
1683   mprts_hook = vn_lookup_simplify_result;
1684   bool res = false;
1685   switch (TREE_CODE_LENGTH ((tree_code) rcode))
1686     {
1687     case 1:
1688       res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1689       break;
1690     case 2:
1691       res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1692       break;
1693     case 3:
1694       res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1695       break;
1696     }
1697   mprts_hook = NULL;
1698   gimple *new_stmt = NULL;
1699   if (res
1700       && gimple_simplified_result_is_gimple_val (rcode, ops))
1701     /* The expression is already available.  */
1702     result = ops[0];
1703   else
1704     {
1705       tree val = vn_lookup_simplify_result (rcode, type, ops);
1706       if (!val && insert)
1707 	{
1708 	  gimple_seq stmts = NULL;
1709 	  result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1710 	  if (result)
1711 	    {
1712 	      gcc_assert (gimple_seq_singleton_p (stmts));
1713 	      new_stmt = gimple_seq_first_stmt (stmts);
1714 	    }
1715 	}
1716       else
1717 	/* The expression is already available.  */
1718 	result = val;
1719     }
1720   if (new_stmt)
1721     {
1722       /* The expression is not yet available, value-number lhs to
1723 	 the new SSA_NAME we created.  */
1724       /* Initialize value-number information properly.  */
1725       VN_INFO_GET (result)->valnum = result;
1726       VN_INFO (result)->value_id = get_next_value_id ();
1727       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1728 					  new_stmt);
1729       VN_INFO (result)->needs_insertion = true;
1730       /* ???  PRE phi-translation inserts NARYs without corresponding
1731          SSA name result.  Re-use those but set their result according
1732 	 to the stmt we just built.  */
1733       vn_nary_op_t nary = NULL;
1734       vn_nary_op_lookup_stmt (new_stmt, &nary);
1735       if (nary)
1736 	{
1737 	  gcc_assert (nary->result == NULL_TREE);
1738 	  nary->result = gimple_assign_lhs (new_stmt);
1739 	}
1740       /* As all "inserted" statements are singleton SCCs, insert
1741 	 to the valid table.  This is strictly needed to
1742 	 avoid re-generating new value SSA_NAMEs for the same
1743 	 expression during SCC iteration over and over (the
1744 	 optimistic table gets cleared after each iteration).
1745 	 We do not need to insert into the optimistic table, as
1746 	 lookups there will fall back to the valid table.  */
1747       else if (current_info == optimistic_info)
1748 	{
1749 	  current_info = valid_info;
1750 	  vn_nary_op_insert_stmt (new_stmt, result);
1751 	  current_info = optimistic_info;
1752 	}
1753       else
1754 	vn_nary_op_insert_stmt (new_stmt, result);
1755       if (dump_file && (dump_flags & TDF_DETAILS))
1756 	{
1757 	  fprintf (dump_file, "Inserting name ");
1758 	  print_generic_expr (dump_file, result, 0);
1759 	  fprintf (dump_file, " for expression ");
1760 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1761 	  fprintf (dump_file, "\n");
1762 	}
1763     }
1764   return result;
1765 }
1766 
1767 /* Return a value-number for RCODE OPS... either by looking up an existing
1768    value-number for the simplified result or by inserting the operation.  */
1769 
1770 static tree
1771 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1772 {
1773   return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1774 }
1775 
1776 /* Try to simplify the n-ary operation NARY (of at most three operands)
1777    and return its value if present.  */
1778 
1779 tree
1780 vn_nary_simplify (vn_nary_op_t nary)
1781 {
1782   if (nary->length > 3)
1783     return NULL_TREE;
1784   tree ops[3];
1785   memcpy (ops, nary->op, sizeof (tree) * nary->length);
1786   return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1787 }
1788 
1789 
1790 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1791    from the statement defining VUSE and if not successful tries to
1792    translate *REF and *VR through an aggregate copy at the definition
1793    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
1794    of *REF and *VR.  If only disambiguation was performed then
1795    *DISAMBIGUATE_ONLY is set to true.  */
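   As a walk_non_aliased_vuses callback this uses the following return
   convention (a summary of the code below, not a separately documented
   contract): NULL continues the walk (possibly with *REF and *VR
   translated through a copy), a vn_reference_t reports a found value,
   and (void *)-1 terminates the walk without a result.  */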
1796 
1797 static void *
1798 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1799 		       bool *disambiguate_only)
1800 {
1801   vn_reference_t vr = (vn_reference_t)vr_;
1802   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1803   tree base = ao_ref_base (ref);
1804   HOST_WIDE_INT offset, maxsize;
1805   static vec<vn_reference_op_s> lhs_ops;
1806   ao_ref lhs_ref;
1807   bool lhs_ref_ok = false;
1808 
1809   /* If the reference is based on a parameter that was determined to be
1810      pointing to readonly memory, it doesn't change.  */
1811   if (TREE_CODE (base) == MEM_REF
1812       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1813       && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1814       && bitmap_bit_p (const_parms,
1815 		       SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1816     {
1817       *disambiguate_only = true;
1818       return NULL;
1819     }
1820 
1821   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1822   if (is_gimple_assign (def_stmt))
1823     {
1824       tree lhs = gimple_assign_lhs (def_stmt);
1825       bool valueized_anything = false;
1826       /* Avoid re-allocation overhead.  */
1827       lhs_ops.truncate (0);
1828       copy_reference_ops_from_ref (lhs, &lhs_ops);
1829       lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1830       if (valueized_anything)
1831 	{
1832 	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1833 						      get_alias_set (lhs),
1834 						      TREE_TYPE (lhs), lhs_ops);
1835 	  if (lhs_ref_ok
1836 	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1837 	    {
1838 	      *disambiguate_only = true;
1839 	      return NULL;
1840 	    }
1841 	}
1842       else
1843 	{
1844 	  ao_ref_init (&lhs_ref, lhs);
1845 	  lhs_ref_ok = true;
1846 	}
1847     }
1848   else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1849 	   && gimple_call_num_args (def_stmt) <= 4)
1850     {
1851       /* For builtin calls, valueize their arguments and call the
1852          alias oracle again.  Valueization may improve points-to
1853 	 info of pointers and constify size and position arguments.
1854 	 Originally this was motivated by PR61034, which has
1855 	 conditional calls to free falsely clobbering ref because
1856 	 of imprecise points-to info of the argument.  */
1857       tree oldargs[4];
1858       bool valueized_anything = false;
1859       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1860 	{
1861 	  oldargs[i] = gimple_call_arg (def_stmt, i);
1862 	  if (TREE_CODE (oldargs[i]) == SSA_NAME
1863 	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
1864 	    {
1865 	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1866 	      valueized_anything = true;
1867 	    }
1868 	}
1869       if (valueized_anything)
1870 	{
1871 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1872 					       ref);
1873 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1874 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
1875 	  if (!res)
1876 	    {
1877 	      *disambiguate_only = true;
1878 	      return NULL;
1879 	    }
1880 	}
1881     }
1882 
1883   if (*disambiguate_only)
1884     return (void *)-1;
1885 
1886   offset = ref->offset;
1887   maxsize = ref->max_size;
1888 
1889   /* If we cannot constrain the size of the reference we cannot
1890      test if anything kills it.  */
1891   if (maxsize == -1)
1892     return (void *)-1;
1893 
1894   /* We can't deduce anything useful from clobbers.  */
1895   if (gimple_clobber_p (def_stmt))
1896     return (void *)-1;
1897 
1898   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1899      from that definition.
1900      1) Memset.  */
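  /* For instance (a sketch), given
       memset (&a, 0, sizeof (a));
       ... = a.f;
     a read of a.f covered by the memset can be value-numbered to zero.  */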
1901   if (is_gimple_reg_type (vr->type)
1902       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1903       && integer_zerop (gimple_call_arg (def_stmt, 1))
1904       && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1905       && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1906     {
1907       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1908       tree base2;
1909       HOST_WIDE_INT offset2, size2, maxsize2;
1910       bool reverse;
1911       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1912 				       &reverse);
1913       size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1914       if ((unsigned HOST_WIDE_INT)size2 / 8
1915 	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1916 	  && maxsize2 != -1
1917 	  && operand_equal_p (base, base2, 0)
1918 	  && offset2 <= offset
1919 	  && offset2 + size2 >= offset + maxsize)
1920 	{
1921 	  tree val = build_zero_cst (vr->type);
1922 	  return vn_reference_lookup_or_insert_for_pieces
1923 	           (vuse, vr->set, vr->type, vr->operands, val);
1924 	}
1925     }
1926 
1927   /* 2) Assignment from an empty CONSTRUCTOR.  */
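  /* For instance, after
       a = {};
     a covered read such as a.f can likewise be value-numbered to zero.  */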
1928   else if (is_gimple_reg_type (vr->type)
1929 	   && gimple_assign_single_p (def_stmt)
1930 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1931 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1932     {
1933       tree base2;
1934       HOST_WIDE_INT offset2, size2, maxsize2;
1935       bool reverse;
1936       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1937 				       &offset2, &size2, &maxsize2, &reverse);
1938       if (maxsize2 != -1
1939 	  && maxsize2 == size2
1940 	  && operand_equal_p (base, base2, 0)
1941 	  && offset2 <= offset
1942 	  && offset2 + size2 >= offset + maxsize)
1943 	{
1944 	  tree val = build_zero_cst (vr->type);
1945 	  return vn_reference_lookup_or_insert_for_pieces
1946 	           (vuse, vr->set, vr->type, vr->operands, val);
1947 	}
1948     }
1949 
1950   /* 3) Assignment from a constant.  We can use fold's native encode/interpret
1951      routines to extract the assigned bits.  */
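  /* For instance (a sketch), given a store of a known constant
       a = 0x01020304;
     a covered partial read such as ((unsigned char *)&a)[0] can be
     computed by native-encoding the constant and re-interpreting the
     accessed bytes.  */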
1952   else if (ref->size == maxsize
1953 	   && is_gimple_reg_type (vr->type)
1954 	   && !contains_storage_order_barrier_p (vr->operands)
1955 	   && gimple_assign_single_p (def_stmt)
1956 	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1957 	   && maxsize % BITS_PER_UNIT == 0
1958 	   && offset % BITS_PER_UNIT == 0
1959 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1960 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1961 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1962     {
1963       tree base2;
1964       HOST_WIDE_INT offset2, size2, maxsize2;
1965       bool reverse;
1966       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1967 				       &offset2, &size2, &maxsize2, &reverse);
1968       if (!reverse
1969 	  && maxsize2 != -1
1970 	  && maxsize2 == size2
1971 	  && size2 % BITS_PER_UNIT == 0
1972 	  && offset2 % BITS_PER_UNIT == 0
1973 	  && operand_equal_p (base, base2, 0)
1974 	  && offset2 <= offset
1975 	  && offset2 + size2 >= offset + maxsize)
1976 	{
1977 	  /* We support up to 512-bit values (for V8DFmode).  */
1978 	  unsigned char buffer[64];
1979 	  int len;
1980 
1981 	  tree rhs = gimple_assign_rhs1 (def_stmt);
1982 	  if (TREE_CODE (rhs) == SSA_NAME)
1983 	    rhs = SSA_VAL (rhs);
1984 	  len = native_encode_expr (rhs,
1985 				    buffer, sizeof (buffer),
1986 				    (offset - offset2) / BITS_PER_UNIT);
1987 	  if (len > 0 && len * BITS_PER_UNIT >= ref->size)
1988 	    {
1989 	      tree type = vr->type;
1990 	      /* Make sure to interpret in a type that has a range
1991 	         covering the whole access size.  */
1992 	      if (INTEGRAL_TYPE_P (vr->type)
1993 		  && ref->size != TYPE_PRECISION (vr->type))
1994 		type = build_nonstandard_integer_type (ref->size,
1995 						       TYPE_UNSIGNED (type));
1996 	      tree val = native_interpret_expr (type, buffer,
1997 						ref->size / BITS_PER_UNIT);
1998 	      /* If we chop off bits because the type's precision doesn't
1999 		 match the memory access size this is ok when optimizing
2000 		 reads but not when called from the DSE code during
2001 		 elimination.  */
2002 	      if (val
2003 		  && type != vr->type)
2004 		{
2005 		  if (! int_fits_type_p (val, vr->type))
2006 		    val = NULL_TREE;
2007 		  else
2008 		    val = fold_convert (vr->type, val);
2009 		}
2010 
2011 	      if (val)
2012 		return vn_reference_lookup_or_insert_for_pieces
2013 			 (vuse, vr->set, vr->type, vr->operands, val);
2014 	    }
2015 	}
2016     }
2017 
2018   /* 4) Assignment from an SSA name whose definition we may be able
2019      to access pieces from.  */
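  /* For instance (a sketch), given
       a = b_1;
       ... = a.f;
     the read of a.f can be expressed as BIT_FIELD_REF <b_1, size, pos>
     and value-numbered via vn_nary_build_or_lookup below.  */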
2020   else if (ref->size == maxsize
2021 	   && is_gimple_reg_type (vr->type)
2022 	   && !contains_storage_order_barrier_p (vr->operands)
2023 	   && gimple_assign_single_p (def_stmt)
2024 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2025     {
2026       tree base2;
2027       HOST_WIDE_INT offset2, size2, maxsize2;
2028       bool reverse;
2029       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2030 				       &offset2, &size2, &maxsize2,
2031 				       &reverse);
2032       if (!reverse
2033 	  && maxsize2 != -1
2034 	  && maxsize2 == size2
2035 	  && operand_equal_p (base, base2, 0)
2036 	  && offset2 <= offset
2037 	  && offset2 + size2 >= offset + maxsize
2038 	  /* ???  We can't handle bitfield precision extracts without
2039 	     either using an alternate type for the BIT_FIELD_REF and
2040 	     then doing a conversion or possibly adjusting the offset
2041 	     according to endianness.  */
2042 	  && (! INTEGRAL_TYPE_P (vr->type)
2043 	      || ref->size == TYPE_PRECISION (vr->type))
2044 	  && ref->size % BITS_PER_UNIT == 0)
2045 	{
2046 	  code_helper rcode = BIT_FIELD_REF;
2047 	  tree ops[3];
2048 	  ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2049 	  ops[1] = bitsize_int (ref->size);
2050 	  ops[2] = bitsize_int (offset - offset2);
2051 	  tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2052 	  if (val
2053 	      && (TREE_CODE (val) != SSA_NAME
2054 		  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2055 	    {
2056 	      vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2057 		  (vuse, vr->set, vr->type, vr->operands, val);
2058 	      return res;
2059 	    }
2060 	}
2061     }
2062 
2063   /* 5) For aggregate copies translate the reference through them if
2064      the copy kills ref.  */
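  /* For instance (a sketch), given the aggregate copy
       a = b;
       ... = a.f;
     the read of a.f is rewritten into a read of b.f and the walk
     continues with the translated reference.  */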
2065   else if (vn_walk_kind == VN_WALKREWRITE
2066 	   && gimple_assign_single_p (def_stmt)
2067 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
2068 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2069 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2070     {
2071       tree base2;
2072       HOST_WIDE_INT maxsize2;
2073       int i, j, k;
2074       auto_vec<vn_reference_op_s> rhs;
2075       vn_reference_op_t vro;
2076       ao_ref r;
2077 
2078       if (!lhs_ref_ok)
2079 	return (void *)-1;
2080 
2081       /* See if the assignment kills REF.  */
2082       base2 = ao_ref_base (&lhs_ref);
2083       maxsize2 = lhs_ref.max_size;
2084       if (maxsize2 == -1
2085 	  || (base != base2
2086 	      && (TREE_CODE (base) != MEM_REF
2087 		  || TREE_CODE (base2) != MEM_REF
2088 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2089 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2090 					  TREE_OPERAND (base2, 1))))
2091 	  || !stmt_kills_ref_p (def_stmt, ref))
2092 	return (void *)-1;
2093 
2094       /* Find the common base of ref and the lhs.  lhs_ops already
2095          contains valueized operands for the lhs.  */
2096       i = vr->operands.length () - 1;
2097       j = lhs_ops.length () - 1;
2098       while (j >= 0 && i >= 0
2099 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2100 	{
2101 	  i--;
2102 	  j--;
2103 	}
2104 
2105       /* ???  The innermost op should always be a MEM_REF and we already
2106          checked that the assignment to the lhs kills vr.  Thus for
2107 	 aggregate copies using char[] types the vn_reference_op_eq
2108 	 may fail when comparing types for compatibility.  But we really
2109 	 don't care here - further lookups with the rewritten operands
2110 	 will simply fail if we messed up types too badly.  */
2111       HOST_WIDE_INT extra_off = 0;
2112       if (j == 0 && i >= 0
2113 	  && lhs_ops[0].opcode == MEM_REF
2114 	  && lhs_ops[0].off != -1)
2115 	{
2116 	  if (lhs_ops[0].off == vr->operands[i].off)
2117 	    i--, j--;
2118 	  else if (vr->operands[i].opcode == MEM_REF
2119 		   && vr->operands[i].off != -1)
2120 	    {
2121 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
2122 	      i--, j--;
2123 	    }
2124 	}
2125 
2126       /* i now points to the first additional op.
2127 	 ???  LHS may not be completely contained in VR, one or more
2128 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
2129 	 try handling outermost VIEW_CONVERT_EXPRs.  */
2130       if (j != -1)
2131 	return (void *)-1;
2132 
2133       /* Punt if the additional ops contain a storage order barrier.  */
2134       for (k = i; k >= 0; k--)
2135 	{
2136 	  vro = &vr->operands[k];
2137 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2138 	    return (void *)-1;
2139 	}
2140 
2141       /* Now re-write REF to be based on the rhs of the assignment.  */
2142       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2143 
2144       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
2145       if (extra_off != 0)
2146 	{
2147 	  if (rhs.length () < 2
2148 	      || rhs[0].opcode != MEM_REF
2149 	      || rhs[0].off == -1)
2150 	    return (void *)-1;
2151 	  rhs[0].off += extra_off;
2152 	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2153 					build_int_cst (TREE_TYPE (rhs[0].op0),
2154 						       extra_off));
2155 	}
2156 
2157       /* We need to pre-pend vr->operands[0..i] to rhs.  */
2158       vec<vn_reference_op_s> old = vr->operands;
2159       if (i + 1 + rhs.length () > vr->operands.length ())
2160 	vr->operands.safe_grow (i + 1 + rhs.length ());
2161       else
2162 	vr->operands.truncate (i + 1 + rhs.length ());
2163       FOR_EACH_VEC_ELT (rhs, j, vro)
2164 	vr->operands[i + 1 + j] = *vro;
2165       vr->operands = valueize_refs (vr->operands);
2166       if (old == shared_lookup_references)
2167 	shared_lookup_references = vr->operands;
2168       vr->hashcode = vn_reference_compute_hash (vr);
2169 
2170       /* Try folding the new reference to a constant.  */
2171       tree val = fully_constant_vn_reference_p (vr);
2172       if (val)
2173 	return vn_reference_lookup_or_insert_for_pieces
2174 		 (vuse, vr->set, vr->type, vr->operands, val);
2175 
2176       /* Adjust *ref from the new operands.  */
2177       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2178 	return (void *)-1;
2179       /* This can happen with bitfields.  */
2180       if (ref->size != r.size)
2181 	return (void *)-1;
2182       *ref = r;
2183 
2184       /* Do not update last seen VUSE after translating.  */
2185       last_vuse_ptr = NULL;
2186 
2187       /* Keep looking for the adjusted *REF / VR pair.  */
2188       return NULL;
2189     }
2190 
2191   /* 6) For memcpy copies translate the reference through them if
2192      the copy kills ref.  */
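  /* For instance (a sketch), given
       memcpy (&a, &b, sizeof (a));
       ... = a.f;
     the read of a.f is rewritten into a MEM_REF based on &b at the
     corresponding offset and the walk continues.  */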
2193   else if (vn_walk_kind == VN_WALKREWRITE
2194 	   && is_gimple_reg_type (vr->type)
2195 	   /* ???  Handle BCOPY as well.  */
2196 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2197 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2198 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2199 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2200 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2201 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2202 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2203 	   && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2204     {
2205       tree lhs, rhs;
2206       ao_ref r;
2207       HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2208       vn_reference_op_s op;
2209       HOST_WIDE_INT at;
2210 
2211       /* Only handle non-variable, addressable refs.  */
2212       if (ref->size != maxsize
2213 	  || offset % BITS_PER_UNIT != 0
2214 	  || ref->size % BITS_PER_UNIT != 0)
2215 	return (void *)-1;
2216 
2217       /* Extract a pointer base and an offset for the destination.  */
2218       lhs = gimple_call_arg (def_stmt, 0);
2219       lhs_offset = 0;
2220       if (TREE_CODE (lhs) == SSA_NAME)
2221 	{
2222 	  lhs = SSA_VAL (lhs);
2223 	  if (TREE_CODE (lhs) == SSA_NAME)
2224 	    {
2225 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2226 	      if (gimple_assign_single_p (def_stmt)
2227 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2228 		lhs = gimple_assign_rhs1 (def_stmt);
2229 	    }
2230 	}
2231       if (TREE_CODE (lhs) == ADDR_EXPR)
2232 	{
2233 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2234 						    &lhs_offset);
2235 	  if (!tem)
2236 	    return (void *)-1;
2237 	  if (TREE_CODE (tem) == MEM_REF
2238 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2239 	    {
2240 	      lhs = TREE_OPERAND (tem, 0);
2241 	      if (TREE_CODE (lhs) == SSA_NAME)
2242 		lhs = SSA_VAL (lhs);
2243 	      lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2244 	    }
2245 	  else if (DECL_P (tem))
2246 	    lhs = build_fold_addr_expr (tem);
2247 	  else
2248 	    return (void *)-1;
2249 	}
2250       if (TREE_CODE (lhs) != SSA_NAME
2251 	  && TREE_CODE (lhs) != ADDR_EXPR)
2252 	return (void *)-1;
2253 
2254       /* Extract a pointer base and an offset for the source.  */
2255       rhs = gimple_call_arg (def_stmt, 1);
2256       rhs_offset = 0;
2257       if (TREE_CODE (rhs) == SSA_NAME)
2258 	rhs = SSA_VAL (rhs);
2259       if (TREE_CODE (rhs) == ADDR_EXPR)
2260 	{
2261 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2262 						    &rhs_offset);
2263 	  if (!tem)
2264 	    return (void *)-1;
2265 	  if (TREE_CODE (tem) == MEM_REF
2266 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2267 	    {
2268 	      rhs = TREE_OPERAND (tem, 0);
2269 	      rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2270 	    }
2271 	  else if (DECL_P (tem))
2272 	    rhs = build_fold_addr_expr (tem);
2273 	  else
2274 	    return (void *)-1;
2275 	}
2276       if (TREE_CODE (rhs) != SSA_NAME
2277 	  && TREE_CODE (rhs) != ADDR_EXPR)
2278 	return (void *)-1;
2279 
2280       copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2281 
2282       /* The bases of the destination and the reference have to agree.  */
2283       if ((TREE_CODE (base) != MEM_REF
2284 	   && !DECL_P (base))
2285 	  || (TREE_CODE (base) == MEM_REF
2286 	      && (TREE_OPERAND (base, 0) != lhs
2287 		  || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2288 	  || (DECL_P (base)
2289 	      && (TREE_CODE (lhs) != ADDR_EXPR
2290 		  || TREE_OPERAND (lhs, 0) != base)))
2291 	return (void *)-1;
2292 
2293       at = offset / BITS_PER_UNIT;
2294       if (TREE_CODE (base) == MEM_REF)
2295 	at += tree_to_uhwi (TREE_OPERAND (base, 1));
2296       /* If the access is completely outside of the memcpy destination
2297 	 area there is no aliasing.  */
2298       if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2299 	  || lhs_offset + copy_size <= at)
2300 	return NULL;
2301       /* And the access has to be contained within the memcpy destination.  */
2302       if (lhs_offset > at
2303 	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2304 	return (void *)-1;
2305 
2306       /* Make room for 2 operands in the new reference.  */
2307       if (vr->operands.length () < 2)
2308 	{
2309 	  vec<vn_reference_op_s> old = vr->operands;
2310 	  vr->operands.safe_grow_cleared (2);
2311 	  if (old == shared_lookup_references)
2312 	    shared_lookup_references = vr->operands;
2313 	}
2314       else
2315 	vr->operands.truncate (2);
2316 
2317       /* The looked-through reference is a simple MEM_REF.  */
2318       memset (&op, 0, sizeof (op));
2319       op.type = vr->type;
2320       op.opcode = MEM_REF;
2321       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2322       op.off = at - lhs_offset + rhs_offset;
2323       vr->operands[0] = op;
2324       op.type = TREE_TYPE (rhs);
2325       op.opcode = TREE_CODE (rhs);
2326       op.op0 = rhs;
2327       op.off = -1;
2328       vr->operands[1] = op;
2329       vr->hashcode = vn_reference_compute_hash (vr);
2330 
2331       /* Try folding the new reference to a constant.  */
2332       tree val = fully_constant_vn_reference_p (vr);
2333       if (val)
2334 	return vn_reference_lookup_or_insert_for_pieces
2335 		 (vuse, vr->set, vr->type, vr->operands, val);
2336 
2337       /* Adjust *ref from the new operands.  */
2338       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2339 	return (void *)-1;
2340       /* This can happen with bitfields.  */
2341       if (ref->size != r.size)
2342 	return (void *)-1;
2343       *ref = r;
2344 
2345       /* Do not update last seen VUSE after translating.  */
2346       last_vuse_ptr = NULL;
2347 
2348       /* Keep looking for the adjusted *REF / VR pair.  */
2349       return NULL;
2350     }
2351 
2352   /* Bail out and stop walking.  */
2353   return (void *)-1;
2354 }
2355 
2356 /* Return a reference op vector from OP that can be used for
2357    vn_reference_lookup_pieces.  The caller is responsible for releasing
2358    the vector.  */
2359 
2360 vec<vn_reference_op_s>
2361 vn_reference_operands_for_lookup (tree op)
2362 {
2363   bool valueized;
2364   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2365 }
2366 
2367 /* Lookup a reference operation by its parts, in the current hash table.
2368    Returns the resulting value number if it exists in the hash table,
2369    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
2370    vn_reference_t stored in the hashtable if something is found.  */
2371 
2372 tree
2373 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2374 			    vec<vn_reference_op_s> operands,
2375 			    vn_reference_t *vnresult, vn_lookup_kind kind)
2376 {
2377   struct vn_reference_s vr1;
2378   vn_reference_t tmp;
2379   tree cst;
2380 
2381   if (!vnresult)
2382     vnresult = &tmp;
2383   *vnresult = NULL;
2384 
2385   vr1.vuse = vuse_ssa_val (vuse);
2386   shared_lookup_references.truncate (0);
2387   shared_lookup_references.safe_grow (operands.length ());
2388   memcpy (shared_lookup_references.address (),
2389 	  operands.address (),
2390 	  sizeof (vn_reference_op_s)
2391 	  * operands.length ());
2392   vr1.operands = operands = shared_lookup_references
2393     = valueize_refs (shared_lookup_references);
2394   vr1.type = type;
2395   vr1.set = set;
2396   vr1.hashcode = vn_reference_compute_hash (&vr1);
2397   if ((cst = fully_constant_vn_reference_p (&vr1)))
2398     return cst;
2399 
2400   vn_reference_lookup_1 (&vr1, vnresult);
2401   if (!*vnresult
2402       && kind != VN_NOWALK
2403       && vr1.vuse)
2404     {
2405       ao_ref r;
2406       vn_walk_kind = kind;
2407       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2408 	*vnresult =
2409 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2410 						  vn_reference_lookup_2,
2411 						  vn_reference_lookup_3,
2412 						  vuse_ssa_val, &vr1);
2413       gcc_checking_assert (vr1.operands == shared_lookup_references);
2414     }
2415 
2416   if (*vnresult)
2417      return (*vnresult)->result;
2418 
2419   return NULL_TREE;
2420 }
2421 
2422 /* Lookup OP in the current hash table, and return the resulting value
2423    number if it exists in the hash table.  Return NULL_TREE if it does
2424    not exist in the hash table or if the result field of the structure
2425    was NULL.  VNRESULT will be filled in with the vn_reference_t
2426    stored in the hashtable if one exists.  When TBAA_P is false assume
2427    we are looking up a store and treat it as having alias-set zero.  */
2428 
2429 tree
2430 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2431 		     vn_reference_t *vnresult, bool tbaa_p)
2432 {
2433   vec<vn_reference_op_s> operands;
2434   struct vn_reference_s vr1;
2435   tree cst;
2436   bool valueized_anything;
2437 
2438   if (vnresult)
2439     *vnresult = NULL;
2440 
2441   vr1.vuse = vuse_ssa_val (vuse);
2442   vr1.operands = operands
2443     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2444   vr1.type = TREE_TYPE (op);
2445   vr1.set = tbaa_p ? get_alias_set (op) : 0;
2446   vr1.hashcode = vn_reference_compute_hash (&vr1);
2447   if ((cst = fully_constant_vn_reference_p (&vr1)))
2448     return cst;
2449 
2450   if (kind != VN_NOWALK
2451       && vr1.vuse)
2452     {
2453       vn_reference_t wvnresult;
2454       ao_ref r;
2455       /* Make sure to use a valueized reference if we valueized anything.
2456          Otherwise preserve the full reference for advanced TBAA.  */
2457       if (!valueized_anything
2458 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2459 					     vr1.operands))
2460 	ao_ref_init (&r, op);
2461       if (! tbaa_p)
2462 	r.ref_alias_set = r.base_alias_set = 0;
2463       vn_walk_kind = kind;
2464       wvnresult =
2465 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2466 						vn_reference_lookup_2,
2467 						vn_reference_lookup_3,
2468 						vuse_ssa_val, &vr1);
2469       gcc_checking_assert (vr1.operands == shared_lookup_references);
2470       if (wvnresult)
2471 	{
2472 	  if (vnresult)
2473 	    *vnresult = wvnresult;
2474 	  return wvnresult->result;
2475 	}
2476 
2477       return NULL_TREE;
2478     }
2479 
2480   return vn_reference_lookup_1 (&vr1, vnresult);
2481 }
2482 
2483 /* Lookup CALL in the current hash table and return the entry in
2484    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
2485 
2486 void
2487 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2488 			  vn_reference_t vr)
2489 {
2490   if (vnresult)
2491     *vnresult = NULL;
2492 
2493   tree vuse = gimple_vuse (call);
2494 
2495   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2496   vr->operands = valueize_shared_reference_ops_from_call (call);
2497   vr->type = gimple_expr_type (call);
2498   vr->set = 0;
2499   vr->hashcode = vn_reference_compute_hash (vr);
2500   vn_reference_lookup_1 (vr, vnresult);
2501 }
2502 
2503 /* Insert OP into the current hash table with a value number of
2504    RESULT, and return the resulting reference structure we created.  */
2505 
2506 static vn_reference_t
2507 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2508 {
2509   vn_reference_s **slot;
2510   vn_reference_t vr1;
2511   bool tem;
2512 
2513   vr1 = current_info->references_pool->allocate ();
2514   if (TREE_CODE (result) == SSA_NAME)
2515     vr1->value_id = VN_INFO (result)->value_id;
2516   else
2517     vr1->value_id = get_or_alloc_constant_value_id (result);
2518   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2519   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2520   vr1->type = TREE_TYPE (op);
2521   vr1->set = get_alias_set (op);
2522   vr1->hashcode = vn_reference_compute_hash (vr1);
2523   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2524   vr1->result_vdef = vdef;
2525 
2526   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2527 							INSERT);
2528 
2529   /* Because we lookup stores using vuses, and value number failures
2530      using the vdefs (see visit_reference_op_store for how and why),
2531      it's possible that on failure we may try to insert an already
2532      inserted store.  This is not wrong; there is no ssa name for a
2533      store that we could use as a differentiator anyway.  Thus, unlike
2534      the other lookup functions, you cannot gcc_assert (!*slot)
2535      here.  */
2536 
2537   /* But free the old slot in case of a collision.  */
2538   if (*slot)
2539     free_reference (*slot);
2540 
2541   *slot = vr1;
2542   return vr1;
2543 }
2544 
2545 /* Insert a reference by its pieces into the current hash table with
2546    a value number of RESULT.  Return the resulting reference
2547    structure we created.  */
2548 
2549 vn_reference_t
2550 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2551 			    vec<vn_reference_op_s> operands,
2552 			    tree result, unsigned int value_id)
2553 
2554 {
2555   vn_reference_s **slot;
2556   vn_reference_t vr1;
2557 
2558   vr1 = current_info->references_pool->allocate ();
2559   vr1->value_id = value_id;
2560   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2561   vr1->operands = valueize_refs (operands);
2562   vr1->type = type;
2563   vr1->set = set;
2564   vr1->hashcode = vn_reference_compute_hash (vr1);
2565   if (result && TREE_CODE (result) == SSA_NAME)
2566     result = SSA_VAL (result);
2567   vr1->result = result;
2568 
2569   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2570 							INSERT);
2571 
2572   /* At this point we should have all the things inserted that we have
2573      seen before, and we should never try inserting something that
2574      already exists.  */
2575   gcc_assert (!*slot);
2576   if (*slot)
2577     free_reference (*slot);
2578 
2579   *slot = vr1;
2580   return vr1;
2581 }
2582 
2583 /* Compute and return the hash value for nary operation VNO1.  */
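/* Note that, as a side effect, the operands of commutative operations
   (and of swappable comparisons) are canonicalized below so that, e.g.,
   a + b and b + a receive the same hash and thus the same value number.  */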
2584 
2585 static hashval_t
2586 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2587 {
2588   inchash::hash hstate;
2589   unsigned i;
2590 
2591   for (i = 0; i < vno1->length; ++i)
2592     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2593       vno1->op[i] = SSA_VAL (vno1->op[i]);
2594 
2595   if (((vno1->length == 2
2596 	&& commutative_tree_code (vno1->opcode))
2597        || (vno1->length == 3
2598 	   && commutative_ternary_tree_code (vno1->opcode)))
2599       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2600     std::swap (vno1->op[0], vno1->op[1]);
2601   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2602 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2603     {
2604       std::swap (vno1->op[0], vno1->op[1]);
2605       vno1->opcode = swap_tree_comparison (vno1->opcode);
2606     }
2607 
2608   hstate.add_int (vno1->opcode);
2609   for (i = 0; i < vno1->length; ++i)
2610     inchash::add_expr (vno1->op[i], hstate);
2611 
2612   return hstate.end ();
2613 }
2614 
2615 /* Compare nary operations VNO1 and VNO2 and return true if they are
2616    equivalent.  */
2617 
2618 bool
2619 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2620 {
2621   unsigned i;
2622 
2623   if (vno1->hashcode != vno2->hashcode)
2624     return false;
2625 
2626   if (vno1->length != vno2->length)
2627     return false;
2628 
2629   if (vno1->opcode != vno2->opcode
2630       || !types_compatible_p (vno1->type, vno2->type))
2631     return false;
2632 
2633   for (i = 0; i < vno1->length; ++i)
2634     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2635       return false;
2636 
2637   return true;
2638 }
2639 
2640 /* Initialize VNO from the pieces provided.  */
2641 
2642 static void
2643 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2644 			     enum tree_code code, tree type, tree *ops)
2645 {
2646   vno->opcode = code;
2647   vno->length = length;
2648   vno->type = type;
2649   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2650 }
2651 
2652 /* Initialize VNO from OP.  */
2653 
2654 static void
2655 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2656 {
2657   unsigned i;
2658 
2659   vno->opcode = TREE_CODE (op);
2660   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2661   vno->type = TREE_TYPE (op);
2662   for (i = 0; i < vno->length; ++i)
2663     vno->op[i] = TREE_OPERAND (op, i);
2664 }
2665 
2666 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2667 
2668 static unsigned int
2669 vn_nary_length_from_stmt (gimple *stmt)
2670 {
2671   switch (gimple_assign_rhs_code (stmt))
2672     {
2673     case REALPART_EXPR:
2674     case IMAGPART_EXPR:
2675     case VIEW_CONVERT_EXPR:
2676       return 1;
2677 
2678     case BIT_FIELD_REF:
2679       return 3;
2680 
2681     case CONSTRUCTOR:
2682       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2683 
2684     default:
2685       return gimple_num_ops (stmt) - 1;
2686     }
2687 }
2688 
2689 /* Initialize VNO from STMT.  */
2690 
2691 static void
2692 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2693 {
2694   unsigned i;
2695 
2696   vno->opcode = gimple_assign_rhs_code (stmt);
2697   vno->type = gimple_expr_type (stmt);
2698   switch (vno->opcode)
2699     {
2700     case REALPART_EXPR:
2701     case IMAGPART_EXPR:
2702     case VIEW_CONVERT_EXPR:
2703       vno->length = 1;
2704       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2705       break;
2706 
2707     case BIT_FIELD_REF:
2708       vno->length = 3;
2709       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2710       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2711       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2712       break;
2713 
2714     case CONSTRUCTOR:
2715       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2716       for (i = 0; i < vno->length; ++i)
2717 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2718       break;
2719 
2720     default:
2721       gcc_checking_assert (!gimple_assign_single_p (stmt));
2722       vno->length = gimple_num_ops (stmt) - 1;
2723       for (i = 0; i < vno->length; ++i)
2724 	vno->op[i] = gimple_op (stmt, i + 1);
2725     }
2726 }
2727 
2728 /* Compute the hashcode for VNO and look for it in the hash table;
2729    return the resulting value number if it exists in the hash table.
2730    Return NULL_TREE if it does not exist in the hash table or if the
2731    result field of the operation is NULL.  VNRESULT will contain the
2732    vn_nary_op_t from the hashtable if it exists.  */
2733 
2734 static tree
2735 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2736 {
2737   vn_nary_op_s **slot;
2738 
2739   if (vnresult)
2740     *vnresult = NULL;
2741 
2742   vno->hashcode = vn_nary_op_compute_hash (vno);
2743   slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2744 						  NO_INSERT);
2745   if (!slot && current_info == optimistic_info)
2746     slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2747 						  NO_INSERT);
2748   if (!slot)
2749     return NULL_TREE;
2750   if (vnresult)
2751     *vnresult = *slot;
2752   return (*slot)->result;
2753 }
2754 
2755 /* Lookup an n-ary operation by its pieces and return the resulting value
2756    number if it exists in the hash table.  Return NULL_TREE if it does
2757    not exist in the hash table or if the result field of the operation
2758    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2759    if it exists.  */
2760 
2761 tree
2762 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2763 			  tree type, tree *ops, vn_nary_op_t *vnresult)
2764 {
2765   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2766 				  sizeof_vn_nary_op (length));
2767   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2768   return vn_nary_op_lookup_1 (vno1, vnresult);
2769 }
2770 
2771 /* Lookup OP in the current hash table, and return the resulting value
2772    number if it exists in the hash table.  Return NULL_TREE if it does
2773    not exist in the hash table or if the result field of the operation
2774    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2775    if it exists.  */
2776 
2777 tree
2778 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2779 {
2780   vn_nary_op_t vno1
2781     = XALLOCAVAR (struct vn_nary_op_s,
2782 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2783   init_vn_nary_op_from_op (vno1, op);
2784   return vn_nary_op_lookup_1 (vno1, vnresult);
2785 }
2786 
2787 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2788    value number if it exists in the hash table.  Return NULL_TREE if
2789    it does not exist in the hash table.  VNRESULT will contain the
2790    vn_nary_op_t from the hashtable if it exists.  */
2791 
2792 tree
2793 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2794 {
2795   vn_nary_op_t vno1
2796     = XALLOCAVAR (struct vn_nary_op_s,
2797 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2798   init_vn_nary_op_from_stmt (vno1, stmt);
2799   return vn_nary_op_lookup_1 (vno1, vnresult);
2800 }
2801 
2802 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2803 
2804 static vn_nary_op_t
2805 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2806 {
2807   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2808 }
2809 
2810 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2811    obstack.  */
2812 
2813 static vn_nary_op_t
2814 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2815 {
2816   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2817 					       &current_info->nary_obstack);
2818 
2819   vno1->value_id = value_id;
2820   vno1->length = length;
2821   vno1->result = result;
2822 
2823   return vno1;
2824 }
2825 
2826 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2827    VNO->HASHCODE first.  */
2828 
2829 static vn_nary_op_t
2830 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2831 			bool compute_hash)
2832 {
2833   vn_nary_op_s **slot;
2834 
2835   if (compute_hash)
2836     vno->hashcode = vn_nary_op_compute_hash (vno);
2837 
2838   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2839   /* While we do not want to insert things twice, it's awkward to
2840      avoid it in the case where visit_nary_op pattern-matches stuff
2841      and ends up simplifying the replacement to itself.  We then
2842      get two inserts, one from visit_nary_op and one from
2843      vn_nary_build_or_lookup.
2844      So allow inserts with the same value number.  */
2845   if (*slot && (*slot)->result == vno->result)
2846     return *slot;
2847 
2848   gcc_assert (!*slot);
2849 
2850   *slot = vno;
2851   return vno;
2852 }
2853 
2854 /* Insert an n-ary operation into the current hash table using its
2855    pieces.  Return the vn_nary_op_t structure we created and put in
2856    the hashtable.  */
2857 
2858 vn_nary_op_t
2859 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2860 			  tree type, tree *ops,
2861 			  tree result, unsigned int value_id)
2862 {
2863   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2864   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2865   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2866 }
2867 
2868 /* Insert OP into the current hash table with a value number of
2869    RESULT.  Return the vn_nary_op_t structure we created and put in
2870    the hashtable.  */
2871 
2872 vn_nary_op_t
2873 vn_nary_op_insert (tree op, tree result)
2874 {
2875   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2876   vn_nary_op_t vno1;
2877 
2878   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2879   init_vn_nary_op_from_op (vno1, op);
2880   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2881 }
2882 
2883 /* Insert the rhs of STMT into the current hash table with a value number of
2884    RESULT.  */
2885 
2886 static vn_nary_op_t
2887 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2888 {
2889   vn_nary_op_t vno1
2890     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2891 			result, VN_INFO (result)->value_id);
2892   init_vn_nary_op_from_stmt (vno1, stmt);
2893   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2894 }
2895 
2896 /* Compute a hashcode for PHI operation VP1 and return it.  */
2897 
2898 static inline hashval_t
2899 vn_phi_compute_hash (vn_phi_t vp1)
2900 {
2901   inchash::hash hstate (vp1->phiargs.length () > 2
2902 			? vp1->block->index : vp1->phiargs.length ());
2903   tree phi1op;
2904   tree type;
2905   edge e;
2906   edge_iterator ei;
2907 
2908   /* If all PHI arguments are constants we need to distinguish
2909      the PHI node via its type.  */
2910   type = vp1->type;
2911   hstate.merge_hash (vn_hash_type (type));
2912 
2913   FOR_EACH_EDGE (e, ei, vp1->block->preds)
2914     {
2915       /* Don't hash backedge values; they need to be handled as VN_TOP
2916          for optimistic value-numbering.  */
2917       if (e->flags & EDGE_DFS_BACK)
2918 	continue;
2919 
2920       phi1op = vp1->phiargs[e->dest_idx];
2921       if (phi1op == VN_TOP)
2922 	continue;
2923       inchash::add_expr (phi1op, hstate);
2924     }
2925 
2926   return hstate.end ();
2927 }
2928 
2929 
2930 /* Return true if COND1 and COND2 represent the same condition; set
2931    *INVERTED_P if one needs to be inverted to make it the same as
2932    the other.  */
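/* For instance, a < b matches b > a directly, and matches a >= b with
   *INVERTED_P set (subject to the HONOR_NANS checks below for
   floating-point comparisons).  */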
2933 
2934 static bool
2935 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2936 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2937 {
2938   enum tree_code code1 = gimple_cond_code (cond1);
2939   enum tree_code code2 = gimple_cond_code (cond2);
2940 
2941   *inverted_p = false;
2942   if (code1 == code2)
2943     ;
2944   else if (code1 == swap_tree_comparison (code2))
2945     std::swap (lhs2, rhs2);
2946   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2947     *inverted_p = true;
2948   else if (code1 == invert_tree_comparison
2949 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2950     {
2951       std::swap (lhs2, rhs2);
2952       *inverted_p = true;
2953     }
2954   else
2955     return false;
2956 
2957   return ((expressions_equal_p (lhs1, lhs2)
2958 	   && expressions_equal_p (rhs1, rhs2))
2959 	  || (commutative_tree_code (code1)
2960 	      && expressions_equal_p (lhs1, rhs2)
2961 	      && expressions_equal_p (rhs1, lhs2)));
2962 }
2963 
2964 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
2965 
2966 static int
2967 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2968 {
2969   if (vp1->hashcode != vp2->hashcode)
2970     return false;
2971 
2972   if (vp1->block != vp2->block)
2973     {
2974       if (vp1->phiargs.length () != vp2->phiargs.length ())
2975 	return false;
2976 
2977       switch (vp1->phiargs.length ())
2978 	{
2979 	case 1:
2980 	  /* Single-arg PHIs are just copies.  */
2981 	  break;
2982 
2983 	case 2:
2984 	  {
2985 	    /* Rule out backedges into the PHI.  */
2986 	    if (vp1->block->loop_father->header == vp1->block
2987 		|| vp2->block->loop_father->header == vp2->block)
2988 	      return false;
2989 
2990 	    /* If the PHI nodes do not have compatible types
2991 	       they are not the same.  */
2992 	    if (!types_compatible_p (vp1->type, vp2->type))
2993 	      return false;
2994 
2995 	    basic_block idom1
2996 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2997 	    basic_block idom2
2998 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2999 	    /* If the immediate dominators end in switch stmts, multiple
3000 	       values may end up in the same PHI arg via intermediate
3001 	       CFG merges.  */
3002 	    if (EDGE_COUNT (idom1->succs) != 2
3003 		|| EDGE_COUNT (idom2->succs) != 2)
3004 	      return false;
3005 
3006 	    /* Verify the controlling stmt is the same.  */
3007 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3008 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3009 	    if (! last1 || ! last2)
3010 	      return false;
3011 	    bool inverted_p;
3012 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3013 				      last2, vp2->cclhs, vp2->ccrhs,
3014 				      &inverted_p))
3015 	      return false;
3016 
3017 	    /* Get at true/false controlled edges into the PHI.  */
3018 	    edge te1, te2, fe1, fe2;
3019 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
3020 						       &te1, &fe1)
3021 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
3022 							  &te2, &fe2))
3023 	      return false;
3024 
3025 	    /* Swap edges if the second condition is the inverted of the
3026 	       first.  */
3027 	    if (inverted_p)
3028 	      std::swap (te2, fe2);
3029 
3030 	    /* ???  Handle VN_TOP specially.  */
3031 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3032 				       vp2->phiargs[te2->dest_idx])
3033 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3034 					  vp2->phiargs[fe2->dest_idx]))
3035 	      return false;
3036 
3037 	    return true;
3038 	  }
3039 
3040 	default:
3041 	  return false;
3042 	}
3043     }
3044 
3045   /* If the PHI nodes do not have compatible types
3046      they are not the same.  */
3047   if (!types_compatible_p (vp1->type, vp2->type))
3048     return false;
3049 
3050   /* Any phi in the same block will have its arguments in the
3051      same edge order, because of how we store phi nodes.  */
3052   int i;
3053   tree phi1op;
3054   FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3055     {
3056       tree phi2op = vp2->phiargs[i];
3057       if (phi1op == VN_TOP || phi2op == VN_TOP)
3058 	continue;
3059       if (!expressions_equal_p (phi1op, phi2op))
3060 	return false;
3061     }
3062 
3063   return true;
3064 }
3065 
3066 static vec<tree> shared_lookup_phiargs;
3067 
3068 /* Lookup PHI in the current hash table, and return the resulting
3069    value number if it exists in the hash table.  Return NULL_TREE if
3070    it does not exist in the hash table. */
3071 
3072 static tree
3073 vn_phi_lookup (gimple *phi)
3074 {
3075   vn_phi_s **slot;
3076   struct vn_phi_s vp1;
3077   edge e;
3078   edge_iterator ei;
3079 
3080   shared_lookup_phiargs.truncate (0);
3081   shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3082 
3083   /* Canonicalize the SSA_NAME's to their value number.  */
3084   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3085     {
3086       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3087       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3088       shared_lookup_phiargs[e->dest_idx] = def;
3089     }
3090   vp1.type = TREE_TYPE (gimple_phi_result (phi));
3091   vp1.phiargs = shared_lookup_phiargs;
3092   vp1.block = gimple_bb (phi);
3093   /* Extract values of the controlling condition.  */
3094   vp1.cclhs = NULL_TREE;
3095   vp1.ccrhs = NULL_TREE;
3096   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3097   if (EDGE_COUNT (idom1->succs) == 2)
3098     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3099       {
3100 	vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3101 	vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3102       }
3103   vp1.hashcode = vn_phi_compute_hash (&vp1);
3104   slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3105 						  NO_INSERT);
3106   if (!slot && current_info == optimistic_info)
3107     slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3108 						  NO_INSERT);
3109   if (!slot)
3110     return NULL_TREE;
3111   return (*slot)->result;
3112 }
3113 
3114 /* Insert PHI into the current hash table with a value number of
3115    RESULT.  */
3116 
3117 static vn_phi_t
3118 vn_phi_insert (gimple *phi, tree result)
3119 {
3120   vn_phi_s **slot;
3121   vn_phi_t vp1 = current_info->phis_pool->allocate ();
3122   vec<tree> args = vNULL;
3123   edge e;
3124   edge_iterator ei;
3125 
3126   args.safe_grow (gimple_phi_num_args (phi));
3127 
3128   /* Canonicalize the SSA_NAME's to their value number.  */
3129   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3130     {
3131       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3132       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3133       args[e->dest_idx] = def;
3134     }
3135   vp1->value_id = VN_INFO (result)->value_id;
3136   vp1->type = TREE_TYPE (gimple_phi_result (phi));
3137   vp1->phiargs = args;
3138   vp1->block = gimple_bb (phi);
3139   /* Extract values of the controlling condition.  */
3140   vp1->cclhs = NULL_TREE;
3141   vp1->ccrhs = NULL_TREE;
3142   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3143   if (EDGE_COUNT (idom1->succs) == 2)
3144     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3145       {
3146 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3147 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3148       }
3149   vp1->result = result;
3150   vp1->hashcode = vn_phi_compute_hash (vp1);
3151 
3152   slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3153 
3154   /* Because we iterate over phi operations more than once, it's
3155      possible the slot might already exist here, hence no assert.  */
3156   *slot = vp1;
3157   return vp1;
3158 }
3159 
3160 
3161 /* Print set of components in strongly connected component SCC to OUT. */
3162 
3163 static void
3164 print_scc (FILE *out, vec<tree> scc)
3165 {
3166   tree var;
3167   unsigned int i;
3168 
3169   fprintf (out, "SCC consists of:");
3170   FOR_EACH_VEC_ELT (scc, i, var)
3171     {
3172       fprintf (out, " ");
3173       print_generic_expr (out, var, 0);
3174     }
3175   fprintf (out, "\n");
3176 }
3177 
3178 /* Return true if BB1 is dominated by BB2 taking into account edges
3179    that are not executable.  */
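/* For instance, when BB1 has several predecessor edges but only one of
   them is executable, dominance is re-checked from that sole executable
   predecessor; similarly for a sole executable successor of BB2.  */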
3180 
3181 static bool
3182 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3183 {
3184   edge_iterator ei;
3185   edge e;
3186 
3187   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3188     return true;
3189 
3190   /* Before iterating we'd like to know whether there exists an
3191      (executable) path from bb2 to bb1 at all; if not, we can
3192      directly return false.  For now simply iterate once.  */
3193 
3194   /* Iterate to the single executable bb1 predecessor.  */
3195   if (EDGE_COUNT (bb1->preds) > 1)
3196     {
3197       edge prede = NULL;
3198       FOR_EACH_EDGE (e, ei, bb1->preds)
3199 	if (e->flags & EDGE_EXECUTABLE)
3200 	  {
3201 	    if (prede)
3202 	      {
3203 		prede = NULL;
3204 		break;
3205 	      }
3206 	    prede = e;
3207 	  }
3208       if (prede)
3209 	{
3210 	  bb1 = prede->src;
3211 
3212 	  /* Re-do the dominance check with changed bb1.  */
3213 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3214 	    return true;
3215 	}
3216     }
3217 
3218   /* Iterate to the single executable bb2 successor.  */
3219   edge succe = NULL;
3220   FOR_EACH_EDGE (e, ei, bb2->succs)
3221     if (e->flags & EDGE_EXECUTABLE)
3222       {
3223 	if (succe)
3224 	  {
3225 	    succe = NULL;
3226 	    break;
3227 	  }
3228 	succe = e;
3229       }
3230   if (succe)
3231     {
3232       /* Verify the reached block is only reached through succe.
3233 	 If there is only one edge we can spare us the dominator
3234 	 check and iterate directly.  */
3235       if (EDGE_COUNT (succe->dest->preds) > 1)
3236 	{
3237 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
3238 	    if (e != succe
3239 		&& (e->flags & EDGE_EXECUTABLE))
3240 	      {
3241 		succe = NULL;
3242 		break;
3243 	      }
3244 	}
3245       if (succe)
3246 	{
3247 	  bb2 = succe->dest;
3248 
3249 	  /* Re-do the dominance check with changed bb2.  */
3250 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3251 	    return true;
3252 	}
3253     }
3254 
3255   /* We could now iterate updating bb1 / bb2.  */
3256   return false;
3257 }
3258 
3259 /* Set the value number of FROM to TO, return true if it has changed
3260    as a result.  */
3261 
3262 static inline bool
3263 set_ssa_val_to (tree from, tree to)
3264 {
3265   tree currval = SSA_VAL (from);
3266   HOST_WIDE_INT toff, coff;
3267 
3268   /* The only things we allow as value numbers are ssa_names
3269      and invariants.  So assert that here.  We don't allow VN_TOP
3270      as visiting a stmt should produce a value-number other than
3271      that.
3272      ???  Still VN_TOP can happen for unreachable code, so force
3273      it to varying in that case.  Not all code is prepared to
3274      get VN_TOP on valueization.  */
3275   if (to == VN_TOP)
3276     {
3277       if (dump_file && (dump_flags & TDF_DETAILS))
3278 	fprintf (dump_file, "Forcing value number to varying on "
3279 		 "receiving VN_TOP\n");
3280       to = from;
3281     }
3282 
3283   gcc_assert (to != NULL_TREE
3284 	      && ((TREE_CODE (to) == SSA_NAME
3285 		   && (to == from || SSA_VAL (to) == to))
3286 		  || is_gimple_min_invariant (to)));
3287 
3288   if (from != to)
3289     {
3290       if (currval == from)
3291 	{
3292 	  if (dump_file && (dump_flags & TDF_DETAILS))
3293 	    {
3294 	      fprintf (dump_file, "Not changing value number of ");
3295 	      print_generic_expr (dump_file, from, 0);
3296 	      fprintf (dump_file, " from VARYING to ");
3297 	      print_generic_expr (dump_file, to, 0);
3298 	      fprintf (dump_file, "\n");
3299 	    }
3300 	  return false;
3301 	}
3302       else if (currval != VN_TOP
3303 	       && ! is_gimple_min_invariant (currval)
3304 	       && is_gimple_min_invariant (to))
3305 	{
3306 	  if (dump_file && (dump_flags & TDF_DETAILS))
3307 	    {
3308 	      fprintf (dump_file, "Forcing VARYING instead of changing "
3309 		       "value number of ");
3310 	      print_generic_expr (dump_file, from, 0);
3311 	      fprintf (dump_file, " from ");
3312 	      print_generic_expr (dump_file, currval, 0);
3313 	      fprintf (dump_file, " (non-constant) to ");
3314 	      print_generic_expr (dump_file, to, 0);
3315 	      fprintf (dump_file, " (constant)\n");
3316 	    }
3317 	  to = from;
3318 	}
3319       else if (TREE_CODE (to) == SSA_NAME
3320 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3321 	to = from;
3322     }
3323 
3324   if (dump_file && (dump_flags & TDF_DETAILS))
3325     {
3326       fprintf (dump_file, "Setting value number of ");
3327       print_generic_expr (dump_file, from, 0);
3328       fprintf (dump_file, " to ");
3329       print_generic_expr (dump_file, to, 0);
3330     }
3331 
3332   if (currval != to
3333       && !operand_equal_p (currval, to, 0)
3334       /* ???  For addresses involving volatile objects or types operand_equal_p
3335          does not reliably detect ADDR_EXPRs as equal.  We know we are only
3336 	 getting invariant gimple addresses here, so can use
3337 	 get_addr_base_and_unit_offset to do this comparison.  */
3338       && !(TREE_CODE (currval) == ADDR_EXPR
3339 	   && TREE_CODE (to) == ADDR_EXPR
3340 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3341 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3342 	   && coff == toff))
3343     {
3344       /* If we equate two SSA names we have to make the side-band info
3345          of the leader conservative (and remember whatever original value
3346 	 was present).  */
3347       if (TREE_CODE (to) == SSA_NAME)
3348 	{
3349 	  if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3350 	      && SSA_NAME_RANGE_INFO (to))
3351 	    {
3352 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3353 		  || dominated_by_p_w_unex
3354 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3355 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3356 		/* Keep the info from the dominator.  */
3357 		;
3358 	      else if (SSA_NAME_IS_DEFAULT_DEF (from)
3359 		       || dominated_by_p_w_unex
3360 			    (gimple_bb (SSA_NAME_DEF_STMT (to)),
3361 			     gimple_bb (SSA_NAME_DEF_STMT (from))))
3362 		{
3363 		  /* Save old info.  */
3364 		  if (! VN_INFO (to)->info.range_info)
3365 		    {
3366 		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3367 		      VN_INFO (to)->range_info_anti_range_p
3368 			= SSA_NAME_ANTI_RANGE_P (to);
3369 		    }
3370 		  /* Use that from the dominator.  */
3371 		  SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3372 		  SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3373 		}
3374 	      else
3375 		{
3376 		  /* Save old info.  */
3377 		  if (! VN_INFO (to)->info.range_info)
3378 		    {
3379 		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3380 		      VN_INFO (to)->range_info_anti_range_p
3381 			= SSA_NAME_ANTI_RANGE_P (to);
3382 		    }
3383 		  /* Rather than allocating memory and unioning the info
3384 		     just clear it.  */
3385 		  SSA_NAME_RANGE_INFO (to) = NULL;
3386 		}
3387 	    }
3388 	  else if (POINTER_TYPE_P (TREE_TYPE (to))
3389 		   && SSA_NAME_PTR_INFO (to))
3390 	    {
3391 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3392 		  || dominated_by_p_w_unex
3393 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3394 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3395 		/* Keep the info from the dominator.  */
3396 		;
3397 	      else if (SSA_NAME_IS_DEFAULT_DEF (from)
3398 		       || dominated_by_p_w_unex
3399 			    (gimple_bb (SSA_NAME_DEF_STMT (to)),
3400 			     gimple_bb (SSA_NAME_DEF_STMT (from))))
3401 		{
3402 		  /* Save old info.  */
3403 		  if (! VN_INFO (to)->info.ptr_info)
3404 		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3405 		  /* Use that from the dominator.  */
3406 		  SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3407 		}
3408 	      else if (! SSA_NAME_PTR_INFO (from)
3409 		       /* Handle the case of trivially equivalent info.  */
3410 		       || memcmp (SSA_NAME_PTR_INFO (to),
3411 				  SSA_NAME_PTR_INFO (from),
3412 				  sizeof (ptr_info_def)) != 0)
3413 		{
3414 		  /* Save old info.  */
3415 		  if (! VN_INFO (to)->info.ptr_info)
3416 		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3417 		  /* Rather than allocating memory and unioning the info
3418 		     just clear it.  */
3419 		  SSA_NAME_PTR_INFO (to) = NULL;
3420 		}
3421 	    }
3422 	}
3423 
3424       VN_INFO (from)->valnum = to;
3425       if (dump_file && (dump_flags & TDF_DETAILS))
3426 	fprintf (dump_file, " (changed)\n");
3427       return true;
3428     }
3429   if (dump_file && (dump_flags & TDF_DETAILS))
3430     fprintf (dump_file, "\n");
3431   return false;
3432 }
3433 
3434 /* Mark as processed all the definitions in the defining stmt of USE, or
3435    the USE itself.  */
3436 
3437 static void
3438 mark_use_processed (tree use)
3439 {
3440   ssa_op_iter iter;
3441   def_operand_p defp;
3442   gimple *stmt = SSA_NAME_DEF_STMT (use);
3443 
3444   if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3445     {
3446       VN_INFO (use)->use_processed = true;
3447       return;
3448     }
3449 
3450   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3451     {
3452       tree def = DEF_FROM_PTR (defp);
3453 
3454       VN_INFO (def)->use_processed = true;
3455     }
3456 }
3457 
3458 /* Value number all definitions in STMT to themselves.
3459    Return true if a value number changed.  */
3460 
3461 static bool
3462 defs_to_varying (gimple *stmt)
3463 {
3464   bool changed = false;
3465   ssa_op_iter iter;
3466   def_operand_p defp;
3467 
3468   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3469     {
3470       tree def = DEF_FROM_PTR (defp);
3471       changed |= set_ssa_val_to (def, def);
3472     }
3473   return changed;
3474 }
3475 
3476 /* Visit a copy between LHS and RHS, return true if the value number
3477    changed.  */
3478 
3479 static bool
3480 visit_copy (tree lhs, tree rhs)
3481 {
3482   /* Valueize.  */
3483   rhs = SSA_VAL (rhs);
3484 
3485   return set_ssa_val_to (lhs, rhs);
3486 }
3487 
3488 /* Look up a value for OP in type WIDE_TYPE where the value in the type
3489    of OP is the same.  */
3490 
3491 static tree
3492 valueized_wider_op (tree wide_type, tree op)
3493 {
3494   if (TREE_CODE (op) == SSA_NAME)
3495     op = SSA_VAL (op);
3496 
3497   /* Either we have the op widened available.  */
3498   tree ops[3] = {};
3499   ops[0] = op;
3500   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3501 				       wide_type, ops, NULL);
3502   if (tem)
3503     return tem;
3504 
3505   /* Or the op is truncated from some existing value.  */
3506   if (TREE_CODE (op) == SSA_NAME)
3507     {
3508       gimple *def = SSA_NAME_DEF_STMT (op);
3509       if (is_gimple_assign (def)
3510 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3511 	{
3512 	  tem = gimple_assign_rhs1 (def);
3513 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3514 	    {
3515 	      if (TREE_CODE (tem) == SSA_NAME)
3516 		tem = SSA_VAL (tem);
3517 	      return tem;
3518 	    }
3519 	}
3520     }
3521 
3522   /* For constants simply extend it.  */
3523   if (TREE_CODE (op) == INTEGER_CST)
3524     return wide_int_to_tree (wide_type, op);
3525 
3526   return NULL_TREE;
3527 }
3528 
3529 /* Visit a nary operator RHS, value number it, and return true if the
3530    value number of LHS has changed as a result.  */
3531 
3532 static bool
3533 visit_nary_op (tree lhs, gassign *stmt)
3534 {
3535   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3536   if (result)
3537     return set_ssa_val_to (lhs, result);
3538 
3539   /* Do some special pattern matching for redundancies of operations
3540      in different types.  */
3541   enum tree_code code = gimple_assign_rhs_code (stmt);
3542   tree type = TREE_TYPE (lhs);
3543   tree rhs1 = gimple_assign_rhs1 (stmt);
3544   switch (code)
3545     {
3546     CASE_CONVERT:
3547       /* Match arithmetic done in a different type where we can easily
3548          substitute the result from some earlier sign-changed or widened
3549 	 operation.  */
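      /* For example (a rough sketch, with unsigned int narrower than long):
	     _1 = a_2 + b_3;      <- unsigned int addition
	     _4 = (long) _1;
	 if a value for the long addition (long) a_2 + (long) b_3 is
	 already available, _4 can be value numbered to that value masked
	 down to the precision of the narrow operation (no mask is needed
	 for pure sign-changes of the same precision).  */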
3550       if (INTEGRAL_TYPE_P (type)
3551 	  && TREE_CODE (rhs1) == SSA_NAME
3552 	  /* We only handle sign-changes or zero-extension -> & mask.  */
3553 	  && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3554 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3555 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3556 	{
3557 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3558 	  if (def
3559 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
3560 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
3561 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
3562 	    {
3563 	      tree ops[3] = {};
3564 	      /* See whether the operands of DEF are available widened to TYPE.  */
3565 	      ops[0] = valueized_wider_op (type,
3566 					   gimple_assign_rhs1 (def));
3567 	      if (ops[0])
3568 		ops[1] = valueized_wider_op (type,
3569 					     gimple_assign_rhs2 (def));
3570 	      if (ops[0] && ops[1])
3571 		{
3572 		  ops[0] = vn_nary_op_lookup_pieces
3573 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
3574 		  /* We have the wider operation available.  */
3575 		  if (ops[0])
3576 		    {
3577 		      unsigned lhs_prec = TYPE_PRECISION (type);
3578 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3579 		      if (lhs_prec == rhs_prec)
3580 			{
3581 			  ops[1] = NULL_TREE;
3582 			  result = vn_nary_build_or_lookup (NOP_EXPR,
3583 							    type, ops);
3584 			  if (result)
3585 			    {
3586 			      bool changed = set_ssa_val_to (lhs, result);
3587 			      vn_nary_op_insert_stmt (stmt, result);
3588 			      return changed;
3589 			    }
3590 			}
3591 		      else
3592 			{
3593 			  ops[1] = wide_int_to_tree (type,
3594 						     wi::mask (rhs_prec, false,
3595 							       lhs_prec));
3596 			  result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3597 							    TREE_TYPE (lhs),
3598 							    ops);
3599 			  if (result)
3600 			    {
3601 			      bool changed = set_ssa_val_to (lhs, result);
3602 			      vn_nary_op_insert_stmt (stmt, result);
3603 			      return changed;
3604 			    }
3605 			}
3606 		    }
3607 		}
3608 	    }
3609 	}
3610     default:;
3611     }
3612 
3613   bool changed = set_ssa_val_to (lhs, lhs);
3614   vn_nary_op_insert_stmt (stmt, lhs);
3615   return changed;
3616 }
3617 
3618 /* Visit a call STMT storing into LHS.  Return true if the value number
3619    of the LHS has changed as a result.  */
3620 
3621 static bool
3622 visit_reference_op_call (tree lhs, gcall *stmt)
3623 {
3624   bool changed = false;
3625   struct vn_reference_s vr1;
3626   vn_reference_t vnresult = NULL;
3627   tree vdef = gimple_vdef (stmt);
3628 
3629   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
3630   if (lhs && TREE_CODE (lhs) != SSA_NAME)
3631     lhs = NULL_TREE;
3632 
3633   vn_reference_lookup_call (stmt, &vnresult, &vr1);
3634   if (vnresult)
3635     {
3636       if (vnresult->result_vdef && vdef)
3637 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3638       else if (vdef)
3639 	/* If the call was discovered to be pure or const reflect
3640 	   that as far as possible.  */
3641 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3642 
3643       if (!vnresult->result && lhs)
3644 	vnresult->result = lhs;
3645 
3646       if (vnresult->result && lhs)
3647 	changed |= set_ssa_val_to (lhs, vnresult->result);
3648     }
3649   else
3650     {
3651       vn_reference_t vr2;
3652       vn_reference_s **slot;
3653       tree vdef_val = vdef;
3654       if (vdef)
3655 	{
3656 	  /* If we value numbered an indirect call's callee to a function
3657 	     not clobbering memory, value number its VDEF to its
3658 	     VUSE.  */
3659 	  tree fn = gimple_call_fn (stmt);
3660 	  if (fn && TREE_CODE (fn) == SSA_NAME)
3661 	    {
3662 	      fn = SSA_VAL (fn);
3663 	      if (TREE_CODE (fn) == ADDR_EXPR
3664 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3665 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3666 		      & (ECF_CONST | ECF_PURE)))
3667 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3668 	    }
3669 	  changed |= set_ssa_val_to (vdef, vdef_val);
3670 	}
3671       if (lhs)
3672 	changed |= set_ssa_val_to (lhs, lhs);
3673       vr2 = current_info->references_pool->allocate ();
3674       vr2->vuse = vr1.vuse;
3675       /* As we are not walking the virtual operand chain we know the
3676 	 shared_lookup_references are still original so we can re-use
3677 	 them here.  */
3678       vr2->operands = vr1.operands.copy ();
3679       vr2->type = vr1.type;
3680       vr2->set = vr1.set;
3681       vr2->hashcode = vr1.hashcode;
3682       vr2->result = lhs;
3683       vr2->result_vdef = vdef_val;
3684       slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3685 							    INSERT);
3686       gcc_assert (!*slot);
3687       *slot = vr2;
3688     }
3689 
3690   return changed;
3691 }
3692 
3693 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3694    and return true if the value number of the LHS has changed as a result.  */
3695 
3696 static bool
3697 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3698 {
3699   bool changed = false;
3700   tree last_vuse;
3701   tree result;
3702 
3703   last_vuse = gimple_vuse (stmt);
3704   last_vuse_ptr = &last_vuse;
3705   result = vn_reference_lookup (op, gimple_vuse (stmt),
3706 				default_vn_walk_kind, NULL, true);
3707   last_vuse_ptr = NULL;
3708 
3709   /* We handle type-punning through unions by value-numbering based
3710      on offset and size of the access.  Be prepared to handle a
3711      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
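  /* For instance, with union { int i; float f; } u, a load of u.f after a
     store to u.i can find the previously stored int value at the same
     offset and size; the type mismatch is then papered over with a
     VIEW_CONVERT_EXPR before using it as the value of the load (a rough
     sketch of the situation handled below).  */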
3712   if (result
3713       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3714     {
3715       /* We will be setting the value number of lhs to the value number
3716 	 of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
3717 	 So first simplify and lookup this expression to see if it
3718 	 is already available.  */
3719       code_helper rcode = VIEW_CONVERT_EXPR;
3720       tree ops[3] = { result };
3721       result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3722     }
3723 
3724   if (result)
3725     changed = set_ssa_val_to (lhs, result);
3726   else
3727     {
3728       changed = set_ssa_val_to (lhs, lhs);
3729       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3730     }
3731 
3732   return changed;
3733 }
3734 
3735 
3736 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3737    and return true if the value number of the LHS has changed as a result.  */
3738 
3739 static bool
3740 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3741 {
3742   bool changed = false;
3743   vn_reference_t vnresult = NULL;
3744   tree assign;
3745   bool resultsame = false;
3746   tree vuse = gimple_vuse (stmt);
3747   tree vdef = gimple_vdef (stmt);
3748 
3749   if (TREE_CODE (op) == SSA_NAME)
3750     op = SSA_VAL (op);
3751 
3752   /* First we want to lookup using the *vuses* from the store and see
3753      whether the last store to this location with the same address
3754      had the same value.
3755 
3756      The vuses represent the memory state before the store.  If the
3757      memory state, address, and value of the store are the same as the
3758      last store to this location, then this store will produce the
3759      same memory state as that store.
3760 
3761      In this case the vdef versions for this store are value numbered to those
3762      vuse versions, since they represent the same memory state after
3763      this store.
3764 
3765      Otherwise, the vdefs for the store are used when inserting into
3766      the table, since the store generates a new memory state.  */
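  /* As a rough sketch, for
	 # .MEM_3 = VDEF <.MEM_1>
	 *p_2 = x_4;
     if looking up *p_2 with vuse .MEM_1 already yields x_4 (and the TBAA
     state is compatible for downstream reads), .MEM_3 is value numbered
     the same as .MEM_1; otherwise .MEM_3 gets its own value number and
     the store is entered into the tables.  */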
3767 
3768   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3769   if (vnresult
3770       && vnresult->result)
3771     {
3772       tree result = vnresult->result;
3773       if (TREE_CODE (result) == SSA_NAME)
3774 	result = SSA_VAL (result);
3775       resultsame = expressions_equal_p (result, op);
3776       if (resultsame)
3777 	{
3778 	  /* If the TBAA state isn't compatible for downstream reads
3779 	     we cannot value-number the VDEFs the same.  */
3780 	  alias_set_type set = get_alias_set (lhs);
3781 	  if (vnresult->set != set
3782 	      && ! alias_set_subset_of (set, vnresult->set))
3783 	    resultsame = false;
3784 	}
3785     }
3786 
3787   if (!resultsame)
3788     {
3789       /* Only perform the following when being called from PRE
3790 	 which embeds tail merging.  */
3791       if (default_vn_walk_kind == VN_WALK)
3792 	{
3793 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3794 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3795 	  if (vnresult)
3796 	    {
3797 	      VN_INFO (vdef)->use_processed = true;
3798 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
3799 	    }
3800 	}
3801 
3802       if (dump_file && (dump_flags & TDF_DETAILS))
3803 	{
3804 	  fprintf (dump_file, "No store match\n");
3805 	  fprintf (dump_file, "Value numbering store ");
3806 	  print_generic_expr (dump_file, lhs, 0);
3807 	  fprintf (dump_file, " to ");
3808 	  print_generic_expr (dump_file, op, 0);
3809 	  fprintf (dump_file, "\n");
3810 	}
3811       /* Have to set value numbers before insert, since insert is
3812 	 going to valueize the references in-place.  */
3813       if (vdef)
3814 	changed |= set_ssa_val_to (vdef, vdef);
3815 
3816       /* Do not insert structure copies into the tables.  */
3817       if (is_gimple_min_invariant (op)
3818 	  || is_gimple_reg (op))
3819         vn_reference_insert (lhs, op, vdef, NULL);
3820 
3821       /* Only perform the following when being called from PRE
3822 	 which embeds tail merging.  */
3823       if (default_vn_walk_kind == VN_WALK)
3824 	{
3825 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3826 	  vn_reference_insert (assign, lhs, vuse, vdef);
3827 	}
3828     }
3829   else
3830     {
3831       /* We had a match, so value number the vdef to have the value
3832 	 number of the vuse it came from.  */
3833 
3834       if (dump_file && (dump_flags & TDF_DETAILS))
3835 	fprintf (dump_file, "Store matched earlier value, "
3836 		 "value numbering store vdefs to matching vuses.\n");
3837 
3838       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3839     }
3840 
3841   return changed;
3842 }
3843 
3844 /* Visit and value number PHI, return true if the value number
3845    changed.  */
3846 
3847 static bool
3848 visit_phi (gimple *phi)
3849 {
3850   bool changed = false;
3851   tree result;
3852   tree sameval = VN_TOP;
3853   bool allsame = true;
3854   unsigned n_executable = 0;
3855 
3856   /* TODO: We could check for this in init_sccvn, and replace this
3857      with a gcc_assert.  */
3858   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3859     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3860 
3861   /* See if all non-TOP arguments have the same value.  TOP is
3862      equivalent to everything, so we can ignore it.  */
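  /* E.g. for
	 x_1 = PHI <x_2(2), x_2(3), u_4(D)(4)>
     where u_4(D) is undefined (VN_TOP), all non-TOP arguments agree, so
     x_1 can be value numbered to x_2 (a sketch; see the allsame handling
     below).  */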
3863   edge_iterator ei;
3864   edge e;
3865   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3866     if (e->flags & EDGE_EXECUTABLE)
3867       {
3868 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3869 
3870 	++n_executable;
3871 	if (TREE_CODE (def) == SSA_NAME)
3872 	  def = SSA_VAL (def);
3873 	if (def == VN_TOP)
3874 	  continue;
3875 	if (sameval == VN_TOP)
3876 	  sameval = def;
3877 	else if (!expressions_equal_p (def, sameval))
3878 	  {
3879 	    allsame = false;
3880 	    break;
3881 	  }
3882       }
3883 
3884   /* If none of the edges was executable or all incoming values are
3885      undefined, keep the value-number at VN_TOP.  If only a single edge
3886      is executable, use its value.  */
3887   if (sameval == VN_TOP
3888       || n_executable == 1)
3889     return set_ssa_val_to (PHI_RESULT (phi), sameval);
3890 
3891   /* First see if it is equivalent to a phi node in this block.  We prefer
3892      this as it allows IV elimination - see PRs 66502 and 67167.  */
3893   result = vn_phi_lookup (phi);
3894   if (result)
3895     changed = set_ssa_val_to (PHI_RESULT (phi), result);
3896   /* Otherwise all value numbered to the same value, the phi node has that
3897      value.  */
3898   else if (allsame)
3899     changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3900   else
3901     {
3902       vn_phi_insert (phi, PHI_RESULT (phi));
3903       changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3904     }
3905 
3906   return changed;
3907 }
3908 
3909 /* Try to simplify RHS using equivalences and constant folding.  */
3910 
3911 static tree
3912 try_to_simplify (gassign *stmt)
3913 {
3914   enum tree_code code = gimple_assign_rhs_code (stmt);
3915   tree tem;
3916 
3917   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
3918      in this case, there is no point in doing extra work.  */
3919   if (code == SSA_NAME)
3920     return NULL_TREE;
3921 
3922   /* First try constant folding based on our current lattice.  */
3923   mprts_hook = vn_lookup_simplify_result;
3924   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3925   mprts_hook = NULL;
3926   if (tem
3927       && (TREE_CODE (tem) == SSA_NAME
3928 	  || is_gimple_min_invariant (tem)))
3929     return tem;
3930 
3931   return NULL_TREE;
3932 }
3933 
3934 /* Visit and value number USE, return true if the value number
3935    changed. */
3936 
3937 static bool
3938 visit_use (tree use)
3939 {
3940   bool changed = false;
3941   gimple *stmt = SSA_NAME_DEF_STMT (use);
3942 
3943   mark_use_processed (use);
3944 
3945   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3946   if (dump_file && (dump_flags & TDF_DETAILS)
3947       && !SSA_NAME_IS_DEFAULT_DEF (use))
3948     {
3949       fprintf (dump_file, "Value numbering ");
3950       print_generic_expr (dump_file, use, 0);
3951       fprintf (dump_file, " stmt = ");
3952       print_gimple_stmt (dump_file, stmt, 0, 0);
3953     }
3954 
3955   /* Handle uninitialized uses.  */
3956   if (SSA_NAME_IS_DEFAULT_DEF (use))
3957     changed = set_ssa_val_to (use, use);
3958   else if (gimple_code (stmt) == GIMPLE_PHI)
3959     changed = visit_phi (stmt);
3960   else if (gimple_has_volatile_ops (stmt))
3961     changed = defs_to_varying (stmt);
3962   else if (gassign *ass = dyn_cast <gassign *> (stmt))
3963     {
3964       enum tree_code code = gimple_assign_rhs_code (ass);
3965       tree lhs = gimple_assign_lhs (ass);
3966       tree rhs1 = gimple_assign_rhs1 (ass);
3967       tree simplified;
3968 
3969       /* Shortcut for copies. Simplifying copies is pointless,
3970 	 since we copy the expression and value they represent.  */
3971       if (code == SSA_NAME
3972 	  && TREE_CODE (lhs) == SSA_NAME)
3973 	{
3974 	  changed = visit_copy (lhs, rhs1);
3975 	  goto done;
3976 	}
3977       simplified = try_to_simplify (ass);
3978       if (simplified)
3979 	{
3980 	  if (dump_file && (dump_flags & TDF_DETAILS))
3981 	    {
3982 	      fprintf (dump_file, "RHS ");
3983 	      print_gimple_expr (dump_file, ass, 0, 0);
3984 	      fprintf (dump_file, " simplified to ");
3985 	      print_generic_expr (dump_file, simplified, 0);
3986 	      fprintf (dump_file, "\n");
3987 	    }
3988 	}
3989       /* Setting value numbers to constants will occasionally
3990 	 screw up phi congruence because constants are not
3991 	 uniquely associated with a single ssa name that can be
3992 	 looked up.  */
3993       if (simplified
3994 	  && is_gimple_min_invariant (simplified)
3995 	  && TREE_CODE (lhs) == SSA_NAME)
3996 	{
3997 	  changed = set_ssa_val_to (lhs, simplified);
3998 	  goto done;
3999 	}
4000       else if (simplified
4001 	       && TREE_CODE (simplified) == SSA_NAME
4002 	       && TREE_CODE (lhs) == SSA_NAME)
4003 	{
4004 	  changed = visit_copy (lhs, simplified);
4005 	  goto done;
4006 	}
4007 
4008       if ((TREE_CODE (lhs) == SSA_NAME
4009 	   /* We can substitute SSA_NAMEs that are live over
4010 	      abnormal edges with their constant value.  */
4011 	   && !(gimple_assign_copy_p (ass)
4012 		&& is_gimple_min_invariant (rhs1))
4013 	   && !(simplified
4014 		&& is_gimple_min_invariant (simplified))
4015 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4016 	  /* Stores or copies from SSA_NAMEs that are live over
4017 	     abnormal edges are a problem.  */
4018 	  || (code == SSA_NAME
4019 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4020 	changed = defs_to_varying (ass);
4021       else if (REFERENCE_CLASS_P (lhs)
4022 	       || DECL_P (lhs))
4023 	changed = visit_reference_op_store (lhs, rhs1, ass);
4024       else if (TREE_CODE (lhs) == SSA_NAME)
4025 	{
4026 	  if ((gimple_assign_copy_p (ass)
4027 	       && is_gimple_min_invariant (rhs1))
4028 	      || (simplified
4029 		  && is_gimple_min_invariant (simplified)))
4030 	    {
4031 	      if (simplified)
4032 		changed = set_ssa_val_to (lhs, simplified);
4033 	      else
4034 		changed = set_ssa_val_to (lhs, rhs1);
4035 	    }
4036 	  else
4037 	    {
4038 	      /* Visit the original statement.  */
4039 	      switch (vn_get_stmt_kind (ass))
4040 		{
4041 		case VN_NARY:
4042 		changed = visit_nary_op (lhs, ass);
4043 		break;
4044 		case VN_REFERENCE:
4045 		changed = visit_reference_op_load (lhs, rhs1, ass);
4046 		break;
4047 		default:
4048 		changed = defs_to_varying (ass);
4049 		break;
4050 		}
4051 	    }
4052 	}
4053       else
4054 	changed = defs_to_varying (ass);
4055     }
4056   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4057     {
4058       tree lhs = gimple_call_lhs (call_stmt);
4059       if (lhs && TREE_CODE (lhs) == SSA_NAME)
4060 	{
4061 	  /* Try constant folding based on our current lattice.  */
4062 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4063 							    vn_valueize);
4064 	  if (simplified)
4065 	    {
4066 	      if (dump_file && (dump_flags & TDF_DETAILS))
4067 		{
4068 		  fprintf (dump_file, "call ");
4069 		  print_gimple_expr (dump_file, call_stmt, 0, 0);
4070 		  fprintf (dump_file, " simplified to ");
4071 		  print_generic_expr (dump_file, simplified, 0);
4072 		  fprintf (dump_file, "\n");
4073 		}
4074 	    }
4075 	  /* Setting value numbers to constants will occasionally
4076 	     screw up phi congruence because constants are not
4077 	     uniquely associated with a single ssa name that can be
4078 	     looked up.  */
4079 	  if (simplified
4080 	      && is_gimple_min_invariant (simplified))
4081 	    {
4082 	      changed = set_ssa_val_to (lhs, simplified);
4083 	      if (gimple_vdef (call_stmt))
4084 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4085 					   SSA_VAL (gimple_vuse (call_stmt)));
4086 	      goto done;
4087 	    }
4088 	  else if (simplified
4089 		   && TREE_CODE (simplified) == SSA_NAME)
4090 	    {
4091 	      changed = visit_copy (lhs, simplified);
4092 	      if (gimple_vdef (call_stmt))
4093 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4094 					   SSA_VAL (gimple_vuse (call_stmt)));
4095 	      goto done;
4096 	    }
4097 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4098 	    {
4099 	      changed = defs_to_varying (call_stmt);
4100 	      goto done;
4101 	    }
4102 	}
4103 
4104       /* Pick up flags from a devirtualization target.  */
4105       tree fn = gimple_call_fn (stmt);
4106       int extra_fnflags = 0;
4107       if (fn && TREE_CODE (fn) == SSA_NAME)
4108 	{
4109 	  fn = SSA_VAL (fn);
4110 	  if (TREE_CODE (fn) == ADDR_EXPR
4111 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4112 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4113 	}
4114       if (!gimple_call_internal_p (call_stmt)
4115 	  && (/* Calls to the same function with the same vuse
4116 		 and the same operands do not necessarily return the same
4117 		 value, unless they're pure or const.  */
4118 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
4119 	       & (ECF_PURE | ECF_CONST))
4120 	      /* If calls have a vdef, subsequent calls won't have
4121 		 the same incoming vuse.  So, if 2 calls with vdef have the
4122 		 same vuse, we know they're not subsequent.
4123 		 We can value number two non-subsequent calls to the same
4124 		 function with the same vuse and the same operands as the
4125 		 same, because there is no code in the program that can
4126 		 compare the two values...  */
4127 	      || (gimple_vdef (call_stmt)
4128 		  /* ... unless the call returns a pointer which does
4129 		     not alias with anything else, in which case the
4130 		     information that the values are distinct is encoded
4131 		     in the IL.  */
4132 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4133 		  /* Only perform the following when being called from PRE
4134 		     which embeds tail merging.  */
4135 		  && default_vn_walk_kind == VN_WALK)))
4136 	changed = visit_reference_op_call (lhs, call_stmt);
4137       else
4138 	changed = defs_to_varying (call_stmt);
4139     }
4140   else
4141     changed = defs_to_varying (stmt);
4142  done:
4143   return changed;
4144 }
4145 
4146 /* Compare two operands by reverse postorder index */
4147 
4148 static int
4149 compare_ops (const void *pa, const void *pb)
4150 {
4151   const tree opa = *((const tree *)pa);
4152   const tree opb = *((const tree *)pb);
4153   gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4154   gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4155   basic_block bba;
4156   basic_block bbb;
4157 
4158   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4159     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4160   else if (gimple_nop_p (opstmta))
4161     return -1;
4162   else if (gimple_nop_p (opstmtb))
4163     return 1;
4164 
4165   bba = gimple_bb (opstmta);
4166   bbb = gimple_bb (opstmtb);
4167 
4168   if (!bba && !bbb)
4169     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4170   else if (!bba)
4171     return -1;
4172   else if (!bbb)
4173     return 1;
4174 
4175   if (bba == bbb)
4176     {
4177       if (gimple_code (opstmta) == GIMPLE_PHI
4178 	  && gimple_code (opstmtb) == GIMPLE_PHI)
4179 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4180       else if (gimple_code (opstmta) == GIMPLE_PHI)
4181 	return -1;
4182       else if (gimple_code (opstmtb) == GIMPLE_PHI)
4183 	return 1;
4184       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4185         return gimple_uid (opstmta) - gimple_uid (opstmtb);
4186       else
4187 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4188     }
4189   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4190 }
4191 
4192 /* Sort an array containing members of a strongly connected component
4193    SCC so that the members are ordered by RPO number.
4194    This means that when the sort is complete, iterating through the
4195    array will give you the members in RPO order.  */
4196 
4197 static void
4198 sort_scc (vec<tree> scc)
4199 {
4200   scc.qsort (compare_ops);
4201 }
4202 
4203 /* Insert the no longer used nary ONARY into the hash tables in INFO.  */
4204 
4205 static void
4206 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4207 {
4208   size_t size = sizeof_vn_nary_op (onary->length);
4209   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4210 					       &info->nary_obstack);
4211   memcpy (nary, onary, size);
4212   vn_nary_op_insert_into (nary, info->nary, false);
4213 }
4214 
4215 /* Insert the no longer used phi OPHI into the hash tables in INFO.  */
4216 
4217 static void
4218 copy_phi (vn_phi_t ophi, vn_tables_t info)
4219 {
4220   vn_phi_t phi = info->phis_pool->allocate ();
4221   vn_phi_s **slot;
4222   memcpy (phi, ophi, sizeof (*phi));
4223   ophi->phiargs.create (0);
4224   slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4225   gcc_assert (!*slot);
4226   *slot = phi;
4227 }
4228 
4229 /* Insert the no longer used reference OREF into the hash tables in INFO.  */
4230 
4231 static void
4232 copy_reference (vn_reference_t oref, vn_tables_t info)
4233 {
4234   vn_reference_t ref;
4235   vn_reference_s **slot;
4236   ref = info->references_pool->allocate ();
4237   memcpy (ref, oref, sizeof (*ref));
4238   oref->operands.create (0);
4239   slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4240   if (*slot)
4241     free_reference (*slot);
4242   *slot = ref;
4243 }
4244 
4245 /* Process a strongly connected component in the SSA graph.  */
4246 
4247 static void
4248 process_scc (vec<tree> scc)
4249 {
4250   tree var;
4251   unsigned int i;
4252   unsigned int iterations = 0;
4253   bool changed = true;
4254   vn_nary_op_iterator_type hin;
4255   vn_phi_iterator_type hip;
4256   vn_reference_iterator_type hir;
4257   vn_nary_op_t nary;
4258   vn_phi_t phi;
4259   vn_reference_t ref;
4260 
4261   /* If the SCC has a single member, just visit it.  */
4262   if (scc.length () == 1)
4263     {
4264       tree use = scc[0];
4265       if (VN_INFO (use)->use_processed)
4266 	return;
4267       /* We need to make sure it doesn't form a cycle itself, which can
4268 	 happen for self-referential PHI nodes.  In that case we would
4269 	 end up inserting an expression with VN_TOP operands into the
4270 	 valid table which makes us derive bogus equivalences later.
4271 	 The cheapest way to check this is to assume it for all PHI nodes.  */
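      /* E.g. a degenerate loop PHI like
	     x_1 = PHI <x_1(3), x_2(2)>
	 forms a single-member SCC that still references itself, so it is
	 sent through the iteration below like any larger SCC.  */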
4272       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4273 	/* Fallthru to iteration.  */ ;
4274       else
4275 	{
4276 	  visit_use (use);
4277 	  return;
4278 	}
4279     }
4280 
4281   if (dump_file && (dump_flags & TDF_DETAILS))
4282     print_scc (dump_file, scc);
4283 
4284   /* Iterate over the SCC with the optimistic table until it stops
4285      changing.  */
4286   current_info = optimistic_info;
4287   while (changed)
4288     {
4289       changed = false;
4290       iterations++;
4291       if (dump_file && (dump_flags & TDF_DETAILS))
4292 	fprintf (dump_file, "Starting iteration %d\n", iterations);
4293       /* As we are value-numbering optimistically we have to
4294 	 clear the expression tables and the simplified expressions
4295 	 in each iteration until we converge.  */
4296       optimistic_info->nary->empty ();
4297       optimistic_info->phis->empty ();
4298       optimistic_info->references->empty ();
4299       obstack_free (&optimistic_info->nary_obstack, NULL);
4300       gcc_obstack_init (&optimistic_info->nary_obstack);
4301       optimistic_info->phis_pool->release ();
4302       optimistic_info->references_pool->release ();
4303       FOR_EACH_VEC_ELT (scc, i, var)
4304 	gcc_assert (!VN_INFO (var)->needs_insertion
4305 		    && VN_INFO (var)->expr == NULL);
4306       FOR_EACH_VEC_ELT (scc, i, var)
4307 	changed |= visit_use (var);
4308     }
4309 
4310   if (dump_file && (dump_flags & TDF_DETAILS))
4311     fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4312   statistics_histogram_event (cfun, "SCC iterations", iterations);
4313 
4314   /* Finally, copy the contents of the no longer used optimistic
4315      table to the valid table.  */
4316   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4317     copy_nary (nary, valid_info);
4318   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4319     copy_phi (phi, valid_info);
4320   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4321 			       ref, vn_reference_t, hir)
4322     copy_reference (ref, valid_info);
4323 
4324   current_info = valid_info;
4325 }
4326 
4327 
4328 /* Pop the components of the found SCC for NAME off the SCC stack
4329    and process them.  Returns true if all went well, false if
4330    we run into resource limits.  */
4331 
4332 static bool
4333 extract_and_process_scc_for_name (tree name)
4334 {
4335   auto_vec<tree> scc;
4336   tree x;
4337 
4338   /* Found an SCC, pop the components off the SCC stack and
4339      process them.  */
4340   do
4341     {
4342       x = sccstack.pop ();
4343 
4344       VN_INFO (x)->on_sccstack = false;
4345       scc.safe_push (x);
4346     } while (x != name);
4347 
4348   /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
4349   if (scc.length ()
4350       > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4351     {
4352       if (dump_file)
4353 	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4354 		 "SCC size %u exceeding %u\n", scc.length (),
4355 		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4356 
4357       return false;
4358     }
4359 
4360   if (scc.length () > 1)
4361     sort_scc (scc);
4362 
4363   process_scc (scc);
4364 
4365   return true;
4366 }
4367 
4368 /* Depth first search on NAME to discover and process SCCs in the SSA
4369    graph.
4370    Execution of this algorithm relies on the fact that the SCCs are
4371    popped off the stack in topological order.
4372    Returns true if successful, false if we stopped processing SCCs due
4373    to resource constraints.  */
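/* The recursion over use operands is implemented iteratively below:
   namevec and itervec act as an explicit call stack, saving the name
   being processed and its operand iterator before descending into an
   unvisited use and restoring them at the continue_walking label.  */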
4374 
4375 static bool
4376 DFS (tree name)
4377 {
4378   auto_vec<ssa_op_iter> itervec;
4379   auto_vec<tree> namevec;
4380   use_operand_p usep = NULL;
4381   gimple *defstmt;
4382   tree use;
4383   ssa_op_iter iter;
4384 
4385 start_over:
4386   /* SCC info */
4387   VN_INFO (name)->dfsnum = next_dfs_num++;
4388   VN_INFO (name)->visited = true;
4389   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4390 
4391   sccstack.safe_push (name);
4392   VN_INFO (name)->on_sccstack = true;
4393   defstmt = SSA_NAME_DEF_STMT (name);
4394 
4395   /* Recursively DFS on our operands, looking for SCC's.  */
4396   if (!gimple_nop_p (defstmt))
4397     {
4398       /* Push a new iterator.  */
4399       if (gphi *phi = dyn_cast <gphi *> (defstmt))
4400 	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4401       else
4402 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4403     }
4404   else
4405     clear_and_done_ssa_iter (&iter);
4406 
4407   while (1)
4408     {
4409       /* If we are done processing uses of a name, go up the stack
4410 	 of iterators and process SCCs as we found them.  */
4411       if (op_iter_done (&iter))
4412 	{
4413 	  /* See if we found an SCC.  */
4414 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4415 	    if (!extract_and_process_scc_for_name (name))
4416 	      return false;
4417 
4418 	  /* Check if we are done.  */
4419 	  if (namevec.is_empty ())
4420 	    return true;
4421 
4422 	  /* Restore the last use walker and continue walking there.  */
4423 	  use = name;
4424 	  name = namevec.pop ();
4425 	  memcpy (&iter, &itervec.last (),
4426 		  sizeof (ssa_op_iter));
4427 	  itervec.pop ();
4428 	  goto continue_walking;
4429 	}
4430 
4431       use = USE_FROM_PTR (usep);
4432 
4433       /* Since we handle phi nodes, we will sometimes get
4434 	 invariants in the use expression.  */
4435       if (TREE_CODE (use) == SSA_NAME)
4436 	{
4437 	  if (! (VN_INFO (use)->visited))
4438 	    {
4439 	      /* Recurse by pushing the current use walking state on
4440 		 the stack and starting over.  */
4441 	      itervec.safe_push (iter);
4442 	      namevec.safe_push (name);
4443 	      name = use;
4444 	      goto start_over;
4445 
4446 continue_walking:
4447 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4448 					 VN_INFO (use)->low);
4449 	    }
4450 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4451 	      && VN_INFO (use)->on_sccstack)
4452 	    {
4453 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4454 					 VN_INFO (name)->low);
4455 	    }
4456 	}
4457 
4458       usep = op_iter_next_use (&iter);
4459     }
4460 }
4461 
4462 /* Allocate a value number table.  */
4463 
4464 static void
4465 allocate_vn_table (vn_tables_t table)
4466 {
4467   table->phis = new vn_phi_table_type (23);
4468   table->nary = new vn_nary_op_table_type (23);
4469   table->references = new vn_reference_table_type (23);
4470 
4471   gcc_obstack_init (&table->nary_obstack);
4472   table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4473   table->references_pool = new object_allocator<vn_reference_s>
4474     ("VN references");
4475 }
4476 
4477 /* Free a value number table.  */
4478 
4479 static void
4480 free_vn_table (vn_tables_t table)
4481 {
4482   delete table->phis;
4483   table->phis = NULL;
4484   delete table->nary;
4485   table->nary = NULL;
4486   delete table->references;
4487   table->references = NULL;
4488   obstack_free (&table->nary_obstack, NULL);
4489   delete table->phis_pool;
4490   delete table->references_pool;
4491 }
4492 
4493 static void
4494 init_scc_vn (void)
4495 {
4496   int j;
4497   int *rpo_numbers_temp;
4498 
4499   calculate_dominance_info (CDI_DOMINATORS);
4500   mark_dfs_back_edges ();
4501 
4502   sccstack.create (0);
4503   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4504 
4505   constant_value_ids = BITMAP_ALLOC (NULL);
4506 
4507   next_dfs_num = 1;
4508   next_value_id = 1;
4509 
4510   vn_ssa_aux_table.create (num_ssa_names + 1);
4511   /* VEC_alloc doesn't actually grow it to the right size; it just
4512      preallocates the space to do so.  */
4513   vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4514   gcc_obstack_init (&vn_ssa_aux_obstack);
4515 
4516   shared_lookup_phiargs.create (0);
4517   shared_lookup_references.create (0);
4518   rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4519   rpo_numbers_temp =
4520     XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4521   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4522 
4523   /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] is the
4524      index of the i'th block in RPO order.  We want to map block indices
4525      to RPO numbers, so we need to invert this array.  */
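  /* E.g. if the RPO order is { bb2, bb5, bb3 } then rpo_numbers_temp is
     { 2, 5, 3 } and we end up with rpo_numbers[2] = 0, rpo_numbers[5] = 1
     and rpo_numbers[3] = 2 (an illustrative sketch of the inversion).  */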
4526   for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4527     rpo_numbers[rpo_numbers_temp[j]] = j;
4528 
4529   XDELETE (rpo_numbers_temp);
4530 
4531   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4532 
4533   renumber_gimple_stmt_uids ();
4534 
4535   /* Create the valid and optimistic value numbering tables.  */
4536   valid_info = XCNEW (struct vn_tables_s);
4537   allocate_vn_table (valid_info);
4538   optimistic_info = XCNEW (struct vn_tables_s);
4539   allocate_vn_table (optimistic_info);
4540   current_info = valid_info;
4541 
4542   /* Create the VN_INFO structures, and initialize value numbers to
4543      TOP or VARYING for parameters.  */
4544   size_t i;
4545   tree name;
4546 
4547   FOR_EACH_SSA_NAME (i, name, cfun)
4548     {
4549       VN_INFO_GET (name)->valnum = VN_TOP;
4550       VN_INFO (name)->needs_insertion = false;
4551       VN_INFO (name)->expr = NULL;
4552       VN_INFO (name)->value_id = 0;
4553 
4554       if (!SSA_NAME_IS_DEFAULT_DEF (name))
4555 	continue;
4556 
4557       switch (TREE_CODE (SSA_NAME_VAR (name)))
4558 	{
4559 	case VAR_DECL:
4560 	  /* Undefined vars keep TOP.  */
4561 	  break;
4562 
4563 	case PARM_DECL:
4564 	  /* Parameters are VARYING but we can record a condition
4565 	     if we know it is a non-NULL pointer.  */
4566 	  VN_INFO (name)->visited = true;
4567 	  VN_INFO (name)->valnum = name;
4568 	  if (POINTER_TYPE_P (TREE_TYPE (name))
4569 	      && nonnull_arg_p (SSA_NAME_VAR (name)))
4570 	    {
4571 	      tree ops[2];
4572 	      ops[0] = name;
4573 	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
4574 	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4575 					boolean_true_node, 0);
4576 	      if (dump_file && (dump_flags & TDF_DETAILS))
4577 		{
4578 		  fprintf (dump_file, "Recording ");
4579 		  print_generic_expr (dump_file, name, TDF_SLIM);
4580 		  fprintf (dump_file, " != 0\n");
4581 		}
4582 	    }
4583 	  break;
4584 
4585 	case RESULT_DECL:
4586 	  /* If the result is passed by invisible reference the default
4587 	     def is initialized, otherwise it's uninitialized.  */
4588 	  if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4589 	    {
4590 	      VN_INFO (name)->visited = true;
4591 	      VN_INFO (name)->valnum = name;
4592 	    }
4593 	  break;
4594 
4595 	default:
4596 	  gcc_unreachable ();
4597 	}
4598     }
4599 }
4600 
4601 /* Restore SSA info that has been reset on value leaders.  */
4602 
4603 void
4604 scc_vn_restore_ssa_info (void)
4605 {
4606   unsigned i;
4607   tree name;
4608 
4609   FOR_EACH_SSA_NAME (i, name, cfun)
4610     {
4611       if (has_VN_INFO (name))
4612 	{
4613 	  if (VN_INFO (name)->needs_insertion)
4614 	    ;
4615 	  else if (POINTER_TYPE_P (TREE_TYPE (name))
4616 		   && VN_INFO (name)->info.ptr_info)
4617 	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4618 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4619 		   && VN_INFO (name)->info.range_info)
4620 	    {
4621 	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4622 	      SSA_NAME_ANTI_RANGE_P (name)
4623 		= VN_INFO (name)->range_info_anti_range_p;
4624 	    }
4625 	}
4626     }
4627 }
4628 
4629 void
4630 free_scc_vn (void)
4631 {
4632   size_t i;
4633   tree name;
4634 
4635   delete constant_to_value_id;
4636   constant_to_value_id = NULL;
4637   BITMAP_FREE (constant_value_ids);
4638   shared_lookup_phiargs.release ();
4639   shared_lookup_references.release ();
4640   XDELETEVEC (rpo_numbers);
4641 
4642   FOR_EACH_SSA_NAME (i, name, cfun)
4643     {
4644       if (has_VN_INFO (name)
4645 	  && VN_INFO (name)->needs_insertion)
4646 	release_ssa_name (name);
4647     }
4648   obstack_free (&vn_ssa_aux_obstack, NULL);
4649   vn_ssa_aux_table.release ();
4650 
4651   sccstack.release ();
4652   free_vn_table (valid_info);
4653   XDELETE (valid_info);
4654   free_vn_table (optimistic_info);
4655   XDELETE (optimistic_info);
4656 
4657   BITMAP_FREE (const_parms);
4658 }
4659 
4660 /* Set *ID according to RESULT.  */
4661 
4662 static void
4663 set_value_id_for_result (tree result, unsigned int *id)
4664 {
4665   if (result && TREE_CODE (result) == SSA_NAME)
4666     *id = VN_INFO (result)->value_id;
4667   else if (result && is_gimple_min_invariant (result))
4668     *id = get_or_alloc_constant_value_id (result);
4669   else
4670     *id = get_next_value_id ();
4671 }
4672 
4673 /* Set the value ids in the valid hash tables.  */
4674 
4675 static void
4676 set_hashtable_value_ids (void)
4677 {
4678   vn_nary_op_iterator_type hin;
4679   vn_phi_iterator_type hip;
4680   vn_reference_iterator_type hir;
4681   vn_nary_op_t vno;
4682   vn_reference_t vr;
4683   vn_phi_t vp;
4684 
4685   /* Now set the value ids of the things we had put in the hash
4686      table.  */
4687 
4688   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4689     set_value_id_for_result (vno->result, &vno->value_id);
4690 
4691   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4692     set_value_id_for_result (vp->result, &vp->value_id);
4693 
4694   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4695 			       hir)
4696     set_value_id_for_result (vr->result, &vr->value_id);
4697 }
4698 
4699 class sccvn_dom_walker : public dom_walker
4700 {
4701 public:
4702   sccvn_dom_walker ()
4703     : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (0) {}
4704 
4705   virtual edge before_dom_children (basic_block);
4706   virtual void after_dom_children (basic_block);
4707 
4708   void record_cond (basic_block,
4709 		    enum tree_code code, tree lhs, tree rhs, bool value);
4710   void record_conds (basic_block,
4711 		     enum tree_code code, tree lhs, tree rhs, bool value);
4712 
4713   bool fail;
4714   auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4715     cond_stack;
4716 };
4717 
4718 /* Record a temporary condition for the BB and its dominated blocks.  */
4719 
4720 void
4721 sccvn_dom_walker::record_cond (basic_block bb,
4722 			       enum tree_code code, tree lhs, tree rhs,
4723 			       bool value)
4724 {
4725   tree ops[2] = { lhs, rhs };
4726   vn_nary_op_t old = NULL;
4727   if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4728     current_info->nary->remove_elt_with_hash (old, old->hashcode);
4729   vn_nary_op_t cond
4730     = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4731 				value
4732 				? boolean_true_node
4733 				: boolean_false_node, 0);
4734   if (dump_file && (dump_flags & TDF_DETAILS))
4735     {
4736       fprintf (dump_file, "Recording temporarily ");
4737       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4738       fprintf (dump_file, " %s ", get_tree_code_name (code));
4739       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4740       fprintf (dump_file, " == %s%s\n",
4741 	       value ? "true" : "false",
4742 	       old ? " (old entry saved)" : "");
4743     }
4744   cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4745 }
4746 
4747 /* Record temporary conditions for the BB and its dominated blocks
4748    according to LHS CODE RHS == VALUE and its dominated conditions.  */
4749 
4750 void
4751 sccvn_dom_walker::record_conds (basic_block bb,
4752 				enum tree_code code, tree lhs, tree rhs,
4753 				bool value)
4754 {
4755   /* Record the original condition.  */
4756   record_cond (bb, code, lhs, rhs, value);
4757 
4758   if (!value)
4759     return;
4760 
4761   /* Record dominated conditions if the condition is true.  Note that
4762      the inversion is already recorded.  */
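  /* E.g. recording a_1 < b_2 as true below also records a_1 <= b_2 and
     a_1 != b_2 as true and a_1 == b_2 as false.  */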
4763   switch (code)
4764     {
4765     case LT_EXPR:
4766     case GT_EXPR:
4767       record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4768       record_cond (bb, NE_EXPR, lhs, rhs, true);
4769       record_cond (bb, EQ_EXPR, lhs, rhs, false);
4770       break;
4771 
4772     case EQ_EXPR:
4773       record_cond (bb, LE_EXPR, lhs, rhs, true);
4774       record_cond (bb, GE_EXPR, lhs, rhs, true);
4775       record_cond (bb, LT_EXPR, lhs, rhs, false);
4776       record_cond (bb, GT_EXPR, lhs, rhs, false);
4777       break;
4778 
4779     default:
4780       break;
4781     }
4782 }
4783 
4784 /* Restore expressions and values derived from conditionals.  */
4785 
4786 void
4787 sccvn_dom_walker::after_dom_children (basic_block bb)
4788 {
4789   while (!cond_stack.is_empty ()
4790 	 && cond_stack.last ().first == bb)
4791     {
4792       vn_nary_op_t cond = cond_stack.last ().second.first;
4793       vn_nary_op_t old = cond_stack.last ().second.second;
4794       current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4795       if (old)
4796 	vn_nary_op_insert_into (old, current_info->nary, false);
4797       cond_stack.pop ();
4798     }
4799 }
4800 
4801 /* Value number all statements in BB.  */
4802 
4803 edge
4804 sccvn_dom_walker::before_dom_children (basic_block bb)
4805 {
4806   edge e;
4807   edge_iterator ei;
4808 
4809   if (fail)
4810     return NULL;
4811 
4812   if (dump_file && (dump_flags & TDF_DETAILS))
4813     fprintf (dump_file, "Visiting BB %d\n", bb->index);
4814 
4815   /* If we have a single predecessor record the equivalence from a
4816      possible condition on the predecessor edge.  */
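  /* For example, when the single predecessor ends in
	 if (x_1 == 0)
     and this block is reached through its true edge, x_1 == 0 is
     temporarily recorded as true (together with dominated conditions)
     while this block and its dominated children are value numbered
     (a rough sketch of what the code below sets up).  */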
4817   edge pred_e = NULL;
4818   FOR_EACH_EDGE (e, ei, bb->preds)
4819     {
4820       /* Ignore simple backedges into this block to allow recording
4821          conditions in loop headers.  */
4822       if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4823 	continue;
4824       if (! pred_e)
4825 	pred_e = e;
4826       else
4827 	{
4828 	  pred_e = NULL;
4829 	  break;
4830 	}
4831     }
4832   if (pred_e)
4833     {
4834       /* Check if there are multiple executable successor edges in
4835 	 the source block.  Otherwise there is no additional info
4836 	 to be recorded.  */
4837       edge e2;
4838       FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4839 	if (e2 != pred_e
4840 	    && e2->flags & EDGE_EXECUTABLE)
4841 	  break;
4842       if (e2 && (e2->flags & EDGE_EXECUTABLE))
4843 	{
4844 	  gimple *stmt = last_stmt (pred_e->src);
4845 	  if (stmt
4846 	      && gimple_code (stmt) == GIMPLE_COND)
4847 	    {
4848 	      enum tree_code code = gimple_cond_code (stmt);
4849 	      tree lhs = gimple_cond_lhs (stmt);
4850 	      tree rhs = gimple_cond_rhs (stmt);
4851 	      record_conds (bb, code, lhs, rhs,
4852 			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4853 	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
4854 	      if (code != ERROR_MARK)
4855 		record_conds (bb, code, lhs, rhs,
4856 			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4857 	    }
4858 	}
4859     }
4860 
4861   /* Value-number all defs in the basic-block.  */
4862   for (gphi_iterator gsi = gsi_start_phis (bb);
4863        !gsi_end_p (gsi); gsi_next (&gsi))
4864     {
4865       gphi *phi = gsi.phi ();
4866       tree res = PHI_RESULT (phi);
4867       if (!VN_INFO (res)->visited
4868 	  && !DFS (res))
4869 	{
4870 	  fail = true;
4871 	  return NULL;
4872 	}
4873     }
4874   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4875        !gsi_end_p (gsi); gsi_next (&gsi))
4876     {
4877       ssa_op_iter i;
4878       tree op;
4879       FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4880 	if (!VN_INFO (op)->visited
4881 	    && !DFS (op))
4882 	  {
4883 	    fail = true;
4884 	    return NULL;
4885 	  }
4886     }
4887 
4888   /* Finally look at the last stmt.  */
4889   gimple *stmt = last_stmt (bb);
4890   if (!stmt)
4891     return NULL;
4892 
4893   enum gimple_code code = gimple_code (stmt);
4894   if (code != GIMPLE_COND
4895       && code != GIMPLE_SWITCH
4896       && code != GIMPLE_GOTO)
4897     return NULL;
4898 
4899   if (dump_file && (dump_flags & TDF_DETAILS))
4900     {
4901       fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4902       print_gimple_stmt (dump_file, stmt, 0, 0);
4903     }
4904 
4905   /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
4906      if value-numbering can prove they are not reachable.  Handling
4907      computed gotos is also possible.  */
4908   tree val;
4909   switch (code)
4910     {
4911     case GIMPLE_COND:
4912       {
4913 	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4914 	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4915 	val = gimple_simplify (gimple_cond_code (stmt),
4916 			       boolean_type_node, lhs, rhs,
4917 			       NULL, vn_valueize);
4918 	/* If that didn't simplify to a constant see if we have recorded
4919 	   temporary expressions from taken edges.  */
4920 	if (!val || TREE_CODE (val) != INTEGER_CST)
4921 	  {
4922 	    tree ops[2];
4923 	    ops[0] = lhs;
4924 	    ops[1] = rhs;
4925 	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4926 					    boolean_type_node, ops, NULL);
4927 	  }
4928 	break;
4929       }
4930     case GIMPLE_SWITCH:
4931       val = gimple_switch_index (as_a <gswitch *> (stmt));
4932       break;
4933     case GIMPLE_GOTO:
4934       val = gimple_goto_dest (stmt);
4935       break;
4936     default:
4937       gcc_unreachable ();
4938     }
4939   if (!val)
4940     return NULL;
4941 
4942   edge taken = find_taken_edge (bb, vn_valueize (val));
4943   if (!taken)
4944     return NULL;
4945 
4946   if (dump_file && (dump_flags & TDF_DETAILS))
4947     fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4948 	     "not executable\n", bb->index, bb->index, taken->dest->index);
4949 
4950   return taken;
4951 }
4952 
4953 /* Do SCCVN.  Returns true if it finished, false if we bailed out
4954    due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
4955    how the alias oracle is used when walking during the VN process.  */
4956 
4957 bool
4958 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4959 {
4960   size_t i;
4961 
4962   default_vn_walk_kind = default_vn_walk_kind_;
4963 
4964   init_scc_vn ();
4965 
4966   /* Collect pointers we know point to readonly memory.  */
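  /* A sketch of what the loop below does: for a function whose type
     carries a "fn spec" attribute, each parameter whose character in the
     spec string is 'R' or 'r' has the SSA version of its default
     definition recorded in const_parms (the character at index 0 is not
     an argument spec, hence I starts at 1).  */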
4967   const_parms = BITMAP_ALLOC (NULL);
4968   tree fnspec = lookup_attribute ("fn spec",
4969 				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4970   if (fnspec)
4971     {
4972       fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4973       i = 1;
4974       for (tree arg = DECL_ARGUMENTS (cfun->decl);
4975 	   arg; arg = DECL_CHAIN (arg), ++i)
4976 	{
4977 	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4978 	    break;
4979 	  if (TREE_STRING_POINTER (fnspec)[i]  == 'R'
4980 	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
4981 	    {
4982 	      tree name = ssa_default_def (cfun, arg);
4983 	      if (name)
4984 		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4985 	    }
4986 	}
4987     }
4988 
4989   /* Walk all blocks in dominator order, value-numbering stmts
4990      SSA defs and decide whether outgoing edges are not executable.  */
4991   sccvn_dom_walker walker;
4992   walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4993   if (walker.fail)
4994     {
4995       scc_vn_restore_ssa_info ();
4996       free_scc_vn ();
4997       return false;
4998     }
4999 
5000   /* Initialize the value ids and prune out remaining VN_TOPs
5001      from dead code.  */
5002   tree name;
5003 
5004   FOR_EACH_SSA_NAME (i, name, cfun)
5005     {
5006       vn_ssa_aux_t info = VN_INFO (name);
5007       if (!info->visited)
5008 	info->valnum = name;
5009       if (info->valnum == name
5010 	  || info->valnum == VN_TOP)
5011 	info->value_id = get_next_value_id ();
5012       else if (is_gimple_min_invariant (info->valnum))
5013 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
5014     }
5015 
5016   /* Propagate.  */
5017   FOR_EACH_SSA_NAME (i, name, cfun)
5018     {
5019       vn_ssa_aux_t info = VN_INFO (name);
5020       if (TREE_CODE (info->valnum) == SSA_NAME
5021 	  && info->valnum != name
5022 	  && info->value_id != VN_INFO (info->valnum)->value_id)
5023 	info->value_id = VN_INFO (info->valnum)->value_id;
5024     }
5025 
5026   set_hashtable_value_ids ();
5027 
5028   if (dump_file && (dump_flags & TDF_DETAILS))
5029     {
5030       fprintf (dump_file, "Value numbers:\n");
5031       FOR_EACH_SSA_NAME (i, name, cfun)
5032 	{
5033 	  if (VN_INFO (name)->visited
5034 	      && SSA_VAL (name) != name)
5035 	    {
5036 	      print_generic_expr (dump_file, name, 0);
5037 	      fprintf (dump_file, " = ");
5038 	      print_generic_expr (dump_file, SSA_VAL (name), 0);
5039 	      fprintf (dump_file, "\n");
5040 	    }
5041 	}
5042     }
5043 
5044   return true;
5045 }
5046 
5047 /* Return the maximum value id we have ever seen.  */
5048 
5049 unsigned int
5050 get_max_value_id (void)
5051 {
5052   return next_value_id;
5053 }
5054 
5055 /* Return the next unique value id.  */
5056 
5057 unsigned int
5058 get_next_value_id (void)
5059 {
5060   return next_value_id++;
5061 }
5062 
5063 
5064 /* Compare two expressions E1 and E2 and return true if they are equal.  */
5065 
5066 bool
5067 expressions_equal_p (tree e1, tree e2)
5068 {
5069   /* The obvious case.  */
5070   if (e1 == e2)
5071     return true;
5072 
5073   /* If either one is VN_TOP consider them equal.  */
5074   if (e1 == VN_TOP || e2 == VN_TOP)
5075     return true;
5076 
5077   /* If only one of them is null, they cannot be equal.  */
5078   if (!e1 || !e2)
5079     return false;
5080 
5081   /* Now perform the actual comparison.  */
5082   if (TREE_CODE (e1) == TREE_CODE (e2)
5083       && operand_equal_p (e1, e2, OEP_PURE_SAME))
5084     return true;
5085 
5086   return false;
5087 }
5088 
5089 
5090 /* Return true if the nary operation NARY may trap.  This is a copy
5091    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
5092 
5093 bool
5094 vn_nary_may_trap (vn_nary_op_t nary)
5095 {
5096   tree type;
5097   tree rhs2 = NULL_TREE;
5098   bool honor_nans = false;
5099   bool honor_snans = false;
5100   bool fp_operation = false;
5101   bool honor_trapv = false;
5102   bool handled, ret;
5103   unsigned i;
5104 
5105   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5106       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5107       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5108     {
5109       type = nary->type;
5110       fp_operation = FLOAT_TYPE_P (type);
5111       if (fp_operation)
5112 	{
5113 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
5114 	  honor_snans = flag_signaling_nans != 0;
5115 	}
5116       else if (INTEGRAL_TYPE_P (type)
5117 	       && TYPE_OVERFLOW_TRAPS (type))
5118 	honor_trapv = true;
5119     }
5120   if (nary->length >= 2)
5121     rhs2 = nary->op[1];
5122   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5123 				       honor_trapv,
5124 				       honor_nans, honor_snans, rhs2,
5125 				       &handled);
5126   if (handled
5127       && ret)
5128     return true;
5129 
5130   for (i = 0; i < nary->length; ++i)
5131     if (tree_could_trap_p (nary->op[i]))
5132       return true;
5133 
5134   return false;
5135 }
5136