/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "ipa-ref.h"
#include "plugin-api.h"
#include "cgraph.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code,
   it is equivalent to a regular hash-based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until the value numbers do not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores of the same value
   to the same address from the same starting memory state are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly whole
   structure copies.
*/
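
/* As an illustration of the optimistic iteration (a sketch, not taken
   from any particular testcase), consider the single-block loop body

       # i_1 = PHI <0(entry), i_2(latch)>
       # j_3 = PHI <0(entry), j_4(latch)>
       i_2 = i_1 + 1;
       j_4 = j_3 + 1;

   Here i_1, i_2, j_3 and j_4 form one SCC of the SSA graph.  In the
   first optimistic iteration both PHIs get the same value number
   because their backedge arguments are still VN_TOP (which the phi
   comparison ignores) and their known arguments are equal; the two
   additions then also get equal value numbers.  A second iteration
   reproduces the same numbers, so the assumptions i_1 == j_3 and
   i_2 == j_4 are proven and can be moved to the valid tables.  */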


/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation VNO1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation VP1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure PHI.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VR.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation VR1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

/* Compare the reference operations V and C for equality.  */

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

/* Free the reference operation structure V.  */

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination, which value-numbers the VDEF to the associated
   VUSE (but does not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
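
/* For example (a sketch, not from a particular testcase), given

       a_2 = b_1 + 1;
       c_3 = a_2;

   asking for the representative of c_3, whose value number is a_2,
   rebuilds the expression b_1 + 1 (with b_1 valueized) from a_2's
   defining statement and caches it in a_2's vn_ssa_aux entry.  */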

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
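
/* A few illustrative classifications (a sketch; the SSA names are
   made up):

       x_1 = y_2 + z_3;             => VN_NARY
       x_1 = a.b;                   => VN_REFERENCE
       x_1 = PHI <y_2, z_3>         => VN_PHI (via GIMPLE_PHI)
       x_1 = 42;                    => VN_CONSTANT
       x_1 = REALPART_EXPR <y_2>;   => VN_NARY (VOP-less reference)  */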

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT; if none exists, create a new one.
   Either way, return the value id.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in. */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
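
/* The offset merging above is what makes, e.g., the reference a.b.c
   and the equivalent MEM[&a + 8] (a sketch, assuming .b.c sits at
   byte offset 8) hash and compare equal: runs of operands with known
   constant offsets are summed so only the accumulated offset enters
   the hash and the comparison, and a dereference of &DECL is treated
   like an access to DECL itself.  */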

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	    {
	      offset_int off = mem_ref_offset (ref);
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi ();
	    }
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e., they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
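
/* As a worked example (a sketch with made-up names), the reference
   a.b[i_1] decomposes outermost-first into roughly

       { ARRAY_REF, op0 = i_1, op1 = low bound, op2 = element size }
       { COMPONENT_REF, op0 = FIELD_DECL b }
       { MEM_REF, op0 = 0, off = 0 }
       { ADDR_EXPR, op0 = &a }

   with the decl a canonicalized to MEM[&a] as described above, so the
   same vector results from valueizing MEM[p_2] with p_2 = &a.  */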

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
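
/* For instance (a sketch with a made-up layout), for the operand
   vector of MEM[&s + 4].f, with f a 32-bit field at byte offset 0 of
   its struct, this produces an ao_ref with base s, offset 32 bits,
   and size and max_size both 32 bits, which the alias oracle can
   then test against other references.  */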

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If two calls have a different non-SSA LHS, their vdef value numbers
     should be different.  By adding the LHS here in the vector, we ensure
     that the hashcode is different, guaranteeing a different value
     number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
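
/* For example (a sketch, assuming .foo.bar sits at byte offset 8),
   the operand pair

       { MEM_REF, op0 = 4 }  { ADDR_EXPR, op0 = &obj.foo.bar }

   is rewritten to

       { MEM_REF, op0 = 12 }  { ADDR_EXPR, op0 = &obj }

   folding the component offset into the MEM_REF offset.  */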

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS by
   looking through the defining statement of the address SSA name,
   forward-propagating ADDR_EXPR and POINTER_PLUS_EXPR definitions
   into the preceding MEM_REF.  Updates *I_P to point to the last
   element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
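
/* As an illustration (a sketch; the SSA names are made up), with

       p_1 = q_2 + 16;            // POINTER_PLUS_EXPR
       ... = MEM[p_1 + 4];

   the MEM_REF operand pair for MEM[p_1 + 4] is rewritten to use q_2
   with offset 20, and the function then recurses in case q_2 is in
   turn defined by another ADDR_EXPR or POINTER_PLUS_EXPR.  */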

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (operands[i].off == -1)
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      if (native_encode_expr (ctor, buf, size, off) > 0)
		return native_interpret_expr (ref->type, buf, size);
	    }
	}
    }

  return NULL_TREE;
}
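
/* A typical fold this enables (a sketch, not from a testcase):

       static const int tab[4] = { 1, 2, 4, 8 };
       ... = tab[2];       // the read folds to the constant 4

   via ctor_for_folding and fold_ctor_reference on tab's initializer,
   since all operand offsets of the reference are known constants.  */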

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

/* Like valueize_refs_1, but discard the valueized-anything flag.  */

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE, either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					        va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
		       bool disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's
     LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls, valueize their arguments and call the
         alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034, which has
	 conditional calls to free falsely clobbering REF because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  if (TREE_CODE (oldargs[i]) == SSA_NAME
	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    return NULL;
	}
    }

  if (disambiguate_only)
    return (void *)-1;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	           (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

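  /* For example (a sketch, assuming a is a plain struct), given

         memset (&a, 0, sizeof (a));
         ... = a.x;

     the load of a.x is fully covered by the memset range, so it is
     given the value 0 without walking further back in the VUSE
     chain.  */
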
1755   else if (is_gimple_reg_type (vr->type)
1756 	   && gimple_assign_single_p (def_stmt)
1757 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1758 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1759     {
1760       tree base2;
1761       HOST_WIDE_INT offset2, size2, maxsize2;
1762       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1763 				       &offset2, &size2, &maxsize2);
1764       if (maxsize2 != -1
1765 	  && operand_equal_p (base, base2, 0)
1766 	  && offset2 <= offset
1767 	  && offset2 + size2 >= offset + maxsize)
1768 	{
1769 	  tree val = build_zero_cst (vr->type);
1770 	  return vn_reference_lookup_or_insert_for_pieces
1771 	           (vuse, vr->set, vr->type, vr->operands, val);
1772 	}
1773     }
1774 
1775   /* 3) Assignment from a constant.  We can use fold's native encode/interpret
1776      routines to extract the assigned bits.  */
1777   else if (vn_walk_kind == VN_WALKREWRITE
1778 	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1779 	   && ref->size == maxsize
1780 	   && maxsize % BITS_PER_UNIT == 0
1781 	   && offset % BITS_PER_UNIT == 0
1782 	   && is_gimple_reg_type (vr->type)
1783 	   && gimple_assign_single_p (def_stmt)
1784 	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1785     {
1786       tree base2;
1787       HOST_WIDE_INT offset2, size2, maxsize2;
1788       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1789 				       &offset2, &size2, &maxsize2);
1790       if (maxsize2 != -1
1791 	  && maxsize2 == size2
1792 	  && size2 % BITS_PER_UNIT == 0
1793 	  && offset2 % BITS_PER_UNIT == 0
1794 	  && operand_equal_p (base, base2, 0)
1795 	  && offset2 <= offset
1796 	  && offset2 + size2 >= offset + maxsize)
1797 	{
1798 	  /* We support up to 512-bit values (for V8DFmode).  */
1799 	  unsigned char buffer[64];
1800 	  int len;
1801 
1802 	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1803 				    buffer, sizeof (buffer));
1804 	  if (len > 0)
1805 	    {
1806 	      tree val = native_interpret_expr (vr->type,
1807 						buffer
1808 						+ ((offset - offset2)
1809 						   / BITS_PER_UNIT),
1810 						ref->size / BITS_PER_UNIT);
1811 	      if (val)
1812 		return vn_reference_lookup_or_insert_for_pieces
1813 		         (vuse, vr->set, vr->type, vr->operands, val);
1814 	    }
1815 	}
1816     }
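
  /* This covers type-punning loads; e.g. with a union of int and float
     (illustrative names)

	 u.f = 1.0f;
	 x_1 = u.i;

     the bytes of the float constant are encoded by native_encode_expr
     and re-interpreted as an int by native_interpret_expr.  */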
1817 
1818   /* 4) Assignment from an SSA name whose definition we may be able
1819      to access pieces from.  */
1820   else if (ref->size == maxsize
1821 	   && is_gimple_reg_type (vr->type)
1822 	   && gimple_assign_single_p (def_stmt)
1823 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1824     {
1825       tree rhs1 = gimple_assign_rhs1 (def_stmt);
1826       gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1827       if (is_gimple_assign (def_stmt2)
1828 	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1829 	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1830 	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1831 	{
1832 	  tree base2;
1833 	  HOST_WIDE_INT offset2, size2, maxsize2, off;
1834 	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1835 					   &offset2, &size2, &maxsize2);
1836 	  off = offset - offset2;
1837 	  if (maxsize2 != -1
1838 	      && maxsize2 == size2
1839 	      && operand_equal_p (base, base2, 0)
1840 	      && offset2 <= offset
1841 	      && offset2 + size2 >= offset + maxsize)
1842 	    {
1843 	      tree val = NULL_TREE;
1844 	      HOST_WIDE_INT elsz
1845 		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1846 	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1847 		{
1848 		  if (off == 0)
1849 		    val = gimple_assign_rhs1 (def_stmt2);
1850 		  else if (off == elsz)
1851 		    val = gimple_assign_rhs2 (def_stmt2);
1852 		}
1853 	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1854 		       && off % elsz == 0)
1855 		{
1856 		  tree ctor = gimple_assign_rhs1 (def_stmt2);
1857 		  unsigned i = off / elsz;
1858 		  if (i < CONSTRUCTOR_NELTS (ctor))
1859 		    {
1860 		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1861 		      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1862 			{
1863 			  if (TREE_CODE (TREE_TYPE (elt->value))
1864 			      != VECTOR_TYPE)
1865 			    val = elt->value;
1866 			}
1867 		    }
1868 		}
1869 	      if (val)
1870 		return vn_reference_lookup_or_insert_for_pieces
1871 		         (vuse, vr->set, vr->type, vr->operands, val);
1872 	    }
1873 	}
1874     }
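
  /* For example (illustrative GIMPLE)

	 _1 = COMPLEX_EXPR <_2, _3>;
	 c = _1;
	 x_4 = REALPART_EXPR <c>;

     the piece at offset zero is value-numbered to _2 and the piece
     at offset elsz to _3.  */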
1875 
1876   /* 5) For aggregate copies translate the reference through them if
1877      the copy kills ref.  */
1878   else if (vn_walk_kind == VN_WALKREWRITE
1879 	   && gimple_assign_single_p (def_stmt)
1880 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
1881 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1882 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1883     {
1884       tree base2;
1885       HOST_WIDE_INT offset2, size2, maxsize2;
1886       int i, j;
1887       auto_vec<vn_reference_op_s> rhs;
1888       vn_reference_op_t vro;
1889       ao_ref r;
1890 
1891       if (!lhs_ref_ok)
1892 	return (void *)-1;
1893 
1894       /* See if the assignment kills REF.  */
1895       base2 = ao_ref_base (&lhs_ref);
1896       offset2 = lhs_ref.offset;
1897       size2 = lhs_ref.size;
1898       maxsize2 = lhs_ref.max_size;
1899       if (maxsize2 == -1
1900 	  || (base != base2 && !operand_equal_p (base, base2, 0))
1901 	  || offset2 > offset
1902 	  || offset2 + size2 < offset + maxsize)
1903 	return (void *)-1;
1904 
1905       /* Find the common base of ref and the lhs.  lhs_ops already
1906          contains valueized operands for the lhs.  */
1907       i = vr->operands.length () - 1;
1908       j = lhs_ops.length () - 1;
1909       while (j >= 0 && i >= 0
1910 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1911 	{
1912 	  i--;
1913 	  j--;
1914 	}
1915 
1916       /* ???  The innermost op should always be a MEM_REF and we already
1917          checked that the assignment to the lhs kills vr.  Thus for
1918 	 aggregate copies using char[] types the vn_reference_op_eq
1919 	 may fail when comparing types for compatibility.  But we really
1920 	 don't care here - further lookups with the rewritten operands
1921 	 will simply fail if we messed up types too badly.  */
1922       HOST_WIDE_INT extra_off = 0;
1923       if (j == 0 && i >= 0
1924 	  && lhs_ops[0].opcode == MEM_REF
1925 	  && lhs_ops[0].off != -1)
1926 	{
1927 	  if (lhs_ops[0].off == vr->operands[i].off)
1928 	    i--, j--;
1929 	  else if (vr->operands[i].opcode == MEM_REF
1930 		   && vr->operands[i].off != -1)
1931 	    {
1932 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
1933 	      i--, j--;
1934 	    }
1935 	}
1936 
1937       /* i now points to the first additional op.
1938 	 ???  LHS may not be completely contained in VR, one or more
1939 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
1940 	 try handling outermost VIEW_CONVERT_EXPRs.  */
1941       if (j != -1)
1942 	return (void *)-1;
1943 
1944       /* Now re-write REF to be based on the rhs of the assignment.  */
1945       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1946 
1947       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
1948       if (extra_off != 0)
1949 	{
1950 	  if (rhs.length () < 2
1951 	      || rhs[0].opcode != MEM_REF
1952 	      || rhs[0].off == -1)
1953 	    return (void *)-1;
1954 	  rhs[0].off += extra_off;
1955 	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1956 					build_int_cst (TREE_TYPE (rhs[0].op0),
1957 						       extra_off));
1958 	}
1959 
1960       /* We need to prepend vr->operands[0..i] to rhs.  */
1961       vec<vn_reference_op_s> old = vr->operands;
1962       if (i + 1 + rhs.length () > vr->operands.length ())
1963 	{
1964 	  vr->operands.safe_grow (i + 1 + rhs.length ());
1965 	  if (old == shared_lookup_references)
1966 	    shared_lookup_references = vr->operands;
1967 	}
1968       else
1969 	vr->operands.truncate (i + 1 + rhs.length ());
1970       FOR_EACH_VEC_ELT (rhs, j, vro)
1971 	vr->operands[i + 1 + j] = *vro;
1972       vr->operands = valueize_refs (vr->operands);
1973       if (old == shared_lookup_references)
1974 	shared_lookup_references = vr->operands;
1975       vr->hashcode = vn_reference_compute_hash (vr);
1976 
1977       /* Try folding the new reference to a constant.  */
1978       tree val = fully_constant_vn_reference_p (vr);
1979       if (val)
1980 	return vn_reference_lookup_or_insert_for_pieces
1981 		 (vuse, vr->set, vr->type, vr->operands, val);
1982 
1983       /* Adjust *ref from the new operands.  */
1984       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1985 	return (void *)-1;
1986       /* This can happen with bitfields.  */
1987       if (ref->size != r.size)
1988 	return (void *)-1;
1989       *ref = r;
1990 
1991       /* Do not update last seen VUSE after translating.  */
1992       last_vuse_ptr = NULL;
1993 
1994       /* Keep looking for the adjusted *REF / VR pair.  */
1995       return NULL;
1996     }
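
  /* For example (illustrative GIMPLE), given the aggregate copy

	 b = a;
	 x_1 = b.f;

     the reference b.f is rewritten in terms of a and the walk
     continues, now looking for a dominating definition of a.f.  */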
1997 
1998   /* 6) For memcpy copies translate the reference through them if
1999      the copy kills ref.  */
2000   else if (vn_walk_kind == VN_WALKREWRITE
2001 	   && is_gimple_reg_type (vr->type)
2002 	   /* ???  Handle BCOPY as well.  */
2003 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2004 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2005 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2006 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2007 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2008 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2009 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2010 	   && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2011     {
2012       tree lhs, rhs;
2013       ao_ref r;
2014       HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2015       vn_reference_op_s op;
2016       HOST_WIDE_INT at;
2017 
2018 
2019       /* Only handle non-variable, addressable refs.  */
2020       if (ref->size != maxsize
2021 	  || offset % BITS_PER_UNIT != 0
2022 	  || ref->size % BITS_PER_UNIT != 0)
2023 	return (void *)-1;
2024 
2025       /* Extract a pointer base and an offset for the destination.  */
2026       lhs = gimple_call_arg (def_stmt, 0);
2027       lhs_offset = 0;
2028       if (TREE_CODE (lhs) == SSA_NAME)
2029 	lhs = SSA_VAL (lhs);
2030       if (TREE_CODE (lhs) == ADDR_EXPR)
2031 	{
2032 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2033 						    &lhs_offset);
2034 	  if (!tem)
2035 	    return (void *)-1;
2036 	  if (TREE_CODE (tem) == MEM_REF
2037 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2038 	    {
2039 	      lhs = TREE_OPERAND (tem, 0);
2040 	      lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2041 	    }
2042 	  else if (DECL_P (tem))
2043 	    lhs = build_fold_addr_expr (tem);
2044 	  else
2045 	    return (void *)-1;
2046 	}
2047       if (TREE_CODE (lhs) != SSA_NAME
2048 	  && TREE_CODE (lhs) != ADDR_EXPR)
2049 	return (void *)-1;
2050 
2051       /* Extract a pointer base and an offset for the source.  */
2052       rhs = gimple_call_arg (def_stmt, 1);
2053       rhs_offset = 0;
2054       if (TREE_CODE (rhs) == SSA_NAME)
2055 	rhs = SSA_VAL (rhs);
2056       if (TREE_CODE (rhs) == ADDR_EXPR)
2057 	{
2058 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2059 						    &rhs_offset);
2060 	  if (!tem)
2061 	    return (void *)-1;
2062 	  if (TREE_CODE (tem) == MEM_REF
2063 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2064 	    {
2065 	      rhs = TREE_OPERAND (tem, 0);
2066 	      rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2067 	    }
2068 	  else if (DECL_P (tem))
2069 	    rhs = build_fold_addr_expr (tem);
2070 	  else
2071 	    return (void *)-1;
2072 	}
2073       if (TREE_CODE (rhs) != SSA_NAME
2074 	  && TREE_CODE (rhs) != ADDR_EXPR)
2075 	return (void *)-1;
2076 
2077       copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2078 
2079       /* The bases of the destination and the reference have to agree.  */
2080       if ((TREE_CODE (base) != MEM_REF
2081 	   && !DECL_P (base))
2082 	  || (TREE_CODE (base) == MEM_REF
2083 	      && (TREE_OPERAND (base, 0) != lhs
2084 		  || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2085 	  || (DECL_P (base)
2086 	      && (TREE_CODE (lhs) != ADDR_EXPR
2087 		  || TREE_OPERAND (lhs, 0) != base)))
2088 	return (void *)-1;
2089 
2090       /* And the access has to be contained within the memcpy destination.  */
2091       at = offset / BITS_PER_UNIT;
2092       if (TREE_CODE (base) == MEM_REF)
2093 	at += tree_to_uhwi (TREE_OPERAND (base, 1));
2094       if (lhs_offset > at
2095 	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2096 	return (void *)-1;
2097 
2098       /* Make room for 2 operands in the new reference.  */
2099       if (vr->operands.length () < 2)
2100 	{
2101 	  vec<vn_reference_op_s> old = vr->operands;
2102 	  vr->operands.safe_grow_cleared (2);
2103 	  if (old == shared_lookup_references
2104 	      && vr->operands != old)
2105 	    shared_lookup_references = vr->operands;
2106 	}
2107       else
2108 	vr->operands.truncate (2);
2109 
2110       /* The looked-through reference is a simple MEM_REF.  */
2111       memset (&op, 0, sizeof (op));
2112       op.type = vr->type;
2113       op.opcode = MEM_REF;
2114       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2115       op.off = at - lhs_offset + rhs_offset;
2116       vr->operands[0] = op;
2117       op.type = TREE_TYPE (rhs);
2118       op.opcode = TREE_CODE (rhs);
2119       op.op0 = rhs;
2120       op.off = -1;
2121       vr->operands[1] = op;
2122       vr->hashcode = vn_reference_compute_hash (vr);
2123 
2124       /* Adjust *ref from the new operands.  */
2125       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2126 	return (void *)-1;
2127       /* This can happen with bitfields.  */
2128       if (ref->size != r.size)
2129 	return (void *)-1;
2130       *ref = r;
2131 
2132       /* Do not update last seen VUSE after translating.  */
2133       last_vuse_ptr = NULL;
2134 
2135       /* Keep looking for the adjusted *REF / VR pair.  */
2136       return NULL;
2137     }
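
  /* For example (illustrative GIMPLE), given

	 memcpy (&b, &a, sizeof (b));
	 x_1 = b.f;

     the access is rewritten as a MEM_REF based on &a at the translated
     offset and the walk continues from there.  */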
2138 
2139   /* Bail out and stop walking.  */
2140   return (void *)-1;
2141 }
2142 
2143 /* Lookup a reference operation by its parts in the current hash table.
2144    Returns the resulting value number if it exists in the hash table,
2145    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
2146    vn_reference_t stored in the hashtable if something is found.  */
2147 
2148 tree
2149 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2150 			    vec<vn_reference_op_s> operands,
2151 			    vn_reference_t *vnresult, vn_lookup_kind kind)
2152 {
2153   struct vn_reference_s vr1;
2154   vn_reference_t tmp;
2155   tree cst;
2156 
2157   if (!vnresult)
2158     vnresult = &tmp;
2159   *vnresult = NULL;
2160 
2161   vr1.vuse = vuse_ssa_val (vuse);
2162   shared_lookup_references.truncate (0);
2163   shared_lookup_references.safe_grow (operands.length ());
2164   memcpy (shared_lookup_references.address (),
2165 	  operands.address (),
2166 	  sizeof (vn_reference_op_s)
2167 	  * operands.length ());
2168   vr1.operands = operands = shared_lookup_references
2169     = valueize_refs (shared_lookup_references);
2170   vr1.type = type;
2171   vr1.set = set;
2172   vr1.hashcode = vn_reference_compute_hash (&vr1);
2173   if ((cst = fully_constant_vn_reference_p (&vr1)))
2174     return cst;
2175 
2176   vn_reference_lookup_1 (&vr1, vnresult);
2177   if (!*vnresult
2178       && kind != VN_NOWALK
2179       && vr1.vuse)
2180     {
2181       ao_ref r;
2182       vn_walk_kind = kind;
2183       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2184 	*vnresult =
2185 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2186 						  vn_reference_lookup_2,
2187 						  vn_reference_lookup_3,
2188 						  vuse_ssa_val, &vr1);
2189       gcc_checking_assert (vr1.operands == shared_lookup_references);
2190     }
2191 
2192   if (*vnresult)
2193      return (*vnresult)->result;
2194 
2195   return NULL_TREE;
2196 }
2197 
2198 /* Lookup OP in the current hash table, and return the resulting value
2199    number if it exists in the hash table.  Return NULL_TREE if it does
2200    not exist in the hash table or if the result field of the structure
2201    was NULL.  VNRESULT will be filled in with the vn_reference_t
2202    stored in the hashtable if one exists.  When TBAA_P is false assume
2203    we are looking up a store and treat it as having alias-set zero.  */
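
/* A typical use, as in visit_reference_op_load below, is

     result = vn_reference_lookup (op, gimple_vuse (stmt),
				   default_vn_walk_kind, NULL, true);  */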
2204 
2205 tree
2206 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2207 		     vn_reference_t *vnresult, bool tbaa_p)
2208 {
2209   vec<vn_reference_op_s> operands;
2210   struct vn_reference_s vr1;
2211   tree cst;
2212   bool valueized_anything;
2213 
2214   if (vnresult)
2215     *vnresult = NULL;
2216 
2217   vr1.vuse = vuse_ssa_val (vuse);
2218   vr1.operands = operands
2219     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2220   vr1.type = TREE_TYPE (op);
2221   vr1.set = tbaa_p ? get_alias_set (op) : 0;
2222   vr1.hashcode = vn_reference_compute_hash (&vr1);
2223   if ((cst = fully_constant_vn_reference_p (&vr1)))
2224     return cst;
2225 
2226   if (kind != VN_NOWALK
2227       && vr1.vuse)
2228     {
2229       vn_reference_t wvnresult;
2230       ao_ref r;
2231       /* Make sure to use a valueized reference if we valueized anything.
2232          Otherwise preserve the full reference for advanced TBAA.  */
2233       if (!valueized_anything
2234 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2235 					     vr1.operands))
2236 	ao_ref_init (&r, op);
2237       if (! tbaa_p)
2238 	r.ref_alias_set = r.base_alias_set = 0;
2239       vn_walk_kind = kind;
2240       wvnresult =
2241 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2242 						vn_reference_lookup_2,
2243 						vn_reference_lookup_3,
2244 						vuse_ssa_val, &vr1);
2245       gcc_checking_assert (vr1.operands == shared_lookup_references);
2246       if (wvnresult)
2247 	{
2248 	  if (vnresult)
2249 	    *vnresult = wvnresult;
2250 	  return wvnresult->result;
2251 	}
2252 
2253       return NULL_TREE;
2254     }
2255 
2256   return vn_reference_lookup_1 (&vr1, vnresult);
2257 }
2258 
2259 /* Lookup CALL in the current hash table and return the entry in
2260    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
2261 
2262 void
2263 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2264 			  vn_reference_t vr)
2265 {
2266   if (vnresult)
2267     *vnresult = NULL;
2268 
2269   tree vuse = gimple_vuse (call);
2270 
2271   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2272   vr->operands = valueize_shared_reference_ops_from_call (call);
2273   vr->type = gimple_expr_type (call);
2274   vr->set = 0;
2275   vr->hashcode = vn_reference_compute_hash (vr);
2276   vn_reference_lookup_1 (vr, vnresult);
2277 }
2278 
2279 /* Insert OP into the current hash table with a value number of
2280    RESULT, and return the resulting reference structure we created.  */
2281 
2282 static vn_reference_t
2283 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2284 {
2285   vn_reference_s **slot;
2286   vn_reference_t vr1;
2287   bool tem;
2288 
2289   vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2290   if (TREE_CODE (result) == SSA_NAME)
2291     vr1->value_id = VN_INFO (result)->value_id;
2292   else
2293     vr1->value_id = get_or_alloc_constant_value_id (result);
2294   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2295   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2296   vr1->type = TREE_TYPE (op);
2297   vr1->set = get_alias_set (op);
2298   vr1->hashcode = vn_reference_compute_hash (vr1);
2299   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2300   vr1->result_vdef = vdef;
2301 
2302   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2303 							INSERT);
2304 
2305   /* Because we lookup stores using vuses, and value number failures
2306      using the vdefs (see visit_reference_op_store for how and why),
2307      it's possible that on failure we may try to insert an already
2308      inserted store.  This is not wrong, there is no ssa name for a
2309      store that we could use as a differentiator anyway.  Thus, unlike
2310      the other lookup functions, you cannot gcc_assert (!*slot)
2311      here.  */
2312 
2313   /* But free the old slot in case of a collision.  */
2314   if (*slot)
2315     free_reference (*slot);
2316 
2317   *slot = vr1;
2318   return vr1;
2319 }
2320 
2321 /* Insert a reference by its pieces into the current hash table with
2322    a value number of RESULT.  Return the resulting reference
2323    structure we created.  */
2324 
2325 vn_reference_t
2326 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2327 			    vec<vn_reference_op_s> operands,
2328 			    tree result, unsigned int value_id)
2329 
2330 {
2331   vn_reference_s **slot;
2332   vn_reference_t vr1;
2333 
2334   vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2335   vr1->value_id = value_id;
2336   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2337   vr1->operands = valueize_refs (operands);
2338   vr1->type = type;
2339   vr1->set = set;
2340   vr1->hashcode = vn_reference_compute_hash (vr1);
2341   if (result && TREE_CODE (result) == SSA_NAME)
2342     result = SSA_VAL (result);
2343   vr1->result = result;
2344 
2345   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2346 							INSERT);
2347 
2348   /* At this point we should have all the things inserted that we have
2349      seen before, and we should never try inserting something that
2350      already exists.  */
2351   gcc_assert (!*slot);
2352   if (*slot)
2353     free_reference (*slot);
2354 
2355   *slot = vr1;
2356   return vr1;
2357 }
2358 
2359 /* Compute and return the hash value for nary operation VNO1.  */
2360 
2361 static hashval_t
2362 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2363 {
2364   inchash::hash hstate;
2365   unsigned i;
2366 
2367   for (i = 0; i < vno1->length; ++i)
2368     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2369       vno1->op[i] = SSA_VAL (vno1->op[i]);
2370 
2371   if (vno1->length == 2
2372       && commutative_tree_code (vno1->opcode)
2373       && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2374     {
2375       tree temp = vno1->op[0];
2376       vno1->op[0] = vno1->op[1];
2377       vno1->op[1] = temp;
2378     }
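
  /* After this canonicalization commutative expressions such as
     a_1 + 3 and 3 + a_1 hash (and compare) equal.  */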
2379 
2380   hstate.add_int (vno1->opcode);
2381   for (i = 0; i < vno1->length; ++i)
2382     inchash::add_expr (vno1->op[i], hstate);
2383 
2384   return hstate.end ();
2385 }
2386 
2387 /* Compare nary operations VNO1 and VNO2 and return true if they are
2388    equivalent.  */
2389 
2390 bool
2391 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2392 {
2393   unsigned i;
2394 
2395   if (vno1->hashcode != vno2->hashcode)
2396     return false;
2397 
2398   if (vno1->length != vno2->length)
2399     return false;
2400 
2401   if (vno1->opcode != vno2->opcode
2402       || !types_compatible_p (vno1->type, vno2->type))
2403     return false;
2404 
2405   for (i = 0; i < vno1->length; ++i)
2406     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2407       return false;
2408 
2409   return true;
2410 }
2411 
2412 /* Initialize VNO from the pieces provided.  */
2413 
2414 static void
2415 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2416 			     enum tree_code code, tree type, tree *ops)
2417 {
2418   vno->opcode = code;
2419   vno->length = length;
2420   vno->type = type;
2421   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2422 }
2423 
2424 /* Initialize VNO from OP.  */
2425 
2426 static void
2427 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2428 {
2429   unsigned i;
2430 
2431   vno->opcode = TREE_CODE (op);
2432   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2433   vno->type = TREE_TYPE (op);
2434   for (i = 0; i < vno->length; ++i)
2435     vno->op[i] = TREE_OPERAND (op, i);
2436 }
2437 
2438 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2439 
2440 static unsigned int
2441 vn_nary_length_from_stmt (gimple stmt)
2442 {
2443   switch (gimple_assign_rhs_code (stmt))
2444     {
2445     case REALPART_EXPR:
2446     case IMAGPART_EXPR:
2447     case VIEW_CONVERT_EXPR:
2448       return 1;
2449 
2450     case BIT_FIELD_REF:
2451       return 3;
2452 
2453     case CONSTRUCTOR:
2454       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2455 
2456     default:
2457       return gimple_num_ops (stmt) - 1;
2458     }
2459 }
2460 
2461 /* Initialize VNO from STMT.  */
2462 
2463 static void
2464 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2465 {
2466   unsigned i;
2467 
2468   vno->opcode = gimple_assign_rhs_code (stmt);
2469   vno->type = gimple_expr_type (stmt);
2470   switch (vno->opcode)
2471     {
2472     case REALPART_EXPR:
2473     case IMAGPART_EXPR:
2474     case VIEW_CONVERT_EXPR:
2475       vno->length = 1;
2476       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2477       break;
2478 
2479     case BIT_FIELD_REF:
2480       vno->length = 3;
2481       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2482       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2483       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2484       break;
2485 
2486     case CONSTRUCTOR:
2487       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2488       for (i = 0; i < vno->length; ++i)
2489 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2490       break;
2491 
2492     default:
2493       gcc_checking_assert (!gimple_assign_single_p (stmt));
2494       vno->length = gimple_num_ops (stmt) - 1;
2495       for (i = 0; i < vno->length; ++i)
2496 	vno->op[i] = gimple_op (stmt, i + 1);
2497     }
2498 }
2499 
2500 /* Compute the hashcode for VNO and look for it in the hash table;
2501    return the resulting value number if it exists in the hash table.
2502    Return NULL_TREE if it does not exist in the hash table or if the
2503    result field of the operation is NULL.  VNRESULT will contain the
2504    vn_nary_op_t from the hashtable if it exists.  */
2505 
2506 static tree
2507 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2508 {
2509   vn_nary_op_s **slot;
2510 
2511   if (vnresult)
2512     *vnresult = NULL;
2513 
2514   vno->hashcode = vn_nary_op_compute_hash (vno);
2515   slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2516 						  NO_INSERT);
2517   if (!slot && current_info == optimistic_info)
2518     slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2519 						  NO_INSERT);
2520   if (!slot)
2521     return NULL_TREE;
2522   if (vnresult)
2523     *vnresult = *slot;
2524   return (*slot)->result;
2525 }
2526 
2527 /* Lookup an n-ary operation by its pieces and return the resulting value
2528    number if it exists in the hash table.  Return NULL_TREE if it does
2529    not exist in the hash table or if the result field of the operation
2530    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2531    is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
2532 
2533 tree
2534 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2535 			  tree type, tree *ops, vn_nary_op_t *vnresult)
2536 {
2537   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2538 				  sizeof_vn_nary_op (length));
2539   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2540   return vn_nary_op_lookup_1 (vno1, vnresult);
2541 }
2542 
2543 /* Lookup OP in the current hash table, and return the resulting value
2544    number if it exists in the hash table.  Return NULL_TREE if it does
2545    not exist in the hash table or if the result field of the operation
2546    is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
2547    if it exists.  */
2548 
2549 tree
2550 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2551 {
2552   vn_nary_op_t vno1
2553     = XALLOCAVAR (struct vn_nary_op_s,
2554 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2555   init_vn_nary_op_from_op (vno1, op);
2556   return vn_nary_op_lookup_1 (vno1, vnresult);
2557 }
2558 
2559 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2560    value number if it exists in the hash table.  Return NULL_TREE if
2561    it does not exist in the hash table.  VNRESULT will contain the
2562    vn_nary_op_t from the hashtable if it exists.  */
2563 
2564 tree
2565 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2566 {
2567   vn_nary_op_t vno1
2568     = XALLOCAVAR (struct vn_nary_op_s,
2569 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2570   init_vn_nary_op_from_stmt (vno1, stmt);
2571   return vn_nary_op_lookup_1 (vno1, vnresult);
2572 }
2573 
2574 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2575 
2576 static vn_nary_op_t
2577 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2578 {
2579   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2580 }
2581 
2582 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2583    obstack.  */
2584 
2585 static vn_nary_op_t
2586 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2587 {
2588   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2589 					       &current_info->nary_obstack);
2590 
2591   vno1->value_id = value_id;
2592   vno1->length = length;
2593   vno1->result = result;
2594 
2595   return vno1;
2596 }
2597 
2598 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2599    VNO->HASHCODE first.  */
2600 
2601 static vn_nary_op_t
2602 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2603 			bool compute_hash)
2604 {
2605   vn_nary_op_s **slot;
2606 
2607   if (compute_hash)
2608     vno->hashcode = vn_nary_op_compute_hash (vno);
2609 
2610   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2611   gcc_assert (!*slot);
2612 
2613   *slot = vno;
2614   return vno;
2615 }
2616 
2617 /* Insert an n-ary operation into the current hash table using its
2618    pieces.  Return the vn_nary_op_t structure we created and put in
2619    the hashtable.  */
2620 
2621 vn_nary_op_t
2622 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2623 			  tree type, tree *ops,
2624 			  tree result, unsigned int value_id)
2625 {
2626   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2627   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2628   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2629 }
2630 
2631 /* Insert OP into the current hash table with a value number of
2632    RESULT.  Return the vn_nary_op_t structure we created and put in
2633    the hashtable.  */
2634 
2635 vn_nary_op_t
2636 vn_nary_op_insert (tree op, tree result)
2637 {
2638   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2639   vn_nary_op_t vno1;
2640 
2641   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2642   init_vn_nary_op_from_op (vno1, op);
2643   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2644 }
2645 
2646 /* Insert the rhs of STMT into the current hash table with a value number of
2647    RESULT.  */
2648 
2649 vn_nary_op_t
2650 vn_nary_op_insert_stmt (gimple stmt, tree result)
2651 {
2652   vn_nary_op_t vno1
2653     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2654 			result, VN_INFO (result)->value_id);
2655   init_vn_nary_op_from_stmt (vno1, stmt);
2656   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2657 }
2658 
2659 /* Compute a hashcode for PHI operation VP1 and return it.  */
2660 
2661 static inline hashval_t
2662 vn_phi_compute_hash (vn_phi_t vp1)
2663 {
2664   inchash::hash hstate (vp1->block->index);
2665   int i;
2666   tree phi1op;
2667   tree type;
2668 
2669   /* If all PHI arguments are constants we need to distinguish
2670      the PHI node via its type.  */
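  /* E.g. phis <1, 2> of type int and of type long must not receive
     the same value number, so the type participates in the hash.  */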
2671   type = vp1->type;
2672   hstate.merge_hash (vn_hash_type (type));
2673 
2674   FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2675     {
2676       if (phi1op == VN_TOP)
2677 	continue;
2678       inchash::add_expr (phi1op, hstate);
2679     }
2680 
2681   return hstate.end ();
2682 }
2683 
2684 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
2685 
2686 static int
2687 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2688 {
2689   if (vp1->hashcode != vp2->hashcode)
2690     return false;
2691 
2692   if (vp1->block == vp2->block)
2693     {
2694       int i;
2695       tree phi1op;
2696 
2697       /* If the PHI nodes do not have compatible types
2698 	 they are not the same.  */
2699       if (!types_compatible_p (vp1->type, vp2->type))
2700 	return false;
2701 
2702       /* Any phi in the same block will have its arguments in the
2703 	 same edge order, because of how we store phi nodes.  */
2704       FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2705 	{
2706 	  tree phi2op = vp2->phiargs[i];
2707 	  if (phi1op == VN_TOP || phi2op == VN_TOP)
2708 	    continue;
2709 	  if (!expressions_equal_p (phi1op, phi2op))
2710 	    return false;
2711 	}
2712       return true;
2713     }
2714   return false;
2715 }
2716 
2717 static vec<tree> shared_lookup_phiargs;
2718 
2719 /* Lookup PHI in the current hash table, and return the resulting
2720    value number if it exists in the hash table.  Return NULL_TREE if
2721    it does not exist in the hash table.  */
2722 
2723 static tree
2724 vn_phi_lookup (gimple phi)
2725 {
2726   vn_phi_s **slot;
2727   struct vn_phi_s vp1;
2728   unsigned i;
2729 
2730   shared_lookup_phiargs.truncate (0);
2731 
2732   /* Canonicalize the SSA_NAME's to their value number.  */
2733   for (i = 0; i < gimple_phi_num_args (phi); i++)
2734     {
2735       tree def = PHI_ARG_DEF (phi, i);
2736       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2737       shared_lookup_phiargs.safe_push (def);
2738     }
2739   vp1.type = TREE_TYPE (gimple_phi_result (phi));
2740   vp1.phiargs = shared_lookup_phiargs;
2741   vp1.block = gimple_bb (phi);
2742   vp1.hashcode = vn_phi_compute_hash (&vp1);
2743   slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2744 						  NO_INSERT);
2745   if (!slot && current_info == optimistic_info)
2746     slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2747 						  NO_INSERT);
2748   if (!slot)
2749     return NULL_TREE;
2750   return (*slot)->result;
2751 }
2752 
2753 /* Insert PHI into the current hash table with a value number of
2754    RESULT.  */
2755 
2756 static vn_phi_t
2757 vn_phi_insert (gimple phi, tree result)
2758 {
2759   vn_phi_s **slot;
2760   vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2761   unsigned i;
2762   vec<tree> args = vNULL;
2763 
2764   /* Canonicalize the SSA_NAME's to their value number.  */
2765   for (i = 0; i < gimple_phi_num_args (phi); i++)
2766     {
2767       tree def = PHI_ARG_DEF (phi, i);
2768       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2769       args.safe_push (def);
2770     }
2771   vp1->value_id = VN_INFO (result)->value_id;
2772   vp1->type = TREE_TYPE (gimple_phi_result (phi));
2773   vp1->phiargs = args;
2774   vp1->block = gimple_bb (phi);
2775   vp1->result = result;
2776   vp1->hashcode = vn_phi_compute_hash (vp1);
2777 
2778   slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2779 
2780   /* Because we iterate over phi operations more than once, it's
2781      possible the slot might already exist here, hence no assert.  */
2782   *slot = vp1;
2783   return vp1;
2784 }
2785 
2786 
2787 /* Print set of components in strongly connected component SCC to OUT.  */
2788 
2789 static void
2790 print_scc (FILE *out, vec<tree> scc)
2791 {
2792   tree var;
2793   unsigned int i;
2794 
2795   fprintf (out, "SCC consists of:");
2796   FOR_EACH_VEC_ELT (scc, i, var)
2797     {
2798       fprintf (out, " ");
2799       print_generic_expr (out, var, 0);
2800     }
2801   fprintf (out, "\n");
2802 }
2803 
2804 /* Set the value number of FROM to TO, return true if it has changed
2805    as a result.  */
2806 
2807 static inline bool
2808 set_ssa_val_to (tree from, tree to)
2809 {
2810   tree currval = SSA_VAL (from);
2811   HOST_WIDE_INT toff, coff;
2812 
2813   /* The only thing we allow as value numbers are ssa_names
2814      and invariants.  So assert that here.  We don't allow VN_TOP
2815      as visiting a stmt should produce a value-number other than
2816      that.
2817      ???  Still VN_TOP can happen for unreachable code, so force
2818      it to varying in that case.  Not all code is prepared to
2819      get VN_TOP on valueization.  */
2820   if (to == VN_TOP)
2821     {
2822       if (dump_file && (dump_flags & TDF_DETAILS))
2823 	fprintf (dump_file, "Forcing value number to varying on "
2824 		 "receiving VN_TOP\n");
2825       to = from;
2826     }
2827 
2828   gcc_assert (to != NULL_TREE
2829 	      && ((TREE_CODE (to) == SSA_NAME
2830 		   && (to == from || SSA_VAL (to) == to))
2831 		  || is_gimple_min_invariant (to)));
2832 
2833   if (from != to)
2834     {
2835       if (currval == from)
2836 	{
2837 	  if (dump_file && (dump_flags & TDF_DETAILS))
2838 	    {
2839 	      fprintf (dump_file, "Not changing value number of ");
2840 	      print_generic_expr (dump_file, from, 0);
2841 	      fprintf (dump_file, " from VARYING to ");
2842 	      print_generic_expr (dump_file, to, 0);
2843 	      fprintf (dump_file, "\n");
2844 	    }
2845 	  return false;
2846 	}
2847       else if (TREE_CODE (to) == SSA_NAME
2848 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2849 	to = from;
2850     }
2851 
2852   if (dump_file && (dump_flags & TDF_DETAILS))
2853     {
2854       fprintf (dump_file, "Setting value number of ");
2855       print_generic_expr (dump_file, from, 0);
2856       fprintf (dump_file, " to ");
2857       print_generic_expr (dump_file, to, 0);
2858     }
2859 
2860   if (currval != to
2861       && !operand_equal_p (currval, to, 0)
2862       /* ???  For addresses involving volatile objects or types operand_equal_p
2863          does not reliably detect ADDR_EXPRs as equal.  We know we are only
2864 	 getting invariant gimple addresses here, so can use
2865 	 get_addr_base_and_unit_offset to do this comparison.  */
2866       && !(TREE_CODE (currval) == ADDR_EXPR
2867 	   && TREE_CODE (to) == ADDR_EXPR
2868 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2869 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2870 	   && coff == toff))
2871     {
2872       VN_INFO (from)->valnum = to;
2873       if (dump_file && (dump_flags & TDF_DETAILS))
2874 	fprintf (dump_file, " (changed)\n");
2875       return true;
2876     }
2877   if (dump_file && (dump_flags & TDF_DETAILS))
2878     fprintf (dump_file, "\n");
2879   return false;
2880 }
2881 
2882 /* Mark as processed all the definitions in the defining stmt of USE, or
2883    the USE itself.  */
2884 
2885 static void
2886 mark_use_processed (tree use)
2887 {
2888   ssa_op_iter iter;
2889   def_operand_p defp;
2890   gimple stmt = SSA_NAME_DEF_STMT (use);
2891 
2892   if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2893     {
2894       VN_INFO (use)->use_processed = true;
2895       return;
2896     }
2897 
2898   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2899     {
2900       tree def = DEF_FROM_PTR (defp);
2901 
2902       VN_INFO (def)->use_processed = true;
2903     }
2904 }
2905 
2906 /* Value number all definitions in STMT to themselves.
2907    Return true if a value number changed. */
2908 
2909 static bool
2910 defs_to_varying (gimple stmt)
2911 {
2912   bool changed = false;
2913   ssa_op_iter iter;
2914   def_operand_p defp;
2915 
2916   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2917     {
2918       tree def = DEF_FROM_PTR (defp);
2919       changed |= set_ssa_val_to (def, def);
2920     }
2921   return changed;
2922 }
2923 
2924 static bool expr_has_constants (tree expr);
2925 
2926 /* Visit a copy between LHS and RHS, return true if the value number
2927    changed.  */
2928 
2929 static bool
2930 visit_copy (tree lhs, tree rhs)
2931 {
2932   /* The RHS may stand for a more interesting constant-filled expression
2933      (the copy itself doesn't, since its RHS is just an SSA name).  */
2934   VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2935   VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2936 
2937   /* And finally valueize.  */
2938   rhs = SSA_VAL (rhs);
2939 
2940   return set_ssa_val_to (lhs, rhs);
2941 }
2942 
2943 /* Visit a nary operator RHS, value number it, and return true if the
2944    value number of LHS has changed as a result.  */
2945 
2946 static bool
2947 visit_nary_op (tree lhs, gimple stmt)
2948 {
2949   bool changed = false;
2950   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2951 
2952   if (result)
2953     changed = set_ssa_val_to (lhs, result);
2954   else
2955     {
2956       changed = set_ssa_val_to (lhs, lhs);
2957       vn_nary_op_insert_stmt (stmt, lhs);
2958     }
2959 
2960   return changed;
2961 }
2962 
2963 /* Visit a call STMT storing into LHS.  Return true if the value number
2964    of the LHS has changed as a result.  */
2965 
2966 static bool
2967 visit_reference_op_call (tree lhs, gcall *stmt)
2968 {
2969   bool changed = false;
2970   struct vn_reference_s vr1;
2971   vn_reference_t vnresult = NULL;
2972   tree vdef = gimple_vdef (stmt);
2973 
2974   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
2975   if (lhs && TREE_CODE (lhs) != SSA_NAME)
2976     lhs = NULL_TREE;
2977 
2978   vn_reference_lookup_call (stmt, &vnresult, &vr1);
2979   if (vnresult)
2980     {
2981       if (vnresult->result_vdef && vdef)
2982 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2983       else if (vdef)
2984 	/* If the call was discovered to be pure or const reflect
2985 	   that as far as possible.  */
2986 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
2987 
2988       if (!vnresult->result && lhs)
2989 	vnresult->result = lhs;
2990 
2991       if (vnresult->result && lhs)
2992 	{
2993 	  changed |= set_ssa_val_to (lhs, vnresult->result);
2994 
2995 	  if (VN_INFO (vnresult->result)->has_constants)
2996 	    VN_INFO (lhs)->has_constants = true;
2997 	}
2998     }
2999   else
3000     {
3001       vn_reference_t vr2;
3002       vn_reference_s **slot;
3003       if (vdef)
3004 	changed |= set_ssa_val_to (vdef, vdef);
3005       if (lhs)
3006 	changed |= set_ssa_val_to (lhs, lhs);
3007       vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
3008       vr2->vuse = vr1.vuse;
3009       /* As we are not walking the virtual operand chain we know the
3010 	 shared_lookup_references are still original so we can re-use
3011 	 them here.  */
3012       vr2->operands = vr1.operands.copy ();
3013       vr2->type = vr1.type;
3014       vr2->set = vr1.set;
3015       vr2->hashcode = vr1.hashcode;
3016       vr2->result = lhs;
3017       vr2->result_vdef = vdef;
3018       slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3019 							    INSERT);
3020       gcc_assert (!*slot);
3021       *slot = vr2;
3022     }
3023 
3024   return changed;
3025 }
3026 
3027 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3028    and return true if the value number of the LHS has changed as a result.  */
3029 
3030 static bool
3031 visit_reference_op_load (tree lhs, tree op, gimple stmt)
3032 {
3033   bool changed = false;
3034   tree last_vuse;
3035   tree result;
3036 
3037   last_vuse = gimple_vuse (stmt);
3038   last_vuse_ptr = &last_vuse;
3039   result = vn_reference_lookup (op, gimple_vuse (stmt),
3040 				default_vn_walk_kind, NULL, true);
3041   last_vuse_ptr = NULL;
3042 
3043   /* We handle type-punning through unions by value-numbering based
3044      on offset and size of the access.  Be prepared to handle a
3045      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
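  /* E.g. a float-typed load from a location last stored to as an int
     gets VIEW_CONVERT_EXPR <float> of the stored int value.  */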
3046   if (result
3047       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3048     {
3049       /* We will be setting the value number of lhs to the value number
3050 	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3051 	 So first simplify and lookup this expression to see if it
3052 	 is already available.  */
3053       tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3054       if ((CONVERT_EXPR_P (val)
3055 	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3056 	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
3057         {
3058 	  tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
3059 	  if ((CONVERT_EXPR_P (tem)
3060 	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
3061 	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3062 						    TREE_TYPE (val), tem)))
3063 	    val = tem;
3064 	}
3065       result = val;
3066       if (!is_gimple_min_invariant (val)
3067 	  && TREE_CODE (val) != SSA_NAME)
3068 	result = vn_nary_op_lookup (val, NULL);
3069       /* If the expression is not yet available, value-number lhs to
3070 	 a new SSA_NAME we create.  */
3071       if (!result)
3072         {
3073 	  result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3074 				       "vntemp");
3075 	  /* Initialize value-number information properly.  */
3076 	  VN_INFO_GET (result)->valnum = result;
3077 	  VN_INFO (result)->value_id = get_next_value_id ();
3078 	  VN_INFO (result)->expr = val;
3079 	  VN_INFO (result)->has_constants = expr_has_constants (val);
3080 	  VN_INFO (result)->needs_insertion = true;
3081 	  /* As all "inserted" statements are singleton SCCs, insert
3082 	     to the valid table.  This is strictly needed to
3083 	     avoid re-generating new value SSA_NAMEs for the same
3084 	     expression during SCC iteration over and over (the
3085 	     optimistic table gets cleared after each iteration).
3086 	     We do not need to insert into the optimistic table, as
3087 	     lookups there will fall back to the valid table.  */
3088 	  if (current_info == optimistic_info)
3089 	    {
3090 	      current_info = valid_info;
3091 	      vn_nary_op_insert (val, result);
3092 	      current_info = optimistic_info;
3093 	    }
3094 	  else
3095 	    vn_nary_op_insert (val, result);
3096 	  if (dump_file && (dump_flags & TDF_DETAILS))
3097 	    {
3098 	      fprintf (dump_file, "Inserting name ");
3099 	      print_generic_expr (dump_file, result, 0);
3100 	      fprintf (dump_file, " for expression ");
3101 	      print_generic_expr (dump_file, val, 0);
3102 	      fprintf (dump_file, "\n");
3103 	    }
3104 	}
3105     }
3106 
3107   if (result)
3108     {
3109       changed = set_ssa_val_to (lhs, result);
3110       if (TREE_CODE (result) == SSA_NAME
3111 	  && VN_INFO (result)->has_constants)
3112 	{
3113 	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3114 	  VN_INFO (lhs)->has_constants = true;
3115 	}
3116     }
3117   else
3118     {
3119       changed = set_ssa_val_to (lhs, lhs);
3120       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3121     }
3122 
3123   return changed;
3124 }
3125 
3126 
3127 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3128    and return true if the value number of the LHS has changed as a result.  */
3129 
3130 static bool
3131 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3132 {
3133   bool changed = false;
3134   vn_reference_t vnresult = NULL;
3135   tree assign;
3136   bool resultsame = false;
3137   tree vuse = gimple_vuse (stmt);
3138   tree vdef = gimple_vdef (stmt);
3139 
3140   if (TREE_CODE (op) == SSA_NAME)
3141     op = SSA_VAL (op);
3142 
3143   /* First we want to lookup using the *vuses* from the store and see
3144      if the last store to this location with the same address had
3145      the same value.
3146 
3147      The vuses represent the memory state before the store.  If the
3148      memory state, address, and value of the store are the same as
3149      those of the last store to this location, then this store will
3150      produce the same memory state as that store.
3151 
3152      In this case the vdef versions for this store are value numbered to those
3153      vuse versions, since they represent the same memory state after
3154      this store.
3155 
3156      Otherwise, the vdefs for the store are used when inserting into
3157      the table, since the store generates a new memory state.  */
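
  /* For example (illustrative GIMPLE), in

	 # .MEM_2 = VDEF <.MEM_1>
	 a.f = x_1;
	 # .MEM_3 = VDEF <.MEM_2>
	 a.f = x_1;

     the second store is redundant and its vdef .MEM_3 is
     value-numbered to .MEM_2.  */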
3158 
3159   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3160   if (vnresult
3161       && vnresult->result)
3162     {
3163       tree result = vnresult->result;
3164       if (TREE_CODE (result) == SSA_NAME)
3165 	result = SSA_VAL (result);
3166       resultsame = expressions_equal_p (result, op);
3167       if (resultsame)
3168 	{
3169 	  /* If the TBAA state isn't compatible for downstream reads
3170 	     we cannot value-number the VDEFs the same.  */
3171 	  alias_set_type set = get_alias_set (lhs);
3172 	  if (vnresult->set != set
3173 	      && ! alias_set_subset_of (set, vnresult->set))
3174 	    resultsame = false;
3175 	}
3176     }
3177 
3178   if (!resultsame)
3179     {
3180       /* Only perform the following when being called from PRE
3181 	 which embeds tail merging.  */
3182       if (default_vn_walk_kind == VN_WALK)
3183 	{
3184 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3185 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3186 	  if (vnresult)
3187 	    {
3188 	      VN_INFO (vdef)->use_processed = true;
3189 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
3190 	    }
3191 	}
3192 
3193       if (dump_file && (dump_flags & TDF_DETAILS))
3194 	{
3195 	  fprintf (dump_file, "No store match\n");
3196 	  fprintf (dump_file, "Value numbering store ");
3197 	  print_generic_expr (dump_file, lhs, 0);
3198 	  fprintf (dump_file, " to ");
3199 	  print_generic_expr (dump_file, op, 0);
3200 	  fprintf (dump_file, "\n");
3201 	}
3202       /* Have to set value numbers before insert, since insert is
3203 	 going to valueize the references in-place.  */
3204       if (vdef)
3205 	changed |= set_ssa_val_to (vdef, vdef);
3206 
3207       /* Do not insert structure copies into the tables.  */
3208       if (is_gimple_min_invariant (op)
3209 	  || is_gimple_reg (op))
3210         vn_reference_insert (lhs, op, vdef, NULL);
3211 
3212       /* Only perform the following when being called from PRE
3213 	 which embeds tail merging.  */
3214       if (default_vn_walk_kind == VN_WALK)
3215 	{
3216 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3217 	  vn_reference_insert (assign, lhs, vuse, vdef);
3218 	}
3219     }
3220   else
3221     {
3222       /* We had a match, so value number the vdef to have the value
3223 	 number of the vuse it came from.  */
3224 
3225       if (dump_file && (dump_flags & TDF_DETAILS))
3226 	fprintf (dump_file, "Store matched earlier value, "
3227 		 "value numbering store vdefs to matching vuses.\n");
3228 
3229       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3230     }
3231 
3232   return changed;
3233 }
3234 
3235 /* Visit and value number PHI, return true if the value number
3236    changed.  */
3237 
3238 static bool
3239 visit_phi (gimple phi)
3240 {
3241   bool changed = false;
3242   tree result;
3243   tree sameval = VN_TOP;
3244   bool allsame = true;
3245 
3246   /* TODO: We could check for this in init_sccvn, and replace this
3247      with a gcc_assert.  */
3248   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3249     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3250 
3251   /* See if all non-TOP arguments have the same value.  TOP is
3252      equivalent to everything, so we can ignore it.  */
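  /* E.g. x_3 = PHI <x_1, x_1, y_2> value-numbers to x_1 when y_2
     still has value VN_TOP (it is optimistically ignored).  */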
3253   edge_iterator ei;
3254   edge e;
3255   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3256     if (e->flags & EDGE_EXECUTABLE)
3257       {
3258 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3259 
3260 	if (TREE_CODE (def) == SSA_NAME)
3261 	  def = SSA_VAL (def);
3262 	if (def == VN_TOP)
3263 	  continue;
3264 	if (sameval == VN_TOP)
3265 	  {
3266 	    sameval = def;
3267 	  }
3268 	else
3269 	  {
3270 	    if (!expressions_equal_p (def, sameval))
3271 	      {
3272 		allsame = false;
3273 		break;
3274 	      }
3275 	  }
3276       }
3277 
3278   /* If all value numbered to the same value, the phi node has that
3279      value.  */
3280   if (allsame)
3281     return set_ssa_val_to (PHI_RESULT (phi), sameval);
3282 
3283   /* Otherwise, see if it is equivalent to a phi node in this block.  */
3284   result = vn_phi_lookup (phi);
3285   if (result)
3286     changed = set_ssa_val_to (PHI_RESULT (phi), result);
3287   else
3288     {
3289       vn_phi_insert (phi, PHI_RESULT (phi));
3290       VN_INFO (PHI_RESULT (phi))->has_constants = false;
3291       VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3292       changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3293     }
3294 
3295   return changed;
3296 }
3297 
3298 /* Return true if EXPR contains constants.  */
3299 
3300 static bool
3301 expr_has_constants (tree expr)
3302 {
3303   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3304     {
3305     case tcc_unary:
3306       return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3307 
3308     case tcc_binary:
3309       return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3310 	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3311       /* Constants inside reference ops are rarely interesting, but
3312 	 it can take a lot of looking to find them.  */
3313     case tcc_reference:
3314     case tcc_declaration:
3315       return false;
3316     default:
3317       return is_gimple_min_invariant (expr);
3318     }
3319   return false;
3320 }
3321 
3322 /* Return true if STMT contains constants.  */
3323 
3324 static bool
3325 stmt_has_constants (gimple stmt)
3326 {
3327   tree tem;
3328 
3329   if (gimple_code (stmt) != GIMPLE_ASSIGN)
3330     return false;
3331 
3332   switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3333     {
3334     case GIMPLE_TERNARY_RHS:
3335       tem = gimple_assign_rhs3 (stmt);
3336       if (TREE_CODE (tem) == SSA_NAME)
3337 	tem = SSA_VAL (tem);
3338       if (is_gimple_min_invariant (tem))
3339 	return true;
3340       /* Fallthru.  */
3341 
3342     case GIMPLE_BINARY_RHS:
3343       tem = gimple_assign_rhs2 (stmt);
3344       if (TREE_CODE (tem) == SSA_NAME)
3345 	tem = SSA_VAL (tem);
3346       if (is_gimple_min_invariant (tem))
3347 	return true;
3348       /* Fallthru.  */
3349 
3350     case GIMPLE_SINGLE_RHS:
3351       /* Constants inside reference ops are rarely interesting, but
3352 	 it can take a lot of looking to find them.  */
3353     case GIMPLE_UNARY_RHS:
3354       tem = gimple_assign_rhs1 (stmt);
3355       if (TREE_CODE (tem) == SSA_NAME)
3356 	tem = SSA_VAL (tem);
3357       return is_gimple_min_invariant (tem);
3358 
3359     default:
3360       gcc_unreachable ();
3361     }
3362   return false;
3363 }
3364 
3365 /* Simplify the binary expression in STMT, and return the result if
3366    simplified. */
3367 
3368 static tree
3369 simplify_binary_expression (gimple stmt)
3370 {
3371   tree result = NULL_TREE;
3372   tree op0 = gimple_assign_rhs1 (stmt);
3373   tree op1 = gimple_assign_rhs2 (stmt);
3374   enum tree_code code = gimple_assign_rhs_code (stmt);
3375 
3376   /* This will not catch every single case we could combine, but will
3377      catch those with constants.  The goal here is to combine
3378      constants between expressions while avoiding infinite expansion
3379      of expressions during simplification.  */
3380   op0 = vn_valueize (op0);
3381   if (TREE_CODE (op0) == SSA_NAME
3382       && (VN_INFO (op0)->has_constants
3383 	  || TREE_CODE_CLASS (code) == tcc_comparison
3384 	  || code == COMPLEX_EXPR))
3385     op0 = vn_get_expr_for (op0);
3386 
3387   op1 = vn_valueize (op1);
3388   if (TREE_CODE (op1) == SSA_NAME
3389       && (VN_INFO (op1)->has_constants
3390 	  || code == COMPLEX_EXPR))
3391     op1 = vn_get_expr_for (op1);
3392 
3393   /* Pointer plus constant can be represented as invariant address.
3394      Do so to allow further propagation, see also tree forwprop.  */
3395   if (code == POINTER_PLUS_EXPR
3396       && tree_fits_uhwi_p (op1)
3397       && TREE_CODE (op0) == ADDR_EXPR
3398       && is_gimple_min_invariant (op0))
3399     return build_invariant_address (TREE_TYPE (op0),
3400 				    TREE_OPERAND (op0, 0),
3401 				    tree_to_uhwi (op1));
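
  /* E.g. the valueized &a p+ 4 becomes (roughly) the invariant
     address &MEM[&a + 4].  */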
3402 
3403   /* Avoid folding if nothing changed.  */
3404   if (op0 == gimple_assign_rhs1 (stmt)
3405       && op1 == gimple_assign_rhs2 (stmt))
3406     return NULL_TREE;
3407 
3408   fold_defer_overflow_warnings ();
3409 
3410   result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3411   if (result)
3412     STRIP_USELESS_TYPE_CONVERSION (result);
3413 
3414   fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3415 				  stmt, 0);
3416 
3417   /* Make sure result is not a complex expression consisting
3418      of operators of operators (i.e. (a + b) + (a + c)).
3419      Otherwise, we will end up with unbounded expressions if
3420      fold does anything at all.  */
3421   if (result && valid_gimple_rhs_p (result))
3422     return result;
3423 
3424   return NULL_TREE;
3425 }
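/* Illustrative example for the POINTER_PLUS_EXPR case above: with

     a_1 = &x;
     b_2 = a_1 + 4;

   the valueized rhs becomes an invariant address roughly of the form
   &MEM[&x + 4] (the exact tree shape is up to build_invariant_address),
   which later propagation can consume directly.  */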
3426 
3427 /* Simplify the unary expression on the rhs of STMT, and return the
3428    result if simplified.  */
3429 
3430 static tree
3431 simplify_unary_expression (gassign *stmt)
3432 {
3433   tree result = NULL_TREE;
3434   tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3435   enum tree_code code = gimple_assign_rhs_code (stmt);
3436 
3437   /* We handle some tcc_reference codes here that are all
3438      GIMPLE_ASSIGN_SINGLE codes.  */
3439   if (code == REALPART_EXPR
3440       || code == IMAGPART_EXPR
3441       || code == VIEW_CONVERT_EXPR
3442       || code == BIT_FIELD_REF)
3443     op0 = TREE_OPERAND (op0, 0);
3444 
3445   orig_op0 = op0;
3446   op0 = vn_valueize (op0);
3447   if (TREE_CODE (op0) == SSA_NAME)
3448     {
3449       if (VN_INFO (op0)->has_constants)
3450 	op0 = vn_get_expr_for (op0);
3451       else if (CONVERT_EXPR_CODE_P (code)
3452 	       || code == REALPART_EXPR
3453 	       || code == IMAGPART_EXPR
3454 	       || code == VIEW_CONVERT_EXPR
3455 	       || code == BIT_FIELD_REF)
3456 	{
3457 	  /* We want to do tree-combining on conversion-like expressions.
3458 	     Make sure we feed only SSA_NAMEs or constants to fold though.  */
3459 	  tree tem = vn_get_expr_for (op0);
3460 	  if (UNARY_CLASS_P (tem)
3461 	      || BINARY_CLASS_P (tem)
3462 	      || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3463 	      || TREE_CODE (tem) == SSA_NAME
3464 	      || TREE_CODE (tem) == CONSTRUCTOR
3465 	      || is_gimple_min_invariant (tem))
3466 	    op0 = tem;
3467 	}
3468     }
3469 
3470   /* Avoid folding if nothing changed, but remember the expression.  */
3471   if (op0 == orig_op0)
3472     return NULL_TREE;
3473 
3474   if (code == BIT_FIELD_REF)
3475     {
3476       tree rhs = gimple_assign_rhs1 (stmt);
3477       result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3478 			     op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3479     }
3480   else
3481     result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3482   if (result)
3483     {
3484       STRIP_USELESS_TYPE_CONVERSION (result);
3485       if (valid_gimple_rhs_p (result))
3486         return result;
3487     }
3488 
3489   return NULL_TREE;
3490 }
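/* Illustrative example of the conversion combining done above:

     l_1 = (long) i_2;    <-- i_2 of type int
     x_3 = (int) l_1;

   feeding the expression of l_1 to fold lets the widen/narrow pair
   cancel, so x_3 can simplify to i_2 without materializing the
   intermediate conversion.  */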
3491 
3492 /* Try to simplify STMT's rhs using equivalences and constant folding.  */
3493 
3494 static tree
3495 try_to_simplify (gassign *stmt)
3496 {
3497   enum tree_code code = gimple_assign_rhs_code (stmt);
3498   tree tem;
3499 
3500   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
3501      in this case; there is no point in doing extra work.  */
3502   if (code == SSA_NAME)
3503     return NULL_TREE;
3504 
3505   /* First try constant folding based on our current lattice.  */
3506   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3507   if (tem
3508       && (TREE_CODE (tem) == SSA_NAME
3509 	  || is_gimple_min_invariant (tem)))
3510     return tem;
3511 
3512   /* If that didn't work try combining multiple statements.  */
3513   switch (TREE_CODE_CLASS (code))
3514     {
3515     case tcc_reference:
3516       /* Fallthrough for some unary codes that can operate on registers.  */
3517       if (!(code == REALPART_EXPR
3518 	    || code == IMAGPART_EXPR
3519 	    || code == VIEW_CONVERT_EXPR
3520 	    || code == BIT_FIELD_REF))
3521 	break;
3522       /* We could do a little more with unary ops, if they expand
3523 	 into binary ops, but it's debatable whether it is worth it. */
3524     case tcc_unary:
3525       return simplify_unary_expression (stmt);
3526 
3527     case tcc_comparison:
3528     case tcc_binary:
3529       return simplify_binary_expression (stmt);
3530 
3531     default:
3532       break;
3533     }
3534 
3535   return NULL_TREE;
3536 }
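/* Example of the two-step strategy above: if SSA_VAL (a_1) is the
   constant 3, then for b_2 = a_1 + 4 the lattice-based
   gimple_fold_stmt_to_constant_1 already yields 7 and the combining
   helpers are never consulted; they only run when pure constant
   folding through vn_valueize fails.  */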
3537 
3538 /* Visit and value-number USE; return true if the value number
3539    changed.  */
3540 
3541 static bool
3542 visit_use (tree use)
3543 {
3544   bool changed = false;
3545   gimple stmt = SSA_NAME_DEF_STMT (use);
3546 
3547   mark_use_processed (use);
3548 
3549   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3550   if (dump_file && (dump_flags & TDF_DETAILS)
3551       && !SSA_NAME_IS_DEFAULT_DEF (use))
3552     {
3553       fprintf (dump_file, "Value numbering ");
3554       print_generic_expr (dump_file, use, 0);
3555       fprintf (dump_file, " stmt = ");
3556       print_gimple_stmt (dump_file, stmt, 0, 0);
3557     }
3558 
3559   /* Handle uninitialized uses.  */
3560   if (SSA_NAME_IS_DEFAULT_DEF (use))
3561     changed = set_ssa_val_to (use, use);
3562   else
3563     {
3564       if (gimple_code (stmt) == GIMPLE_PHI)
3565 	changed = visit_phi (stmt);
3566       else if (gimple_has_volatile_ops (stmt))
3567 	changed = defs_to_varying (stmt);
3568       else if (is_gimple_assign (stmt))
3569 	{
3570 	  enum tree_code code = gimple_assign_rhs_code (stmt);
3571 	  tree lhs = gimple_assign_lhs (stmt);
3572 	  tree rhs1 = gimple_assign_rhs1 (stmt);
3573 	  tree simplified;
3574 
3575 	  /* Shortcut for copies. Simplifying copies is pointless,
3576 	     since we copy the expression and value they represent.  */
3577 	  if (code == SSA_NAME
3578 	      && TREE_CODE (lhs) == SSA_NAME)
3579 	    {
3580 	      changed = visit_copy (lhs, rhs1);
3581 	      goto done;
3582 	    }
3583 	  simplified = try_to_simplify (as_a <gassign *> (stmt));
3584 	  if (simplified)
3585 	    {
3586 	      if (dump_file && (dump_flags & TDF_DETAILS))
3587 		{
3588 		  fprintf (dump_file, "RHS ");
3589 		  print_gimple_expr (dump_file, stmt, 0, 0);
3590 		  fprintf (dump_file, " simplified to ");
3591 		  print_generic_expr (dump_file, simplified, 0);
3592 		  if (TREE_CODE (lhs) == SSA_NAME)
3593 		    fprintf (dump_file, " has constants %d\n",
3594 			     expr_has_constants (simplified));
3595 		  else
3596 		    fprintf (dump_file, "\n");
3597 		}
3598 	    }
3599 	  /* Setting value numbers to constants will occasionally
3600 	     screw up phi congruence because constants are not
3601 	     uniquely associated with a single ssa name that can be
3602 	     looked up.  */
3603 	  if (simplified
3604 	      && is_gimple_min_invariant (simplified)
3605 	      && TREE_CODE (lhs) == SSA_NAME)
3606 	    {
3607 	      VN_INFO (lhs)->expr = simplified;
3608 	      VN_INFO (lhs)->has_constants = true;
3609 	      changed = set_ssa_val_to (lhs, simplified);
3610 	      goto done;
3611 	    }
3612 	  else if (simplified
3613 		   && TREE_CODE (simplified) == SSA_NAME
3614 		   && TREE_CODE (lhs) == SSA_NAME)
3615 	    {
3616 	      changed = visit_copy (lhs, simplified);
3617 	      goto done;
3618 	    }
3619 	  else if (simplified)
3620 	    {
3621 	      if (TREE_CODE (lhs) == SSA_NAME)
3622 		{
3623 		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3624 		  /* We have to unshare the expression or else
3625 		     valuizing may change the IL stream.  */
3626 		  VN_INFO (lhs)->expr = unshare_expr (simplified);
3627 		}
3628 	    }
3629 	  else if (stmt_has_constants (stmt)
3630 		   && TREE_CODE (lhs) == SSA_NAME)
3631 	    VN_INFO (lhs)->has_constants = true;
3632 	  else if (TREE_CODE (lhs) == SSA_NAME)
3633 	    {
3634 	      /* We reset expr and constantness here because we may
3635 		 have been value numbering optimistically, and
3636 		 iterating. They may become non-constant in this case,
3637 		 even if they were optimistically constant. */
3638 
3639 	      VN_INFO (lhs)->has_constants = false;
3640 	      VN_INFO (lhs)->expr = NULL_TREE;
3641 	    }
3642 
3643 	  if ((TREE_CODE (lhs) == SSA_NAME
3644 	       /* We can substitute SSA_NAMEs that are live over
3645 		  abnormal edges with their constant value.  */
3646 	       && !(gimple_assign_copy_p (stmt)
3647 		    && is_gimple_min_invariant (rhs1))
3648 	       && !(simplified
3649 		    && is_gimple_min_invariant (simplified))
3650 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3651 	      /* Stores or copies from SSA_NAMEs that are live over
3652 		 abnormal edges are a problem.  */
3653 	      || (code == SSA_NAME
3654 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3655 	    changed = defs_to_varying (stmt);
3656 	  else if (REFERENCE_CLASS_P (lhs)
3657 		   || DECL_P (lhs))
3658 	    changed = visit_reference_op_store (lhs, rhs1, stmt);
3659 	  else if (TREE_CODE (lhs) == SSA_NAME)
3660 	    {
3661 	      if ((gimple_assign_copy_p (stmt)
3662 		   && is_gimple_min_invariant (rhs1))
3663 		  || (simplified
3664 		      && is_gimple_min_invariant (simplified)))
3665 		{
3666 		  VN_INFO (lhs)->has_constants = true;
3667 		  if (simplified)
3668 		    changed = set_ssa_val_to (lhs, simplified);
3669 		  else
3670 		    changed = set_ssa_val_to (lhs, rhs1);
3671 		}
3672 	      else
3673 		{
3674 		  /* First try to lookup the simplified expression.  */
3675 		  if (simplified)
3676 		    {
3677 		      enum gimple_rhs_class rhs_class;
3678 
3679 
3680 		      rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3681 		      if ((rhs_class == GIMPLE_UNARY_RHS
3682 			   || rhs_class == GIMPLE_BINARY_RHS
3683 			   || rhs_class == GIMPLE_TERNARY_RHS)
3684 			  && valid_gimple_rhs_p (simplified))
3685 			{
3686 			  tree result = vn_nary_op_lookup (simplified, NULL);
3687 			  if (result)
3688 			    {
3689 			      changed = set_ssa_val_to (lhs, result);
3690 			      goto done;
3691 			    }
3692 			}
3693 		    }
3694 
3695 		  /* Otherwise visit the original statement.  */
3696 		  switch (vn_get_stmt_kind (stmt))
3697 		    {
3698 		    case VN_NARY:
3699 		      changed = visit_nary_op (lhs, stmt);
3700 		      break;
3701 		    case VN_REFERENCE:
3702 		      changed = visit_reference_op_load (lhs, rhs1, stmt);
3703 		      break;
3704 		    default:
3705 		      changed = defs_to_varying (stmt);
3706 		      break;
3707 		    }
3708 		}
3709 	    }
3710 	  else
3711 	    changed = defs_to_varying (stmt);
3712 	}
3713       else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3714 	{
3715 	  tree lhs = gimple_call_lhs (stmt);
3716 	  if (lhs && TREE_CODE (lhs) == SSA_NAME)
3717 	    {
3718 	      /* Try constant folding based on our current lattice.  */
3719 	      tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3720 								vn_valueize);
3721 	      if (simplified)
3722 		{
3723 		  if (dump_file && (dump_flags & TDF_DETAILS))
3724 		    {
3725 		      fprintf (dump_file, "call ");
3726 		      print_gimple_expr (dump_file, stmt, 0, 0);
3727 		      fprintf (dump_file, " simplified to ");
3728 		      print_generic_expr (dump_file, simplified, 0);
3729 		      if (TREE_CODE (lhs) == SSA_NAME)
3730 			fprintf (dump_file, " has constants %d\n",
3731 				 expr_has_constants (simplified));
3732 		      else
3733 			fprintf (dump_file, "\n");
3734 		    }
3735 		}
3736 	      /* Setting value numbers to constants will occasionally
3737 		 screw up phi congruence because constants are not
3738 		 uniquely associated with a single ssa name that can be
3739 		 looked up.  */
3740 	      if (simplified
3741 		  && is_gimple_min_invariant (simplified))
3742 		{
3743 		  VN_INFO (lhs)->expr = simplified;
3744 		  VN_INFO (lhs)->has_constants = true;
3745 		  changed = set_ssa_val_to (lhs, simplified);
3746 		  if (gimple_vdef (stmt))
3747 		    changed |= set_ssa_val_to (gimple_vdef (stmt),
3748 					       SSA_VAL (gimple_vuse (stmt)));
3749 		  goto done;
3750 		}
3751 	      else if (simplified
3752 		       && TREE_CODE (simplified) == SSA_NAME)
3753 		{
3754 		  changed = visit_copy (lhs, simplified);
3755 		  if (gimple_vdef (stmt))
3756 		    changed |= set_ssa_val_to (gimple_vdef (stmt),
3757 					       SSA_VAL (gimple_vuse (stmt)));
3758 		  goto done;
3759 		}
3760 	      else
3761 		{
3762 		  if (stmt_has_constants (stmt))
3763 		    VN_INFO (lhs)->has_constants = true;
3764 		  else
3765 		    {
3766 		      /* We reset expr and constantness here because we may
3767 			 have been value numbering optimistically, and
3768 			 iterating.  They may become non-constant in this case,
3769 			 even if they were optimistically constant.  */
3770 		      VN_INFO (lhs)->has_constants = false;
3771 		      VN_INFO (lhs)->expr = NULL_TREE;
3772 		    }
3773 
3774 		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3775 		    {
3776 		      changed = defs_to_varying (stmt);
3777 		      goto done;
3778 		    }
3779 		}
3780 	    }
3781 
3782 	  if (!gimple_call_internal_p (stmt)
3783 	      && (/* Calls to the same function with the same vuse
3784 		     and the same operands do not necessarily return the same
3785 		     value, unless they're pure or const.  */
3786 		  gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3787 		  /* If calls have a vdef, subsequent calls won't have
3788 		     the same incoming vuse.  So, if 2 calls with vdef have the
3789 		     same vuse, we know they're not subsequent.
3790 		     We can therefore give 2 non-subsequent calls to the same
3791 		     function with the same vuse and the same operands the same
3792 		     value number, because there is no code in the program that
3793 		     can compare the 2 values...  */
3794 		  || (gimple_vdef (stmt)
3795 		      /* ... unless the call returns a pointer which does
3796 		         not alias with anything else.  In which case the
3797 		         not alias with anything else, in which case the
3798 			 information that the values are distinct is encoded
3799 			 in the IL.  */
3800 		      /* Only perform the following when being called from PRE
3801 			 which embeds tail merging.  */
3802 		      && default_vn_walk_kind == VN_WALK)))
3803 	    changed = visit_reference_op_call (lhs, call_stmt);
3804 	  else
3805 	    changed = defs_to_varying (stmt);
3806 	}
3807       else
3808 	changed = defs_to_varying (stmt);
3809     }
3810  done:
3811   return changed;
3812 }
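/* Illustrative path through visit_use: for x_1 = y_2 * 2 with
   SSA_VAL (y_2) == 3, try_to_simplify folds the rhs to 6, the
   invariant branch above records VN_INFO (x_1)->expr = 6 and sets the
   value number of x_1 to the constant 6 directly, skipping the nary
   hash table lookup.  */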
3813 
3814 /* Compare two operands by reverse postorder index.  */
3815 
3816 static int
3817 compare_ops (const void *pa, const void *pb)
3818 {
3819   const tree opa = *((const tree *)pa);
3820   const tree opb = *((const tree *)pb);
3821   gimple opstmta = SSA_NAME_DEF_STMT (opa);
3822   gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3823   basic_block bba;
3824   basic_block bbb;
3825 
3826   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3827     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3828   else if (gimple_nop_p (opstmta))
3829     return -1;
3830   else if (gimple_nop_p (opstmtb))
3831     return 1;
3832 
3833   bba = gimple_bb (opstmta);
3834   bbb = gimple_bb (opstmtb);
3835 
3836   if (!bba && !bbb)
3837     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3838   else if (!bba)
3839     return -1;
3840   else if (!bbb)
3841     return 1;
3842 
3843   if (bba == bbb)
3844     {
3845       if (gimple_code (opstmta) == GIMPLE_PHI
3846 	  && gimple_code (opstmtb) == GIMPLE_PHI)
3847 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3848       else if (gimple_code (opstmta) == GIMPLE_PHI)
3849 	return -1;
3850       else if (gimple_code (opstmtb) == GIMPLE_PHI)
3851 	return 1;
3852       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3853         return gimple_uid (opstmta) - gimple_uid (opstmtb);
3854       else
3855 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3856     }
3857   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3858 }
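/* The total order established above: names defined by no statement
   (default definitions) come first, then definitions without a basic
   block, then blocks in RPO order; within one block PHI results
   precede other definitions, which are ordered by gimple UID, with
   the SSA version number as the final tie-breaker.  */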
3859 
3860 /* Sort an array containing members of a strongly connected component
3861    SCC so that the members are ordered by RPO number.
3862    This means that when the sort is complete, iterating through the
3863    array will give you the members in RPO order.  */
3864 
3865 static void
3866 sort_scc (vec<tree> scc)
3867 {
3868   scc.qsort (compare_ops);
3869 }
3870 
3871 /* Insert the no-longer-used nary ONARY into the hash table of INFO.  */
3872 
3873 static void
3874 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3875 {
3876   size_t size = sizeof_vn_nary_op (onary->length);
3877   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3878 					       &info->nary_obstack);
3879   memcpy (nary, onary, size);
3880   vn_nary_op_insert_into (nary, info->nary, false);
3881 }
3882 
3883 /* Insert the no-longer-used phi OPHI into the hash table of INFO.  */
3884 
3885 static void
3886 copy_phi (vn_phi_t ophi, vn_tables_t info)
3887 {
3888   vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3889   vn_phi_s **slot;
3890   memcpy (phi, ophi, sizeof (*phi));
3891   ophi->phiargs.create (0);
3892   slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3893   gcc_assert (!*slot);
3894   *slot = phi;
3895 }
3896 
3897 /* Insert the no-longer-used reference OREF into the hash table of INFO.  */
3898 
3899 static void
3900 copy_reference (vn_reference_t oref, vn_tables_t info)
3901 {
3902   vn_reference_t ref;
3903   vn_reference_s **slot;
3904   ref = (vn_reference_t) pool_alloc (info->references_pool);
3905   memcpy (ref, oref, sizeof (*ref));
3906   oref->operands.create (0);
3907   slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3908   if (*slot)
3909     free_reference (*slot);
3910   *slot = ref;
3911 }
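/* In copy_phi and copy_reference above, the memcpy hands the embedded
   vector (phiargs resp. operands) over to the new table entry;
   re-creating an empty vector in the old entry keeps the later
   teardown of the optimistic table from releasing that storage a
   second time.  */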
3912 
3913 /* Process a strongly connected component in the SSA graph.  */
3914 
3915 static void
3916 process_scc (vec<tree> scc)
3917 {
3918   tree var;
3919   unsigned int i;
3920   unsigned int iterations = 0;
3921   bool changed = true;
3922   vn_nary_op_iterator_type hin;
3923   vn_phi_iterator_type hip;
3924   vn_reference_iterator_type hir;
3925   vn_nary_op_t nary;
3926   vn_phi_t phi;
3927   vn_reference_t ref;
3928 
3929   /* If the SCC has a single member, just visit it.  */
3930   if (scc.length () == 1)
3931     {
3932       tree use = scc[0];
3933       if (VN_INFO (use)->use_processed)
3934 	return;
3935       /* We need to make sure it doesn't form a cycle itself, which can
3936 	 happen for self-referential PHI nodes.  In that case we would
3937 	 end up inserting an expression with VN_TOP operands into the
3938 	 valid table which makes us derive bogus equivalences later.
3939 	 The cheapest way to check this is to assume it for all PHI nodes.  */
3940       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3941 	/* Fallthru to iteration.  */ ;
3942       else
3943 	{
3944 	  visit_use (use);
3945 	  return;
3946 	}
3947     }
3948 
3949   if (dump_file && (dump_flags & TDF_DETAILS))
3950     print_scc (dump_file, scc);
3951 
3952   /* Iterate over the SCC with the optimistic table until it stops
3953      changing.  */
3954   current_info = optimistic_info;
3955   while (changed)
3956     {
3957       changed = false;
3958       iterations++;
3959       if (dump_file && (dump_flags & TDF_DETAILS))
3960 	fprintf (dump_file, "Starting iteration %d\n", iterations);
3961       /* As we are value-numbering optimistically we have to
3962 	 clear the expression tables and the simplified expressions
3963 	 in each iteration until we converge.  */
3964       optimistic_info->nary->empty ();
3965       optimistic_info->phis->empty ();
3966       optimistic_info->references->empty ();
3967       obstack_free (&optimistic_info->nary_obstack, NULL);
3968       gcc_obstack_init (&optimistic_info->nary_obstack);
3969       empty_alloc_pool (optimistic_info->phis_pool);
3970       empty_alloc_pool (optimistic_info->references_pool);
3971       FOR_EACH_VEC_ELT (scc, i, var)
3972 	VN_INFO (var)->expr = NULL_TREE;
3973       FOR_EACH_VEC_ELT (scc, i, var)
3974 	changed |= visit_use (var);
3975     }
3976 
3977   if (dump_file && (dump_flags & TDF_DETAILS))
3978     fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3979   statistics_histogram_event (cfun, "SCC iterations", iterations);
3980 
3981   /* Finally, copy the contents of the no longer used optimistic
3982      table to the valid table.  */
3983   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3984     copy_nary (nary, valid_info);
3985   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3986     copy_phi (phi, valid_info);
3987   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3988 			       ref, vn_reference_t, hir)
3989     copy_reference (ref, valid_info);
3990 
3991   current_info = valid_info;
3992 }
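/* Classic example of what the optimistic iteration above can prove:

     # i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   In iteration 1 the latch argument of the PHI is still VN_TOP, so
   i_1 is optimistically valued 0 and i_2 then folds to 0 as well.
   Iteration 2 sees PHI <0, 0>, nothing changes, and the SCC converges
   with both names value-numbered to the constant 0; a pessimistic
   walk could not have broken this cycle.  */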
3993 
3994 
3995 /* Pop the components of the found SCC for NAME off the SCC stack
3996    and process them.  Returns true if all went well, false if
3997    we run into resource limits.  */
3998 
3999 static bool
4000 extract_and_process_scc_for_name (tree name)
4001 {
4002   auto_vec<tree> scc;
4003   tree x;
4004 
4005   /* Found an SCC; pop the components off the SCC stack and
4006      process them.  */
4007   do
4008     {
4009       x = sccstack.pop ();
4010 
4011       VN_INFO (x)->on_sccstack = false;
4012       scc.safe_push (x);
4013     } while (x != name);
4014 
4015   /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
4016   if (scc.length ()
4017       > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4018     {
4019       if (dump_file)
4020 	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4021 		 "SCC size %u exceeding %u\n", scc.length (),
4022 		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4023 
4024       return false;
4025     }
4026 
4027   if (scc.length () > 1)
4028     sort_scc (scc);
4029 
4030   process_scc (scc);
4031 
4032   return true;
4033 }
4034 
4035 /* Depth-first search on NAME to discover and process SCCs in the SSA
4036    graph.
4037    Execution of this algorithm relies on the fact that the SCCs are
4038    popped off the stack in topological order.
4039    Returns true if successful, false if we stopped processing SCCs due
4040    to resource constraints.  */
4041 
4042 static bool
4043 DFS (tree name)
4044 {
4045   vec<ssa_op_iter> itervec = vNULL;
4046   vec<tree> namevec = vNULL;
4047   use_operand_p usep = NULL;
4048   gimple defstmt;
4049   tree use;
4050   ssa_op_iter iter;
4051 
4052 start_over:
4053   /* SCC info.  */
4054   VN_INFO (name)->dfsnum = next_dfs_num++;
4055   VN_INFO (name)->visited = true;
4056   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4057 
4058   sccstack.safe_push (name);
4059   VN_INFO (name)->on_sccstack = true;
4060   defstmt = SSA_NAME_DEF_STMT (name);
4061 
4062   /* Recursively DFS on our operands, looking for SCC's.  */
4063   if (!gimple_nop_p (defstmt))
4064     {
4065       /* Push a new iterator.  */
4066       if (gphi *phi = dyn_cast <gphi *> (defstmt))
4067 	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4068       else
4069 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4070     }
4071   else
4072     clear_and_done_ssa_iter (&iter);
4073 
4074   while (1)
4075     {
4076       /* If we are done processing uses of a name, go up the stack
4077 	 of iterators and process SCCs as we found them.  */
4078       if (op_iter_done (&iter))
4079 	{
4080 	  /* See if we found an SCC.  */
4081 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4082 	    if (!extract_and_process_scc_for_name (name))
4083 	      {
4084 		namevec.release ();
4085 		itervec.release ();
4086 		return false;
4087 	      }
4088 
4089 	  /* Check if we are done.  */
4090 	  if (namevec.is_empty ())
4091 	    {
4092 	      namevec.release ();
4093 	      itervec.release ();
4094 	      return true;
4095 	    }
4096 
4097 	  /* Restore the last use walker and continue walking there.  */
4098 	  use = name;
4099 	  name = namevec.pop ();
4100 	  memcpy (&iter, &itervec.last (),
4101 		  sizeof (ssa_op_iter));
4102 	  itervec.pop ();
4103 	  goto continue_walking;
4104 	}
4105 
4106       use = USE_FROM_PTR (usep);
4107 
4108       /* Since we handle phi nodes, we will sometimes get
4109 	 invariants in the use expression.  */
4110       if (TREE_CODE (use) == SSA_NAME)
4111 	{
4112 	  if (! (VN_INFO (use)->visited))
4113 	    {
4114 	      /* Recurse by pushing the current use walking state on
4115 		 the stack and starting over.  */
4116 	      itervec.safe_push (iter);
4117 	      namevec.safe_push (name);
4118 	      name = use;
4119 	      goto start_over;
4120 
4121 continue_walking:
4122 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4123 					 VN_INFO (use)->low);
4124 	    }
4125 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4126 	      && VN_INFO (use)->on_sccstack)
4127 	    {
4128 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4129 					 VN_INFO (name)->low);
4130 	    }
4131 	}
4132 
4133       usep = op_iter_next_use (&iter);
4134     }
4135 }
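/* The above is Tarjan's SCC algorithm made iterative: the recursion
   state (the current name plus its use iterator) lives in the explicit
   namevec/itervec stacks rather than on the call stack, so arbitrarily
   deep SSA use-def chains cannot overflow the C stack.  */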
4136 
4137 /* Allocate a value number table.  */
4138 
4139 static void
4140 allocate_vn_table (vn_tables_t table)
4141 {
4142   table->phis = new vn_phi_table_type (23);
4143   table->nary = new vn_nary_op_table_type (23);
4144   table->references = new vn_reference_table_type (23);
4145 
4146   gcc_obstack_init (&table->nary_obstack);
4147   table->phis_pool = create_alloc_pool ("VN phis",
4148 					sizeof (struct vn_phi_s),
4149 					30);
4150   table->references_pool = create_alloc_pool ("VN references",
4151 					      sizeof (struct vn_reference_s),
4152 					      30);
4153 }
4154 
4155 /* Free a value number table.  */
4156 
4157 static void
4158 free_vn_table (vn_tables_t table)
4159 {
4160   delete table->phis;
4161   table->phis = NULL;
4162   delete table->nary;
4163   table->nary = NULL;
4164   delete table->references;
4165   table->references = NULL;
4166   obstack_free (&table->nary_obstack, NULL);
4167   free_alloc_pool (table->phis_pool);
4168   free_alloc_pool (table->references_pool);
4169 }
4170 
4171 static void
4172 init_scc_vn (void)
4173 {
4174   size_t i;
4175   int j;
4176   int *rpo_numbers_temp;
4177 
4178   calculate_dominance_info (CDI_DOMINATORS);
4179   sccstack.create (0);
4180   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4181 
4182   constant_value_ids = BITMAP_ALLOC (NULL);
4183 
4184   next_dfs_num = 1;
4185   next_value_id = 1;
4186 
4187   vn_ssa_aux_table.create (num_ssa_names + 1);
4188   /* The create call doesn't actually grow the vector to the right
4189      size; it just preallocates the space to do so.  */
4190   vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4191   gcc_obstack_init (&vn_ssa_aux_obstack);
4192 
4193   shared_lookup_phiargs.create (0);
4194   shared_lookup_references.create (0);
4195   rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4196   rpo_numbers_temp =
4197     XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4198   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4199 
4200   /* rpo_numbers_temp holds the RPO ordering itself: element i is the
4201      index of the i'th block in RPO order.  We want the inverse map
4202      from block index to RPO number, so invert the array.  */
4203   for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4204     rpo_numbers[rpo_numbers_temp[j]] = j;
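  /* For instance, if rpo_numbers_temp were { 4, 2, 3 }, the loop above
     produces rpo_numbers[4] = 0, rpo_numbers[2] = 1 and
     rpo_numbers[3] = 2: each block index now maps to its position in
     the RPO walk.  */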
4205 
4206   XDELETE (rpo_numbers_temp);
4207 
4208   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4209 
4210   /* Create the VN_INFO structures, and initialize value numbers to
4211      TOP.  */
4212   for (i = 0; i < num_ssa_names; i++)
4213     {
4214       tree name = ssa_name (i);
4215       if (name)
4216 	{
4217 	  VN_INFO_GET (name)->valnum = VN_TOP;
4218 	  VN_INFO (name)->expr = NULL_TREE;
4219 	  VN_INFO (name)->value_id = 0;
4220 	}
4221     }
4222 
4223   renumber_gimple_stmt_uids ();
4224 
4225   /* Create the valid and optimistic value numbering tables.  */
4226   valid_info = XCNEW (struct vn_tables_s);
4227   allocate_vn_table (valid_info);
4228   optimistic_info = XCNEW (struct vn_tables_s);
4229   allocate_vn_table (optimistic_info);
4230 }
4231 
4232 void
4233 free_scc_vn (void)
4234 {
4235   size_t i;
4236 
4237   delete constant_to_value_id;
4238   constant_to_value_id = NULL;
4239   BITMAP_FREE (constant_value_ids);
4240   shared_lookup_phiargs.release ();
4241   shared_lookup_references.release ();
4242   XDELETEVEC (rpo_numbers);
4243 
4244   for (i = 0; i < num_ssa_names; i++)
4245     {
4246       tree name = ssa_name (i);
4247       if (name
4248 	  && VN_INFO (name)->needs_insertion)
4249 	release_ssa_name (name);
4250     }
4251   obstack_free (&vn_ssa_aux_obstack, NULL);
4252   vn_ssa_aux_table.release ();
4253 
4254   sccstack.release ();
4255   free_vn_table (valid_info);
4256   XDELETE (valid_info);
4257   free_vn_table (optimistic_info);
4258   XDELETE (optimistic_info);
4259 }
4260 
4261 /* Set *ID according to RESULT.  */
4262 
4263 static void
4264 set_value_id_for_result (tree result, unsigned int *id)
4265 {
4266   if (result && TREE_CODE (result) == SSA_NAME)
4267     *id = VN_INFO (result)->value_id;
4268   else if (result && is_gimple_min_invariant (result))
4269     *id = get_or_alloc_constant_value_id (result);
4270   else
4271     *id = get_next_value_id ();
4272 }
4273 
4274 /* Set the value ids in the valid hash tables.  */
4275 
4276 static void
4277 set_hashtable_value_ids (void)
4278 {
4279   vn_nary_op_iterator_type hin;
4280   vn_phi_iterator_type hip;
4281   vn_reference_iterator_type hir;
4282   vn_nary_op_t vno;
4283   vn_reference_t vr;
4284   vn_phi_t vp;
4285 
4286   /* Now set the value ids of the things we had put in the hash
4287      table.  */
4288 
4289   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4290     set_value_id_for_result (vno->result, &vno->value_id);
4291 
4292   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4293     set_value_id_for_result (vp->result, &vp->value_id);
4294 
4295   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4296 			       hir)
4297     set_value_id_for_result (vr->result, &vr->value_id);
4298 }
4299 
4300 class cond_dom_walker : public dom_walker
4301 {
4302 public:
4303   cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
4304 
4305   virtual void before_dom_children (basic_block);
4306 
4307   bool fail;
4308 };
4309 
4310 void
4311 cond_dom_walker::before_dom_children (basic_block bb)
4312 {
4313   edge e;
4314   edge_iterator ei;
4315 
4316   if (fail)
4317     return;
4318 
4319   /* If any of the predecessor edges that do not come from blocks dominated
4320      by us are still marked as possibly executable, consider this block
4321      reachable.  */
4322   bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4323   FOR_EACH_EDGE (e, ei, bb->preds)
4324     if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4325       reachable |= (e->flags & EDGE_EXECUTABLE);
4326 
4327   /* If the block is not reachable all outgoing edges are not
4328      executable.  */
4329   if (!reachable)
4330     {
4331       if (dump_file && (dump_flags & TDF_DETAILS))
4332 	fprintf (dump_file, "Marking all outgoing edges of unreachable "
4333 		 "BB %d as not executable\n", bb->index);
4334 
4335       FOR_EACH_EDGE (e, ei, bb->succs)
4336 	e->flags &= ~EDGE_EXECUTABLE;
4337       return;
4338     }
4339 
4340   gimple stmt = last_stmt (bb);
4341   if (!stmt)
4342     return;
4343 
4344   enum gimple_code code = gimple_code (stmt);
4345   if (code != GIMPLE_COND
4346       && code != GIMPLE_SWITCH
4347       && code != GIMPLE_GOTO)
4348     return;
4349 
4350   if (dump_file && (dump_flags & TDF_DETAILS))
4351     {
4352       fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
4353 	       bb->index);
4354       print_gimple_stmt (dump_file, stmt, 0, 0);
4355     }
4356 
4357   /* Value-number the last stmt's SSA uses.  */
4358   ssa_op_iter i;
4359   tree op;
4360   FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
4361     if (VN_INFO (op)->visited == false
4362 	&& !DFS (op))
4363       {
4364 	fail = true;
4365 	return;
4366       }
4367 
4368   /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
4369      if value-numbering can prove they are not reachable.  Handling
4370      computed gotos is also possible.  */
4371   tree val;
4372   switch (code)
4373     {
4374     case GIMPLE_COND:
4375       {
4376 	tree lhs = gimple_cond_lhs (stmt);
4377 	tree rhs = gimple_cond_rhs (stmt);
4378 	/* Work hard in computing the condition and take into account
4379 	   the valueization of the defining stmt.  */
4380 	if (TREE_CODE (lhs) == SSA_NAME)
4381 	  lhs = vn_get_expr_for (lhs);
4382 	if (TREE_CODE (rhs) == SSA_NAME)
4383 	  rhs = vn_get_expr_for (rhs);
4384 	val = fold_binary (gimple_cond_code (stmt),
4385 			   boolean_type_node, lhs, rhs);
4386 	break;
4387       }
4388     case GIMPLE_SWITCH:
4389       val = gimple_switch_index (as_a <gswitch *> (stmt));
4390       break;
4391     case GIMPLE_GOTO:
4392       val = gimple_goto_dest (stmt);
4393       break;
4394     default:
4395       gcc_unreachable ();
4396     }
4397   if (!val)
4398     return;
4399 
4400   edge taken = find_taken_edge (bb, vn_valueize (val));
4401   if (!taken)
4402     return;
4403 
4404   if (dump_file && (dump_flags & TDF_DETAILS))
4405     fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4406 	     "not executable\n", bb->index, bb->index, taken->dest->index);
4407 
4408   FOR_EACH_EDGE (e, ei, bb->succs)
4409     if (e != taken)
4410       e->flags &= ~EDGE_EXECUTABLE;
4411 }
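/* Illustrative effect of the edge pruning above: for a block ending in

     if (x_1 != x_1)
       goto bb3;
     else
       goto bb4;

   valueization folds the condition to false, find_taken_edge returns
   the edge to bb4, and the edge to bb3 loses EDGE_EXECUTABLE, so bb3
   (if it has no other executable predecessors) is later treated as
   unreachable by the dominator walk.  */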
4412 
4413 /* Do SCCVN.  Returns true if it finished, false if we bailed out
4414    due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
4415    how the alias oracle is used to walk memory during the VN process.  */
4416 
4417 bool
4418 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4419 {
4420   basic_block bb;
4421   size_t i;
4422   tree param;
4423 
4424   default_vn_walk_kind = default_vn_walk_kind_;
4425 
4426   init_scc_vn ();
4427   current_info = valid_info;
4428 
4429   for (param = DECL_ARGUMENTS (current_function_decl);
4430        param;
4431        param = DECL_CHAIN (param))
4432     {
4433       tree def = ssa_default_def (cfun, param);
4434       if (def)
4435 	{
4436 	  VN_INFO (def)->visited = true;
4437 	  VN_INFO (def)->valnum = def;
4438 	}
4439     }
4440 
4441   /* Mark all edges as possibly executable.  */
4442   FOR_ALL_BB_FN (bb, cfun)
4443     {
4444       edge_iterator ei;
4445       edge e;
4446       FOR_EACH_EDGE (e, ei, bb->succs)
4447 	e->flags |= EDGE_EXECUTABLE;
4448     }
4449 
4450   /* Walk all blocks in dominator order, value-numbering the last stmt's
4451      SSA uses and deciding whether outgoing edges are not executable.  */
4452   cond_dom_walker walker;
4453   walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4454   if (walker.fail)
4455     {
4456       free_scc_vn ();
4457       return false;
4458     }
4459 
4460   /* Value-number remaining SSA names.  */
4461   for (i = 1; i < num_ssa_names; ++i)
4462     {
4463       tree name = ssa_name (i);
4464       if (name
4465 	  && VN_INFO (name)->visited == false
4466 	  && !has_zero_uses (name))
4467 	if (!DFS (name))
4468 	  {
4469 	    free_scc_vn ();
4470 	    return false;
4471 	  }
4472     }
4473 
4474   /* Initialize the value ids.  */
4475 
4476   for (i = 1; i < num_ssa_names; ++i)
4477     {
4478       tree name = ssa_name (i);
4479       vn_ssa_aux_t info;
4480       if (!name)
4481 	continue;
4482       info = VN_INFO (name);
4483       if (info->valnum == name
4484 	  || info->valnum == VN_TOP)
4485 	info->value_id = get_next_value_id ();
4486       else if (is_gimple_min_invariant (info->valnum))
4487 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
4488     }
4489 
4490   /* Propagate.  */
4491   for (i = 1; i < num_ssa_names; ++i)
4492     {
4493       tree name = ssa_name (i);
4494       vn_ssa_aux_t info;
4495       if (!name)
4496 	continue;
4497       info = VN_INFO (name);
4498       if (TREE_CODE (info->valnum) == SSA_NAME
4499 	  && info->valnum != name
4500 	  && info->value_id != VN_INFO (info->valnum)->value_id)
4501 	info->value_id = VN_INFO (info->valnum)->value_id;
4502     }
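  /* E.g. if b_2 was value-numbered to a_1, the loop above makes
     VN_INFO (b_2)->value_id equal VN_INFO (a_1)->value_id, so later
     consumers such as PRE see the two names as one value.  */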
4503 
4504   set_hashtable_value_ids ();
4505 
4506   if (dump_file && (dump_flags & TDF_DETAILS))
4507     {
4508       fprintf (dump_file, "Value numbers:\n");
4509       for (i = 0; i < num_ssa_names; i++)
4510 	{
4511 	  tree name = ssa_name (i);
4512 	  if (name
4513 	      && VN_INFO (name)->visited
4514 	      && SSA_VAL (name) != name)
4515 	    {
4516 	      print_generic_expr (dump_file, name, 0);
4517 	      fprintf (dump_file, " = ");
4518 	      print_generic_expr (dump_file, SSA_VAL (name), 0);
4519 	      fprintf (dump_file, "\n");
4520 	    }
4521 	}
4522     }
4523 
4524   return true;
4525 }
4526 
4527 /* Return the maximum value id we have ever seen.  */
4528 
4529 unsigned int
4530 get_max_value_id (void)
4531 {
4532   return next_value_id;
4533 }
4534 
4535 /* Return the next unique value id.  */
4536 
4537 unsigned int
4538 get_next_value_id (void)
4539 {
4540   return next_value_id++;
4541 }
4542 
4543 
4544 /* Compare two expressions E1 and E2 and return true if they are equal.  */
4545 
4546 bool
4547 expressions_equal_p (tree e1, tree e2)
4548 {
4549   /* The obvious case.  */
4550   if (e1 == e2)
4551     return true;
4552 
4553   /* If only one of them is null, they cannot be equal.  */
4554   if (!e1 || !e2)
4555     return false;
4556 
4557   /* Now perform the actual comparison.  */
4558   if (TREE_CODE (e1) == TREE_CODE (e2)
4559       && operand_equal_p (e1, e2, OEP_PURE_SAME))
4560     return true;
4561 
4562   return false;
4563 }
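/* Note that the equality above is structural: two distinct ARRAY_REF
   trees a[i_1] and a[i_1] compare equal through operand_equal_p even
   though they are different tree nodes; pointer identity is only the
   fast path.  */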
4564 
4565 
4566 /* Return true if the nary operation NARY may trap.  This is a copy
4567    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
4568 
4569 bool
4570 vn_nary_may_trap (vn_nary_op_t nary)
4571 {
4572   tree type;
4573   tree rhs2 = NULL_TREE;
4574   bool honor_nans = false;
4575   bool honor_snans = false;
4576   bool fp_operation = false;
4577   bool honor_trapv = false;
4578   bool handled, ret;
4579   unsigned i;
4580 
4581   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4582       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4583       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4584     {
4585       type = nary->type;
4586       fp_operation = FLOAT_TYPE_P (type);
4587       if (fp_operation)
4588 	{
4589 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
4590 	  honor_snans = flag_signaling_nans != 0;
4591 	}
4592       else if (INTEGRAL_TYPE_P (type)
4593 	       && TYPE_OVERFLOW_TRAPS (type))
4594 	honor_trapv = true;
4595     }
4596   if (nary->length >= 2)
4597     rhs2 = nary->op[1];
4598   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4599 				       honor_trapv,
4600 				       honor_nans, honor_snans, rhs2,
4601 				       &handled);
4602   if (handled
4603       && ret)
4604     return true;
4605 
4606   for (i = 0; i < nary->length; ++i)
4607     if (tree_could_trap_p (nary->op[i]))
4608       return true;
4609 
4610   return false;
4611 }
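/* Example: a TRUNC_DIV_EXPR whose divisor is not a known non-zero
   constant may trap, as may a floating-point comparison when NaNs are
   honored; callers such as PRE use this predicate to avoid hoisting
   such naries past the conditions guarding them.  */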
4612