xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-ssa-sccvn.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* SCC value numbering for trees
2    Copyright (C) 2006, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4    Contributed by Daniel Berlin <dan@dberlin.org>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12 
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
48 
49 /* This algorithm is based on the SCC algorithm presented by Keith
50    Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
51    (http://citeseer.ist.psu.edu/41805.html).  In straight-line code,
52    it is equivalent to a regular hash-based value numbering that is
53    performed in reverse postorder.
54 
55    For code with cycles, there are two alternatives, both of which
56    require keeping the hashtables separate from the actual list of
57    value numbers for SSA names.
58 
59    1. Iterate value numbering in an RPO walk of the blocks, removing
60    all the entries from the hashtable after each iteration (but
61    keeping the SSA name->value number mapping between iterations).
62    Iterate until the mapping does not change.
63 
64    2. Perform value numbering as part of an SCC walk on the SSA graph,
65    iterating only the cycles in the SSA graph until they do not change
66    (using a separate, optimistic hashtable for value numbering the SCC
67    operands).
68 
69    The second is not just faster in practice (because most SSA graph
70    cycles do not involve all the variables in the graph); it also has
71    some nice properties.
72 
73    One of these nice properties is that when we pop an SCC off the
74    stack, we are guaranteed to have processed all the operands coming from
75    *outside of that SCC*, so we do not need to do anything special to
76    ensure they have value numbers.
77 
78    Another nice property is that the SCC walk is done as part of a DFS
79    of the SSA graph, which makes it easy to perform combining and
80    simplifying operations at the same time.
81 
82    The code below is deliberately written in a way that makes it easy
83    to separate the SCC walk from the other work it does.
84 
85    In order to propagate constants through the code, we track which
86    expressions contain constants, and use those while folding.  In
87    theory, we could also track expressions whose value numbers are
88    replaced, in case we end up folding based on expression
89    identities.
90 
91    In order to value number memory, we assign value numbers to vuses.
92    This enables us to note that, for example, stores to the same
93    address of the same value from the same starting memory states are
94    equivalent.
95    TODO:
96 
97    1. We can iterate only the changing portions of the SCCs, but
98    I have not seen an SCC big enough for this to be a win.
99    2. If you differentiate between phi nodes for loops and phi nodes
100    for if-then-else, you can properly consider phi nodes in different
101    blocks for equivalence.
102    3. We could value number vuses in more cases, particularly, whole
103    structure copies.
104 */
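
/* As a purely illustrative sketch (not part of the implementation),
   consider the SSA cycle produced by a simple counting loop:

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 1

   i_1 and i_2 form an SCC in the SSA graph.  The SCC walk first value
   numbers the members optimistically (starting from VN_TOP) and
   re-iterates just this cycle until the value numbers stabilize; only
   then are the results committed to the valid table.  */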
105 
106 /* The set of hashtables and alloc_pool's for their items.  */
107 
108 typedef struct vn_tables_s
109 {
110   htab_t nary;
111   htab_t phis;
112   htab_t references;
113   struct obstack nary_obstack;
114   alloc_pool phis_pool;
115   alloc_pool references_pool;
116 } *vn_tables_t;
117 
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
120 
121 
122 /* Valid hashtables storing information we have proven to be
123    correct.  */
124 
125 static vn_tables_t valid_info;
126 
127 /* Optimistic hashtables storing information we are making assumptions about
128    during iterations.  */
129 
130 static vn_tables_t optimistic_info;
131 
132 /* Pointer to the set of hashtables that is currently being used.
133    Should always point to either the optimistic_info or the
134    valid_info.  */
135 
136 static vn_tables_t current_info;
137 
138 
139 /* Reverse post order index for each basic block.  */
140 
141 static int *rpo_numbers;
142 
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
144 
145 /* This represents the top of the VN lattice, which is the universal
146    value.  */
147 
148 tree VN_TOP;
149 
150 /* Unique counter for our value ids.  */
151 
152 static unsigned int next_value_id;
153 
154 /* Next DFS number and the stack for strongly connected component
155    detection. */
156 
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
159 
160 static bool may_insert;
161 
162 
163 DEF_VEC_P(vn_ssa_aux_t);
164 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
165 
166 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
167    are allocated on an obstack for locality reasons, and to free them
168    without looping over the VEC.  */
169 
170 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
171 static struct obstack vn_ssa_aux_obstack;
172 
173 /* Return the value numbering information for a given SSA name.  */
174 
175 vn_ssa_aux_t
176 VN_INFO (tree name)
177 {
178   vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
179 				SSA_NAME_VERSION (name));
180   gcc_assert (res);
181   return res;
182 }
183 
184 /* Set the value numbering info for a given SSA name to a given
185    value.  */
186 
187 static inline void
188 VN_INFO_SET (tree name, vn_ssa_aux_t value)
189 {
190   VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
191 	       SSA_NAME_VERSION (name), value);
192 }
193 
194 /* Initialize the value numbering info for a given SSA name.
195    This should be called just once for every SSA name.  */
196 
197 vn_ssa_aux_t
198 VN_INFO_GET (tree name)
199 {
200   vn_ssa_aux_t newinfo;
201 
202   newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
203   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
204   if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
205     VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
206 		   SSA_NAME_VERSION (name) + 1);
207   VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
208 	       SSA_NAME_VERSION (name), newinfo);
209   return newinfo;
210 }
211 
212 
213 /* Get the representative expression for the SSA_NAME NAME.  Returns
214    the representative SSA_NAME if there is no expression associated with it.  */
215 
216 tree
217 vn_get_expr_for (tree name)
218 {
219   vn_ssa_aux_t vn = VN_INFO (name);
220   gimple def_stmt;
221   tree expr = NULL_TREE;
222 
223   if (vn->valnum == VN_TOP)
224     return name;
225 
226   /* If the value-number is a constant it is the representative
227      expression.  */
228   if (TREE_CODE (vn->valnum) != SSA_NAME)
229     return vn->valnum;
230 
231   /* Get to the information of the value of this SSA_NAME.  */
232   vn = VN_INFO (vn->valnum);
233 
234   /* If the value-number is a constant it is the representative
235      expression.  */
236   if (TREE_CODE (vn->valnum) != SSA_NAME)
237     return vn->valnum;
238 
239   /* Else if we have an expression, return it.  */
240   if (vn->expr != NULL_TREE)
241     return vn->expr;
242 
243   /* Otherwise use the defining statement to build the expression.  */
244   def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
245 
246   /* If the value number is a default-definition or a PHI result
247      use it directly.  */
248   if (gimple_nop_p (def_stmt)
249       || gimple_code (def_stmt) == GIMPLE_PHI)
250     return vn->valnum;
251 
252   if (!is_gimple_assign (def_stmt))
253     return vn->valnum;
254 
255   /* FIXME tuples.  This is incomplete and likely will miss some
256      simplifications.  */
257   switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
258     {
259     case tcc_reference:
260       if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
261 	   || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
262 	   || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
263 	  && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
264 	expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
265 			    gimple_expr_type (def_stmt),
266 			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
267       break;
268 
269     case tcc_unary:
270       expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
271 			  gimple_expr_type (def_stmt),
272 			  gimple_assign_rhs1 (def_stmt));
273       break;
274 
275     case tcc_binary:
276       expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
277 			  gimple_expr_type (def_stmt),
278 			  gimple_assign_rhs1 (def_stmt),
279 			  gimple_assign_rhs2 (def_stmt));
280       break;
281 
282     default:;
283     }
284   if (expr == NULL_TREE)
285     return vn->valnum;
286 
287   /* Cache the expression.  */
288   vn->expr = expr;
289 
290   return expr;
291 }
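
/* For example (illustrative only): if x_2 has value number y_1 and
   y_1 is defined by "y_1 = z_3 + 1", then vn_get_expr_for (x_2)
   builds, caches and returns the expression "z_3 + 1"; when no
   expression can be built, the value number itself is returned.  */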
292 
293 
294 /* Free a phi operation structure VP.  */
295 
296 static void
297 free_phi (void *vp)
298 {
299   vn_phi_t phi = (vn_phi_t) vp;
300   VEC_free (tree, heap, phi->phiargs);
301 }
302 
303 /* Free a reference operation structure VP.  */
304 
305 static void
306 free_reference (void *vp)
307 {
308   vn_reference_t vr = (vn_reference_t) vp;
309   VEC_free (vn_reference_op_s, heap, vr->operands);
310 }
311 
312 /* Hash table equality function for vn_constant_t.  */
313 
314 static int
315 vn_constant_eq (const void *p1, const void *p2)
316 {
317   const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
318   const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
319 
320   if (vc1->hashcode != vc2->hashcode)
321     return false;
322 
323   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
324 }
325 
326 /* Hash table hash function for vn_constant_t.  */
327 
328 static hashval_t
329 vn_constant_hash (const void *p1)
330 {
331   const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
332   return vc1->hashcode;
333 }
334 
335 /* Lookup a value id for CONSTANT and return it.  If it does not
336    exist, return 0.  */
337 
338 unsigned int
339 get_constant_value_id (tree constant)
340 {
341   void **slot;
342   struct vn_constant_s vc;
343 
344   vc.hashcode = vn_hash_constant_with_type (constant);
345   vc.constant = constant;
346   slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
347 				   vc.hashcode, NO_INSERT);
348   if (slot)
349     return ((vn_constant_t)*slot)->value_id;
350   return 0;
351 }
352 
353 /* Lookup a value id for CONSTANT.  If it does not exist, create a
354    new one.  Either way, return the value id.  */
355 
356 unsigned int
357 get_or_alloc_constant_value_id (tree constant)
358 {
359   void **slot;
360   struct vn_constant_s vc;
361   vn_constant_t vcp;
362 
363   vc.hashcode = vn_hash_constant_with_type (constant);
364   vc.constant = constant;
365   slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
366 				   vc.hashcode, INSERT);
367   if (*slot)
368     return ((vn_constant_t)*slot)->value_id;
369 
370   vcp = XNEW (struct vn_constant_s);
371   vcp->hashcode = vc.hashcode;
372   vcp->constant = constant;
373   vcp->value_id = get_next_value_id ();
374   *slot = (void *) vcp;
375   bitmap_set_bit (constant_value_ids, vcp->value_id);
376   return vcp->value_id;
377 }
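
/* Illustrative usage: looking up the same constant twice yields the
   same value id, and that id tests as constant.

     unsigned int id1 = get_or_alloc_constant_value_id (integer_zero_node);
     unsigned int id2 = get_or_alloc_constant_value_id (integer_zero_node);
     gcc_assert (id1 == id2 && value_id_constant_p (id1));  */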
378 
379 /* Return true if V is a value id for a constant.  */
380 
381 bool
382 value_id_constant_p (unsigned int v)
383 {
384   return bitmap_bit_p (constant_value_ids, v);
385 }
386 
387 /* Compare two reference operands P1 and P2 for equality.  Return true if
388    they are equal, and false otherwise.  */
389 
390 static int
391 vn_reference_op_eq (const void *p1, const void *p2)
392 {
393   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
394   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
395 
396   return vro1->opcode == vro2->opcode
397     && types_compatible_p (vro1->type, vro2->type)
398     && expressions_equal_p (vro1->op0, vro2->op0)
399     && expressions_equal_p (vro1->op1, vro2->op1)
400     && expressions_equal_p (vro1->op2, vro2->op2);
401 }
402 
403 /* Compute the hash for a reference operand VRO1.  */
404 
405 static hashval_t
406 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
407 {
408   result = iterative_hash_hashval_t (vro1->opcode, result);
409   if (vro1->op0)
410     result = iterative_hash_expr (vro1->op0, result);
411   if (vro1->op1)
412     result = iterative_hash_expr (vro1->op1, result);
413   if (vro1->op2)
414     result = iterative_hash_expr (vro1->op2, result);
415   return result;
416 }
417 
418 /* Return the hashcode for a given reference operation P1.  */
419 
420 static hashval_t
421 vn_reference_hash (const void *p1)
422 {
423   const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
424   return vr1->hashcode;
425 }
426 
427 /* Compute a hash for the reference operation VR1 and return it.  */
428 
429 hashval_t
430 vn_reference_compute_hash (const vn_reference_t vr1)
431 {
432   hashval_t result = 0;
433   int i;
434   vn_reference_op_t vro;
435 
436   for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
437     result = vn_reference_op_compute_hash (vro, result);
438   if (vr1->vuse)
439     result += SSA_NAME_VERSION (vr1->vuse);
440 
441   return result;
442 }
443 
444 /* Return true if reference operations P1 and P2 are equivalent.  This
445    means they have the same set of operands and vuses.  */
446 
447 int
448 vn_reference_eq (const void *p1, const void *p2)
449 {
450   int i;
451   vn_reference_op_t vro;
452 
453   const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
454   const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
455   /* Early out if this is not a hash collision.  */
456   if (vr1->hashcode != vr2->hashcode)
457     return false;
461 
462   /* The VOP needs to be the same.  */
463   if (vr1->vuse != vr2->vuse)
464     return false;
465 
466   /* If the operands are the same we are done.  */
467   if (vr1->operands == vr2->operands)
468     return true;
469 
470   /* We require that address operands be canonicalized in a way that
471      two memory references will have the same operands if they are
472      equivalent.  */
473   if (VEC_length (vn_reference_op_s, vr1->operands)
474       != VEC_length (vn_reference_op_s, vr2->operands))
475     return false;
476 
477   for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
478     if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
479 			     vro))
480       return false;
481 
482   return true;
483 }
484 
485 /* Copy the operations present in load/store REF into RESULT, a vector of
486    vn_reference_op_s's.  */
487 
488 void
489 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
490 {
491   if (TREE_CODE (ref) == TARGET_MEM_REF)
492     {
493       vn_reference_op_s temp;
494       tree base;
495 
496       base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
497       if (!base)
498 	base = build_int_cst (ptr_type_node, 0);
499 
500       memset (&temp, 0, sizeof (temp));
501       /* We do not care for spurious type qualifications.  */
502       temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
503       temp.opcode = TREE_CODE (ref);
504       temp.op0 = TMR_INDEX (ref);
505       temp.op1 = TMR_STEP (ref);
506       temp.op2 = TMR_OFFSET (ref);
507       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
508 
509       memset (&temp, 0, sizeof (temp));
510       temp.type = NULL_TREE;
511       temp.opcode = TREE_CODE (base);
512       temp.op0 = base;
513       temp.op1 = TMR_ORIGINAL (ref);
514       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
515       return;
516     }
517 
518   /* For non-calls, store the information that makes up the address.  */
519 
520   while (ref)
521     {
522       vn_reference_op_s temp;
523 
524       memset (&temp, 0, sizeof (temp));
525       /* We do not care for spurious type qualifications.  */
526       temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
527       temp.opcode = TREE_CODE (ref);
528 
529       switch (temp.opcode)
530 	{
531 	case ALIGN_INDIRECT_REF:
532 	case INDIRECT_REF:
533 	  /* The only operand is the address, which gets its own
534 	     vn_reference_op_s structure.  */
535 	  break;
536 	case MISALIGNED_INDIRECT_REF:
537 	  temp.op0 = TREE_OPERAND (ref, 1);
538 	  break;
539 	case BIT_FIELD_REF:
540 	  /* Record bits and position.  */
541 	  temp.op0 = TREE_OPERAND (ref, 1);
542 	  temp.op1 = TREE_OPERAND (ref, 2);
543 	  break;
544 	case COMPONENT_REF:
545 	  /* The field decl is enough to unambiguously specify the field;
546 	     a matching type is not necessary, and a mismatching type
547 	     is always a spurious difference.  */
548 	  temp.type = NULL_TREE;
549 	  temp.op0 = TREE_OPERAND (ref, 1);
550 	  temp.op1 = TREE_OPERAND (ref, 2);
551 	  /* If this is a reference to a union member, record the union
552 	     member size as operand.  Do so only if we are doing
553 	     expression insertion (during FRE), as PRE currently gets
554 	     confused with this.  */
555 	  if (may_insert
556 	      && temp.op1 == NULL_TREE
557 	      && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
558 	      && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
559 	      && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
560 	      && host_integerp (DECL_SIZE (temp.op0), 0))
561 	    temp.op0 = DECL_SIZE (temp.op0);
562 	  break;
563 	case ARRAY_RANGE_REF:
564 	case ARRAY_REF:
565 	  /* Record index as operand.  */
566 	  temp.op0 = TREE_OPERAND (ref, 1);
567 	  /* Always record lower bounds and element size.  */
568 	  temp.op1 = array_ref_low_bound (ref);
569 	  temp.op2 = array_ref_element_size (ref);
570 	  break;
571 	case STRING_CST:
572 	case INTEGER_CST:
573 	case COMPLEX_CST:
574 	case VECTOR_CST:
575 	case REAL_CST:
576 	case CONSTRUCTOR:
577 	case VAR_DECL:
578 	case PARM_DECL:
579 	case CONST_DECL:
580 	case RESULT_DECL:
581 	case SSA_NAME:
582 	  temp.op0 = ref;
583 	  break;
584 	case ADDR_EXPR:
585 	  if (is_gimple_min_invariant (ref))
586 	    {
587 	      temp.op0 = ref;
588 	      break;
589 	    }
590 	  /* Fallthrough.  */
591 	  /* These are only interesting for their operands, their
592 	     existence, and their type.  They will never be the last
593 	     ref in the chain of references (i.e. they require an
594 	     operand), so we don't have to put anything
595 	     for op* as it will be handled by the iteration.  */
596 	case IMAGPART_EXPR:
597 	case REALPART_EXPR:
598 	case VIEW_CONVERT_EXPR:
599 	  break;
600 	default:
601 	  gcc_unreachable ();
602 	}
603       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
604 
605       if (REFERENCE_CLASS_P (ref)
606 	  || (TREE_CODE (ref) == ADDR_EXPR
607 	      && !is_gimple_min_invariant (ref)))
608 	ref = TREE_OPERAND (ref, 0);
609       else
610 	ref = NULL_TREE;
611     }
612 }
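
/* As an illustrative example, the reference a.b[i] is decomposed
   outermost-first into roughly

     { ARRAY_REF:     op0 = i, op1 = low bound, op2 = element size }
     { COMPONENT_REF: op0 = FIELD_DECL for b }
     { VAR_DECL:      op0 = a }

   so that equivalent references yield equal operand vectors.  */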
613 
614 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
615    operands in *OPS, the reference alias set SET and the reference type TYPE.
616    Return true if something useful was produced.  */
617 
618 bool
619 ao_ref_init_from_vn_reference (ao_ref *ref,
620 			       alias_set_type set, tree type,
621 			       VEC (vn_reference_op_s, heap) *ops)
622 {
623   vn_reference_op_t op;
624   unsigned i;
625   tree base = NULL_TREE;
626   tree *op0_p = &base;
627   HOST_WIDE_INT offset = 0;
628   HOST_WIDE_INT max_size;
629   HOST_WIDE_INT size = -1;
630   tree size_tree = NULL_TREE;
631 
632   /* First get the final access size from just the outermost expression.  */
633   op = VEC_index (vn_reference_op_s, ops, 0);
634   if (op->opcode == COMPONENT_REF)
635     {
636       if (TREE_CODE (op->op0) == INTEGER_CST)
637 	size_tree = op->op0;
638       else
639 	size_tree = DECL_SIZE (op->op0);
640     }
641   else if (op->opcode == BIT_FIELD_REF)
642     size_tree = op->op0;
643   else
644     {
645       enum machine_mode mode = TYPE_MODE (type);
646       if (mode == BLKmode)
647 	size_tree = TYPE_SIZE (type);
648       else
649         size = GET_MODE_BITSIZE (mode);
650     }
651   if (size_tree != NULL_TREE)
652     {
653       if (!host_integerp (size_tree, 1))
654 	size = -1;
655       else
656 	size = TREE_INT_CST_LOW (size_tree);
657     }
658 
659   /* Initially, maxsize is the same as the accessed element size.
660      In the following it will only grow (or become -1).  */
661   max_size = size;
662 
663   /* Compute cumulative bit-offset for nested component-refs and array-refs,
664      and find the ultimate containing object.  */
665   for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
666     {
667       switch (op->opcode)
668 	{
669 	/* These may be in the reference ops, but we cannot do anything
670 	   sensible with them here.  */
671 	case CALL_EXPR:
672 	case ADDR_EXPR:
673 	  return false;
674 
675 	/* Record the base objects.  */
676 	case ALIGN_INDIRECT_REF:
677 	case INDIRECT_REF:
678 	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
679 	  op0_p = &TREE_OPERAND (*op0_p, 0);
680 	  break;
681 
682 	case MISALIGNED_INDIRECT_REF:
683 	  *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
684 			   NULL_TREE, op->op0);
685 	  op0_p = &TREE_OPERAND (*op0_p, 0);
686 	  break;
687 
688 	case VAR_DECL:
689 	case PARM_DECL:
690 	case RESULT_DECL:
691 	case SSA_NAME:
692 	  *op0_p = op->op0;
693 	  break;
694 
695 	/* And now the usual component-reference style ops.  */
696 	case BIT_FIELD_REF:
697 	  offset += tree_low_cst (op->op1, 0);
698 	  break;
699 
700 	case COMPONENT_REF:
701 	  {
702 	    tree field = op->op0;
703 	    /* We do not have a complete COMPONENT_REF tree here so we
704 	       cannot use component_ref_field_offset.  Do the interesting
705 	       parts manually.  */
706 
707 	    /* Our union trick, done for offset zero only.  */
708 	    if (TREE_CODE (field) == INTEGER_CST)
709 	      ;
710 	    else if (op->op1
711 		     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
712 	      max_size = -1;
713 	    else
714 	      {
715 		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
716 			   * BITS_PER_UNIT);
717 		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
718 	      }
719 	    break;
720 	  }
721 
722 	case ARRAY_RANGE_REF:
723 	case ARRAY_REF:
724 	  /* We recorded the lower bound and the element size.  */
725 	  if (!host_integerp (op->op0, 0)
726 	      || !host_integerp (op->op1, 0)
727 	      || !host_integerp (op->op2, 0))
728 	    max_size = -1;
729 	  else
730 	    {
731 	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
732 	      hindex -= TREE_INT_CST_LOW (op->op1);
733 	      hindex *= TREE_INT_CST_LOW (op->op2);
734 	      hindex *= BITS_PER_UNIT;
735 	      offset += hindex;
736 	    }
737 	  break;
738 
739 	case REALPART_EXPR:
740 	  break;
741 
742 	case IMAGPART_EXPR:
743 	  offset += size;
744 	  break;
745 
746 	case VIEW_CONVERT_EXPR:
747 	  break;
748 
749 	case STRING_CST:
750 	case INTEGER_CST:
751 	case COMPLEX_CST:
752 	case VECTOR_CST:
753 	case REAL_CST:
754 	case CONSTRUCTOR:
755 	case CONST_DECL:
756 	  return false;
757 
758 	default:
759 	  return false;
760 	}
761     }
762 
763   if (base == NULL_TREE)
764     return false;
765 
766   ref->ref = NULL_TREE;
767   ref->base = base;
768   ref->offset = offset;
769   ref->size = size;
770   ref->max_size = max_size;
771   ref->ref_alias_set = set;
772   ref->base_alias_set = -1;
773 
774   return true;
775 }
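
/* For instance (illustrative numbers): an ARRAY_REF with constant
   index 3, lower bound 0 and element size 4 bytes contributes
   (3 - 0) * 4 * BITS_PER_UNIT = 96 bits to OFFSET above, while a
   variable index forces MAX_SIZE to -1 instead.  */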
776 
777 /* Copy the operations present in load/store/call REF into RESULT, a vector of
778    vn_reference_op_s's.  */
779 
780 void
781 copy_reference_ops_from_call (gimple call,
782 			      VEC(vn_reference_op_s, heap) **result)
783 {
784   vn_reference_op_s temp;
785   unsigned i;
786 
787   /* Copy the type, opcode, function being called and static chain.  */
788   memset (&temp, 0, sizeof (temp));
789   temp.type = gimple_call_return_type (call);
790   temp.opcode = CALL_EXPR;
791   temp.op0 = gimple_call_fn (call);
792   temp.op1 = gimple_call_chain (call);
793   VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
794 
795   /* Copy the call arguments.  As they can be references as well,
796      just chain them together.  */
797   for (i = 0; i < gimple_call_num_args (call); ++i)
798     {
799       tree callarg = gimple_call_arg (call, i);
800       copy_reference_ops_from_ref (callarg, result);
801     }
802 }
803 
804 /* Create a vector of vn_reference_op_s structures from REF, a
805    REFERENCE_CLASS_P tree.  The vector is not shared. */
806 
807 static VEC(vn_reference_op_s, heap) *
808 create_reference_ops_from_ref (tree ref)
809 {
810   VEC (vn_reference_op_s, heap) *result = NULL;
811 
812   copy_reference_ops_from_ref (ref, &result);
813   return result;
814 }
815 
816 /* Create a vector of vn_reference_op_s structures from CALL, a
817    call statement.  The vector is not shared.  */
818 
819 static VEC(vn_reference_op_s, heap) *
820 create_reference_ops_from_call (gimple call)
821 {
822   VEC (vn_reference_op_s, heap) *result = NULL;
823 
824   copy_reference_ops_from_call (call, &result);
825   return result;
826 }
827 
828 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
829    *I_P to point to the last element of the replacement.  */
830 void
831 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
832 			    unsigned int *i_p)
833 {
834   VEC(vn_reference_op_s, heap) *mem = NULL;
835   vn_reference_op_t op;
836   unsigned int i = *i_p;
837   unsigned int j;
838 
839   /* Get ops for the addressed object.  */
840   op = VEC_index (vn_reference_op_s, *ops, i);
841   /* ???  If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
842      around it to avoid later ICEs.  */
843   if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
844       && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
845     {
846       vn_reference_op_s aref;
847       tree dom;
848       aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
849       aref.opcode = ARRAY_REF;
850       aref.op0 = integer_zero_node;
851       if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
852 	  && TYPE_MIN_VALUE (dom))
853 	aref.op0 = TYPE_MIN_VALUE (dom);
854       aref.op1 = aref.op0;
855       aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
856       VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
857     }
858   copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
859 
860   /* Do the replacement - we should have at least one op in mem now.  */
861   if (VEC_length (vn_reference_op_s, mem) == 1)
862     {
863       VEC_replace (vn_reference_op_s, *ops, i - 1,
864 		   VEC_index (vn_reference_op_s, mem, 0));
865       VEC_ordered_remove (vn_reference_op_s, *ops, i);
866       i--;
867     }
868   else if (VEC_length (vn_reference_op_s, mem) == 2)
869     {
870       VEC_replace (vn_reference_op_s, *ops, i - 1,
871 		   VEC_index (vn_reference_op_s, mem, 0));
872       VEC_replace (vn_reference_op_s, *ops, i,
873 		   VEC_index (vn_reference_op_s, mem, 1));
874     }
875   else if (VEC_length (vn_reference_op_s, mem) > 2)
876     {
877       VEC_replace (vn_reference_op_s, *ops, i - 1,
878 		   VEC_index (vn_reference_op_s, mem, 0));
879       VEC_replace (vn_reference_op_s, *ops, i,
880 		   VEC_index (vn_reference_op_s, mem, 1));
881       /* ???  There is no VEC_splice.  */
882       for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
883 	VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
884     }
885   else
886     gcc_unreachable ();
887 
888   VEC_free (vn_reference_op_s, heap, mem);
889   *i_p = i;
890 }
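
/* Illustrative example: when the op at *I_P is ADDR_EXPR <&a.b> and
   the preceding op is an INDIRECT_REF, the pair cancels and is
   replaced by the ops for a.b itself, so *&a.b and a.b end up with
   the same operand vector.  */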
891 
892 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
893    structures into their value numbers.  This is done in-place, and
894    the vector passed in is returned.  */
895 
896 static VEC (vn_reference_op_s, heap) *
897 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
898 {
899   vn_reference_op_t vro;
900   unsigned int i;
901 
902   for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
903     {
904       if (vro->opcode == SSA_NAME
905 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
906 	{
907 	  vro->op0 = SSA_VAL (vro->op0);
908 	  /* If it transforms from an SSA_NAME to a constant, update
909 	     the opcode.  */
910 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
911 	    vro->opcode = TREE_CODE (vro->op0);
912 	  /* If it transforms from an SSA_NAME to an address, fold with
913 	     a preceding indirect reference.  */
914 	  if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
915 	      && VEC_index (vn_reference_op_s,
916 			    orig, i - 1)->opcode == INDIRECT_REF)
917 	    {
918 	      vn_reference_fold_indirect (&orig, &i);
919 	      continue;
920 	    }
921 	}
922       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
923 	vro->op1 = SSA_VAL (vro->op1);
924       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
925 	vro->op2 = SSA_VAL (vro->op2);
926     }
927 
928   return orig;
929 }
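
/* Illustrative example: if i_2 has been valued to the constant 4,
   valueizing the ops for a[i_2] rewrites the ARRAY_REF index to 4,
   so a later lookup of a[i_2] can hit an earlier entry for a[4].  */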
930 
931 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
932 
933 /* Create a vector of vn_reference_op_s structures from REF, a
934    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
935    this function.  */
936 
937 static VEC(vn_reference_op_s, heap) *
938 valueize_shared_reference_ops_from_ref (tree ref)
939 {
940   if (!ref)
941     return NULL;
942   VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
943   copy_reference_ops_from_ref (ref, &shared_lookup_references);
944   shared_lookup_references = valueize_refs (shared_lookup_references);
945   return shared_lookup_references;
946 }
947 
948 /* Create a vector of vn_reference_op_s structures from CALL, a
949    call statement.  The vector is shared among all callers of
950    this function.  */
951 
952 static VEC(vn_reference_op_s, heap) *
953 valueize_shared_reference_ops_from_call (gimple call)
954 {
955   if (!call)
956     return NULL;
957   VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
958   copy_reference_ops_from_call (call, &shared_lookup_references);
959   shared_lookup_references = valueize_refs (shared_lookup_references);
960   return shared_lookup_references;
961 }
962 
963 /* Lookup an SCCVN reference operation VR in the current hash table.
964    Returns the resulting value number if it exists in the hash table,
965    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
966    vn_reference_t stored in the hashtable if something is found.  */
967 
968 static tree
969 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
970 {
971   void **slot;
972   hashval_t hash;
973 
974   hash = vr->hashcode;
975   slot = htab_find_slot_with_hash (current_info->references, vr,
976 				   hash, NO_INSERT);
977   if (!slot && current_info == optimistic_info)
978     slot = htab_find_slot_with_hash (valid_info->references, vr,
979 				     hash, NO_INSERT);
980   if (slot)
981     {
982       if (vnresult)
983 	*vnresult = (vn_reference_t)*slot;
984       return ((vn_reference_t)*slot)->result;
985     }
986 
987   return NULL_TREE;
988 }
989 
990 static tree *last_vuse_ptr;
991 static vn_lookup_kind vn_walk_kind;
992 static vn_lookup_kind default_vn_walk_kind;
993 
994 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
995    with the current VUSE and performs the expression lookup.  */
996 
997 static void *
998 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
999 {
1000   vn_reference_t vr = (vn_reference_t)vr_;
1001   void **slot;
1002   hashval_t hash;
1003 
1004   if (last_vuse_ptr)
1005     *last_vuse_ptr = vuse;
1006 
1007   /* Fixup vuse and hash.  */
1008   if (vr->vuse)
1009     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1010   vr->vuse = SSA_VAL (vuse);
1011   if (vr->vuse)
1012     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1013 
1014   hash = vr->hashcode;
1015   slot = htab_find_slot_with_hash (current_info->references, vr,
1016 				   hash, NO_INSERT);
1017   if (!slot && current_info == optimistic_info)
1018     slot = htab_find_slot_with_hash (valid_info->references, vr,
1019 				     hash, NO_INSERT);
1020   if (slot)
1021     return *slot;
1022 
1023   return NULL;
1024 }
1025 
1026 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1027    from the statement defining VUSE and if not successful tries to
1028    translate *REFP and VR_ through an aggregate copy at the definition
1029    of VUSE.  */
1030 
1031 static void *
1032 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1033 {
1034   vn_reference_t vr = (vn_reference_t)vr_;
1035   gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1036   tree fndecl;
1037   tree base;
1038   HOST_WIDE_INT offset, maxsize;
1039 
1040   base = ao_ref_base (ref);
1041   offset = ref->offset;
1042   maxsize = ref->max_size;
1043 
1044   /* If we cannot constrain the size of the reference we cannot
1045      test if anything kills it.  */
1046   if (maxsize == -1)
1047     return (void *)-1;
1048 
1049   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1050      from that definition.
1051      1) Memset.  */
1052   if (is_gimple_reg_type (vr->type)
1053       && is_gimple_call (def_stmt)
1054       && (fndecl = gimple_call_fndecl (def_stmt))
1055       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1056       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1057       && integer_zerop (gimple_call_arg (def_stmt, 1))
1058       && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1059       && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1060     {
1061       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1062       tree base2;
1063       HOST_WIDE_INT offset2, size2, maxsize2;
1064       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1065       size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1066       if ((unsigned HOST_WIDE_INT)size2 / 8
1067 	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1068 	  && maxsize2 != -1
1069 	  && operand_equal_p (base, base2, 0)
1070 	  && offset2 <= offset
1071 	  && offset2 + size2 >= offset + maxsize)
1072 	{
1073 	  tree val = fold_convert (vr->type, integer_zero_node);
1074 	  unsigned int value_id = get_or_alloc_constant_value_id (val);
1075 	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1076 					     VEC_copy (vn_reference_op_s,
1077 						       heap, vr->operands),
1078 					     val, value_id);
1079 	}
1080     }
1081 
1082   /* 2) Assignment from an empty CONSTRUCTOR.  */
1083   else if (is_gimple_reg_type (vr->type)
1084 	   && gimple_assign_single_p (def_stmt)
1085 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1086 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1087     {
1088       tree base2;
1089       HOST_WIDE_INT offset2, size2, maxsize2;
1090       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1091 				       &offset2, &size2, &maxsize2);
1092       if (maxsize2 != -1
1093 	  && operand_equal_p (base, base2, 0)
1094 	  && offset2 <= offset
1095 	  && offset2 + size2 >= offset + maxsize)
1096 	{
1097 	  tree val = fold_convert (vr->type, integer_zero_node);
1098 	  unsigned int value_id = get_or_alloc_constant_value_id (val);
1099 	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1100 					     VEC_copy (vn_reference_op_s,
1101 						       heap, vr->operands),
1102 					     val, value_id);
1103 	}
1104     }
1105 
1106   /* For aggregate copies translate the reference through them if
1107      the copy kills ref.  */
1108   else if (vn_walk_kind == VN_WALKREWRITE
1109 	   && gimple_assign_single_p (def_stmt)
1110 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
1111 	       || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
1112 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1113     {
1114       tree base2;
1115       HOST_WIDE_INT offset2, size2, maxsize2;
1116       int i, j;
1117       VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
1118       vn_reference_op_t vro;
1119       ao_ref r;
1120 
1121       /* See if the assignment kills REF.  */
1122       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1123 				       &offset2, &size2, &maxsize2);
1124       if (maxsize2 == -1
1125 	  || !operand_equal_p (base, base2, 0)
1126 	  || offset2 > offset
1127 	  || offset2 + size2 < offset + maxsize)
1128 	return (void *)-1;
1129 
1130       /* Find the common base of ref and the lhs.  */
1131       copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
1132       i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1133       j = VEC_length (vn_reference_op_s, lhs) - 1;
1134       while (j >= 0 && i >= 0
1135 	     && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1136 					       vr->operands, i),
1137 				    VEC_index (vn_reference_op_s, lhs, j)))
1138 	{
1139 	  i--;
1140 	  j--;
1141 	}
1142 
1143       VEC_free (vn_reference_op_s, heap, lhs);
1144       /* i now points to the first additional op.
1145 	 ???  LHS may not be completely contained in VR, one or more
1146 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
1147 	 try handling outermost VIEW_CONVERT_EXPRs.  */
1148       if (j != -1)
1149 	return (void *)-1;
1150 
1151       /* Now re-write REF to be based on the rhs of the assignment.  */
1152       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1153       /* We need to pre-pend vr->operands[0..i] to rhs.  */
1154       if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1155 	  > VEC_length (vn_reference_op_s, vr->operands))
1156 	{
1157 	  VEC (vn_reference_op_s, heap) *old = vr->operands;
1158 	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1159 			 i + 1 + VEC_length (vn_reference_op_s, rhs));
1160 	  if (old == shared_lookup_references
1161 	      && vr->operands != old)
1162 	    shared_lookup_references = NULL;
1163 	}
1164       else
1165 	VEC_truncate (vn_reference_op_s, vr->operands,
1166 		      i + 1 + VEC_length (vn_reference_op_s, rhs));
1167       for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
1168 	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1169       VEC_free (vn_reference_op_s, heap, rhs);
1170       vr->hashcode = vn_reference_compute_hash (vr);
1171 
1172       /* Adjust *ref from the new operands.  */
1173       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1174 	return (void *)-1;
1175       /* This can happen with bitfields.  */
1176       if (ref->size != r.size)
1177 	return (void *)-1;
1178       *ref = r;
1179 
1180       /* Do not update last seen VUSE after translating.  */
1181       last_vuse_ptr = NULL;
1182 
1183       /* Keep looking for the adjusted *REF / VR pair.  */
1184       return NULL;
1185     }
1186 
1187   /* Bail out and stop walking.  */
1188   return (void *)-1;
1189 }
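
/* Illustrative example for case 1) above: given

     memset (&s, 0, sizeof (s));
     x_1 = s.f;

   the load of s.f is fully covered by the memset, so the walk can
   value x_1 as the zero constant of its type.  */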
1190 
1191 /* Lookup a reference operation by its parts, in the current hash table.
1192    Returns the resulting value number if it exists in the hash table,
1193    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1194    vn_reference_t stored in the hashtable if something is found.  */
1195 
1196 tree
1197 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1198 			    VEC (vn_reference_op_s, heap) *operands,
1199 			    vn_reference_t *vnresult, vn_lookup_kind kind)
1200 {
1201   struct vn_reference_s vr1;
1202   vn_reference_t tmp;
1203 
1204   if (!vnresult)
1205     vnresult = &tmp;
1206   *vnresult = NULL;
1207 
1208   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1209   VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1210   VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1211 		 VEC_length (vn_reference_op_s, operands));
1212   memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1213 	  VEC_address (vn_reference_op_s, operands),
1214 	  sizeof (vn_reference_op_s)
1215 	  * VEC_length (vn_reference_op_s, operands));
1216   vr1.operands = operands = shared_lookup_references
1217     = valueize_refs (shared_lookup_references);
1218   vr1.type = type;
1219   vr1.set = set;
1220   vr1.hashcode = vn_reference_compute_hash (&vr1);
1221   vn_reference_lookup_1 (&vr1, vnresult);
1222 
1223   if (!*vnresult
1224       && kind != VN_NOWALK
1225       && vr1.vuse)
1226     {
1227       ao_ref r;
1228       vn_walk_kind = kind;
1229       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1230 	*vnresult =
1231 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1232 						  vn_reference_lookup_2,
1233 						  vn_reference_lookup_3, &vr1);
1234       if (vr1.operands != operands)
1235 	VEC_free (vn_reference_op_s, heap, vr1.operands);
1236     }
1237 
1238   if (*vnresult)
1239      return (*vnresult)->result;
1240 
1241   return NULL_TREE;
1242 }
1243 
1244 /* Lookup OP in the current hash table, and return the resulting value
1245    number if it exists in the hash table.  Return NULL_TREE if it does
1246    not exist in the hash table or if the result field of the structure
1247    was NULL.  VNRESULT will be filled in with the vn_reference_t
1248    stored in the hashtable if one exists.  */
1249 
1250 tree
1251 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1252 		     vn_reference_t *vnresult)
1253 {
1254   VEC (vn_reference_op_s, heap) *operands;
1255   struct vn_reference_s vr1;
1256 
1257   if (vnresult)
1258     *vnresult = NULL;
1259 
1260   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1261   vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
1262   vr1.type = TREE_TYPE (op);
1263   vr1.set = get_alias_set (op);
1264   vr1.hashcode = vn_reference_compute_hash (&vr1);
1265 
1266   if (kind != VN_NOWALK
1267       && vr1.vuse)
1268     {
1269       vn_reference_t wvnresult;
1270       ao_ref r;
1271       ao_ref_init (&r, op);
1272       vn_walk_kind = kind;
1273       wvnresult =
1274 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1275 						vn_reference_lookup_2,
1276 						vn_reference_lookup_3, &vr1);
1277       if (vr1.operands != operands)
1278 	VEC_free (vn_reference_op_s, heap, vr1.operands);
1279       if (wvnresult)
1280 	{
1281 	  if (vnresult)
1282 	    *vnresult = wvnresult;
1283 	  return wvnresult->result;
1284 	}
1285 
1286       return NULL_TREE;
1287     }
1288 
1289   return vn_reference_lookup_1 (&vr1, vnresult);
1290 }
1291 
1292 
1293 /* Insert OP into the current hash table with a value number of
1294    RESULT, and return the resulting reference structure we created.  */
1295 
1296 vn_reference_t
1297 vn_reference_insert (tree op, tree result, tree vuse)
1298 {
1299   void **slot;
1300   vn_reference_t vr1;
1301 
1302   vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1303   if (TREE_CODE (result) == SSA_NAME)
1304     vr1->value_id = VN_INFO (result)->value_id;
1305   else
1306     vr1->value_id = get_or_alloc_constant_value_id (result);
1307   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1308   vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1309   vr1->type = TREE_TYPE (op);
1310   vr1->set = get_alias_set (op);
1311   vr1->hashcode = vn_reference_compute_hash (vr1);
1312   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1313 
1314   slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1315 				   INSERT);
1316 
1317   /* Because we lookup stores using vuses, and value number failures
1318      using the vdefs (see visit_reference_op_store for how and why),
1319      it's possible that on failure we may try to insert an already
1320      inserted store.  This is not wrong, there is no ssa name for a
1321      store that we could use as a differentiator anyway.  Thus, unlike
1322      the other lookup functions, you cannot gcc_assert (!*slot)
1323      here.  */
1324 
1325   /* But free the old slot in case of a collision.  */
1326   if (*slot)
1327     free_reference (*slot);
1328 
1329   *slot = vr1;
1330   return vr1;
1331 }
1332 
1333 /* Insert a reference by its pieces into the current hash table with
1334    a value number of RESULT.  Return the resulting reference
1335    structure we created.  */
1336 
1337 vn_reference_t
1338 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1339 			    VEC (vn_reference_op_s, heap) *operands,
1340 			    tree result, unsigned int value_id)
1342 {
1343   void **slot;
1344   vn_reference_t vr1;
1345 
1346   vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1347   vr1->value_id = value_id;
1348   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1349   vr1->operands = valueize_refs (operands);
1350   vr1->type = type;
1351   vr1->set = set;
1352   vr1->hashcode = vn_reference_compute_hash (vr1);
1353   if (result && TREE_CODE (result) == SSA_NAME)
1354     result = SSA_VAL (result);
1355   vr1->result = result;
1356 
1357   slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1358 				   INSERT);
1359 
1360   /* At this point we should have all the things inserted that we have
1361      seen before, and we should never try inserting something that
1362      already exists.  */
1363   gcc_assert (!*slot);
1364   if (*slot)
1365     free_reference (*slot);
1366 
1367   *slot = vr1;
1368   return vr1;
1369 }
1370 
1371 /* Compute and return the hash value for nary operation VNO1.  */
1372 
1373 hashval_t
1374 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1375 {
1376   hashval_t hash;
1377   unsigned i;
1378 
1379   for (i = 0; i < vno1->length; ++i)
1380     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1381       vno1->op[i] = SSA_VAL (vno1->op[i]);
1382 
1383   if (vno1->length == 2
1384       && commutative_tree_code (vno1->opcode)
1385       && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1386     {
1387       tree temp = vno1->op[0];
1388       vno1->op[0] = vno1->op[1];
1389       vno1->op[1] = temp;
1390     }
1391 
1392   hash = iterative_hash_hashval_t (vno1->opcode, 0);
1393   for (i = 0; i < vno1->length; ++i)
1394     hash = iterative_hash_expr (vno1->op[i], hash);
1395 
1396   return hash;
1397 }
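
/* An illustrative consequence of the canonicalization above: for a
   commutative opcode the operands are swapped into a canonical order
   first, so a_1 + b_2 and b_2 + a_1 hash identically and share one
   table entry.  */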
1398 
1399 /* Return the computed hashcode for nary operation P1.  */
1400 
1401 static hashval_t
1402 vn_nary_op_hash (const void *p1)
1403 {
1404   const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1405   return vno1->hashcode;
1406 }
1407 
1408 /* Compare nary operations P1 and P2 and return true if they are
1409    equivalent.  */
1410 
1411 int
1412 vn_nary_op_eq (const void *p1, const void *p2)
1413 {
1414   const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1415   const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1416   unsigned i;
1417 
1418   if (vno1->hashcode != vno2->hashcode)
1419     return false;
1420 
1421   if (vno1->opcode != vno2->opcode
1422       || !types_compatible_p (vno1->type, vno2->type))
1423     return false;
1424 
1425   for (i = 0; i < vno1->length; ++i)
1426     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1427       return false;
1428 
1429   return true;
1430 }
1431 
1432 /* Lookup an n-ary operation by its pieces and return the resulting value
1433    number if it exists in the hash table.  Return NULL_TREE if it does
1434    not exist in the hash table or if the result field of the operation
1435    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1436    if it exists.  */
1437 
1438 tree
1439 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1440 			  tree type, tree op0, tree op1, tree op2,
1441 			  tree op3, vn_nary_op_t *vnresult)
1442 {
1443   void **slot;
1444   struct vn_nary_op_s vno1;
1445   if (vnresult)
1446     *vnresult = NULL;
1447   vno1.opcode = code;
1448   vno1.length = length;
1449   vno1.type = type;
1450   vno1.op[0] = op0;
1451   vno1.op[1] = op1;
1452   vno1.op[2] = op2;
1453   vno1.op[3] = op3;
1454   vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1455   slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1456 				   NO_INSERT);
1457   if (!slot && current_info == optimistic_info)
1458     slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1459 				     NO_INSERT);
1460   if (!slot)
1461     return NULL_TREE;
1462   if (vnresult)
1463     *vnresult = (vn_nary_op_t)*slot;
1464   return ((vn_nary_op_t)*slot)->result;
1465 }
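
/* Illustrative usage sketch (a_1, b_2 and type stand for arbitrary
   operands and their type): to ask whether a_1 + b_2 already has a
   value number one could call

     tree res = vn_nary_op_lookup_pieces (2, PLUS_EXPR, type,
					  a_1, b_2, NULL_TREE,
					  NULL_TREE, NULL);

   which returns the recorded result or NULL_TREE.  */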
1466 
1467 /* Lookup OP in the current hash table, and return the resulting value
1468    number if it exists in the hash table.  Return NULL_TREE if it does
1469    not exist in the hash table or if the result field of the operation
1470    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1471    if it exists.  */
1472 
1473 tree
1474 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1475 {
1476   void **slot;
1477   struct vn_nary_op_s vno1;
1478   unsigned i;
1479 
1480   if (vnresult)
1481     *vnresult = NULL;
1482   vno1.opcode = TREE_CODE (op);
1483   vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1484   vno1.type = TREE_TYPE (op);
1485   for (i = 0; i < vno1.length; ++i)
1486     vno1.op[i] = TREE_OPERAND (op, i);
1487   vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1488   slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1489 				   NO_INSERT);
1490   if (!slot && current_info == optimistic_info)
1491     slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1492 				     NO_INSERT);
1493   if (!slot)
1494     return NULL_TREE;
1495   if (vnresult)
1496     *vnresult = (vn_nary_op_t)*slot;
1497   return ((vn_nary_op_t)*slot)->result;
1498 }
1499 
1500 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1501    value number if it exists in the hash table.  Return NULL_TREE if
1502    it does not exist in the hash table.  VNRESULT will contain the
1503    vn_nary_op_t from the hashtable if it exists.  */
1504 
1505 tree
1506 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1507 {
1508   void **slot;
1509   struct vn_nary_op_s vno1;
1510   unsigned i;
1511 
1512   if (vnresult)
1513     *vnresult = NULL;
1514   vno1.opcode = gimple_assign_rhs_code (stmt);
1515   vno1.length = gimple_num_ops (stmt) - 1;
1516   vno1.type = gimple_expr_type (stmt);
1517   for (i = 0; i < vno1.length; ++i)
1518     vno1.op[i] = gimple_op (stmt, i + 1);
1519   if (vno1.opcode == REALPART_EXPR
1520       || vno1.opcode == IMAGPART_EXPR
1521       || vno1.opcode == VIEW_CONVERT_EXPR)
1522     vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
1523   vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1524   slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1525 				   NO_INSERT);
1526   if (!slot && current_info == optimistic_info)
1527     slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1528 				     NO_INSERT);
1529   if (!slot)
1530     return NULL_TREE;
1531   if (vnresult)
1532     *vnresult = (vn_nary_op_t)*slot;
1533   return ((vn_nary_op_t)*slot)->result;
1534 }
1535 
1536 /* Insert an n-ary operation into the current hash table using its
1537    pieces.  Return the vn_nary_op_t structure we created and put in
1538    the hashtable.  */
1539 
1540 vn_nary_op_t
1541 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1542 			  tree type, tree op0,
1543 			  tree op1, tree op2, tree op3,
1544 			  tree result,
1545 			  unsigned int value_id)
1546 {
1547   void **slot;
1548   vn_nary_op_t vno1;
1549 
1550   vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1551 				       (sizeof (struct vn_nary_op_s)
1552 					- sizeof (tree) * (4 - length)));
1553   vno1->value_id = value_id;
1554   vno1->opcode = code;
1555   vno1->length = length;
1556   vno1->type = type;
1557   if (length >= 1)
1558     vno1->op[0] = op0;
1559   if (length >= 2)
1560     vno1->op[1] = op1;
1561   if (length >= 3)
1562     vno1->op[2] = op2;
1563   if (length >= 4)
1564     vno1->op[3] = op3;
1565   vno1->result = result;
1566   vno1->hashcode = vn_nary_op_compute_hash (vno1);
1567   slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1568 				   INSERT);
1569   gcc_assert (!*slot);
1570 
1571   *slot = vno1;
1572   return vno1;
1574 }
1575 
1576 /* Insert OP into the current hash table with a value number of
1577    RESULT.  Return the vn_nary_op_t structure we created and put in
1578    the hashtable.  */
1579 
1580 vn_nary_op_t
1581 vn_nary_op_insert (tree op, tree result)
1582 {
1583   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1584   void **slot;
1585   vn_nary_op_t vno1;
1586   unsigned i;
1587 
1588   vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1589 			(sizeof (struct vn_nary_op_s)
1590 			 - sizeof (tree) * (4 - length)));
1591   vno1->value_id = VN_INFO (result)->value_id;
1592   vno1->opcode = TREE_CODE (op);
1593   vno1->length = length;
1594   vno1->type = TREE_TYPE (op);
1595   for (i = 0; i < vno1->length; ++i)
1596     vno1->op[i] = TREE_OPERAND (op, i);
1597   vno1->result = result;
1598   vno1->hashcode = vn_nary_op_compute_hash (vno1);
1599   slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1600 				   INSERT);
1601   gcc_assert (!*slot);
1602 
1603   *slot = vno1;
1604   return vno1;
1605 }
1606 
1607 /* Insert the rhs of STMT into the current hash table with a value number of
1608    RESULT.  */
1609 
1610 vn_nary_op_t
1611 vn_nary_op_insert_stmt (gimple stmt, tree result)
1612 {
1613   unsigned length = gimple_num_ops (stmt) - 1;
1614   void **slot;
1615   vn_nary_op_t vno1;
1616   unsigned i;
1617 
1618   vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1619 				       (sizeof (struct vn_nary_op_s)
1620 					- sizeof (tree) * (4 - length)));
1621   vno1->value_id = VN_INFO (result)->value_id;
1622   vno1->opcode = gimple_assign_rhs_code (stmt);
1623   vno1->length = length;
1624   vno1->type = gimple_expr_type (stmt);
1625   for (i = 0; i < vno1->length; ++i)
1626     vno1->op[i] = gimple_op (stmt, i + 1);
1627   if (vno1->opcode == REALPART_EXPR
1628       || vno1->opcode == IMAGPART_EXPR
1629       || vno1->opcode == VIEW_CONVERT_EXPR)
1630     vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
1631   vno1->result = result;
1632   vno1->hashcode = vn_nary_op_compute_hash (vno1);
1633   slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1634 				   INSERT);
1635   gcc_assert (!*slot);
1636 
1637   *slot = vno1;
1638   return vno1;
1639 }
1640 
1641 /* Compute a hashcode for PHI operation VP1 and return it.  */
1642 
1643 static inline hashval_t
1644 vn_phi_compute_hash (vn_phi_t vp1)
1645 {
1646   hashval_t result;
1647   int i;
1648   tree phi1op;
1649   tree type;
1650 
1651   result = vp1->block->index;
1652 
1653   /* If all PHI arguments are constants we need to distinguish
1654      the PHI node via its type.  */
1655   type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1656   result += (INTEGRAL_TYPE_P (type)
1657 	     + (INTEGRAL_TYPE_P (type)
1658 		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
1659 
1660   for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1661     {
1662       if (phi1op == VN_TOP)
1663 	continue;
1664       result = iterative_hash_expr (phi1op, result);
1665     }
1666 
1667   return result;
1668 }
1669 
1670 /* Return the computed hashcode for phi operation P1.  */
1671 
1672 static hashval_t
1673 vn_phi_hash (const void *p1)
1674 {
1675   const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1676   return vp1->hashcode;
1677 }
1678 
1679 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
1680 
1681 static int
1682 vn_phi_eq (const void *p1, const void *p2)
1683 {
1684   const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1685   const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1686 
1687   if (vp1->hashcode != vp2->hashcode)
1688     return false;
1689 
1690   if (vp1->block == vp2->block)
1691     {
1692       int i;
1693       tree phi1op;
1694 
1695       /* If the PHI nodes do not have compatible types
1696 	 they are not the same.  */
1697       if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1698 			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1699 	return false;
1700 
1701       /* Any phi in the same block will have its arguments in the
1702 	 same edge order, because of how we store phi nodes.  */
1703       for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1704 	{
1705 	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
1706 	  if (phi1op == VN_TOP || phi2op == VN_TOP)
1707 	    continue;
1708 	  if (!expressions_equal_p (phi1op, phi2op))
1709 	    return false;
1710 	}
1711       return true;
1712     }
1713   return false;
1714 }
1715 
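/* For illustration of vn_phi_eq (hypothetical SSA names): two PHIs in
   the same basic block

     x_4 = PHI <a_1(2), b_2(3)>
     y_5 = PHI <a_1(2), b_2(3)>

   hash and compare equal once their arguments are canonicalized to
   value numbers, so y_5 can be value numbered to x_4.  VN_TOP
   arguments act as wildcards and compare equal to anything.  */
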
1716 static VEC(tree, heap) *shared_lookup_phiargs;
1717 
1718 /* Lookup PHI in the current hash table, and return the resulting
1719    value number if it exists in the hash table.  Return NULL_TREE if
1720    it does not exist in the hash table.  */
1721 
1722 static tree
1723 vn_phi_lookup (gimple phi)
1724 {
1725   void **slot;
1726   struct vn_phi_s vp1;
1727   unsigned i;
1728 
1729   VEC_truncate (tree, shared_lookup_phiargs, 0);
1730 
1731   /* Canonicalize the SSA_NAME's to their value number.  */
1732   for (i = 0; i < gimple_phi_num_args (phi); i++)
1733     {
1734       tree def = PHI_ARG_DEF (phi, i);
1735       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1736       VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1737     }
1738   vp1.phiargs = shared_lookup_phiargs;
1739   vp1.block = gimple_bb (phi);
1740   vp1.hashcode = vn_phi_compute_hash (&vp1);
1741   slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1742 				   NO_INSERT);
1743   if (!slot && current_info == optimistic_info)
1744     slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1745 				     NO_INSERT);
1746   if (!slot)
1747     return NULL_TREE;
1748   return ((vn_phi_t)*slot)->result;
1749 }
1750 
1751 /* Insert PHI into the current hash table with a value number of
1752    RESULT.  */
1753 
1754 static vn_phi_t
1755 vn_phi_insert (gimple phi, tree result)
1756 {
1757   void **slot;
1758   vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1759   unsigned i;
1760   VEC (tree, heap) *args = NULL;
1761 
1762   /* Canonicalize the SSA_NAME's to their value number.  */
1763   for (i = 0; i < gimple_phi_num_args (phi); i++)
1764     {
1765       tree def = PHI_ARG_DEF (phi, i);
1766       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1767       VEC_safe_push (tree, heap, args, def);
1768     }
1769   vp1->value_id = VN_INFO (result)->value_id;
1770   vp1->phiargs = args;
1771   vp1->block = gimple_bb (phi);
1772   vp1->result = result;
1773   vp1->hashcode = vn_phi_compute_hash (vp1);
1774 
1775   slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1776 				   INSERT);
1777 
1778   /* Because we iterate over phi operations more than once, it's
1779      possible the slot might already exist here, hence no assert.  */
1780   *slot = vp1;
1781   return vp1;
1782 }
1783 
1784 
1785 /* Print the components of the strongly connected component SCC to OUT.  */
1786 
1787 static void
1788 print_scc (FILE *out, VEC (tree, heap) *scc)
1789 {
1790   tree var;
1791   unsigned int i;
1792 
1793   fprintf (out, "SCC consists of: ");
1794   for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1795     {
1796       print_generic_expr (out, var, 0);
1797       fprintf (out, " ");
1798     }
1799   fprintf (out, "\n");
1800 }
1801 
1802 /* Set the value number of FROM to TO, return true if it has changed
1803    as a result.  */
1804 
1805 static inline bool
1806 set_ssa_val_to (tree from, tree to)
1807 {
1808   tree currval = SSA_VAL (from);
1809 
1810   if (from != to)
1811     {
1812       if (currval == from)
1813 	{
1814 	  if (dump_file && (dump_flags & TDF_DETAILS))
1815 	    {
1816 	      fprintf (dump_file, "Not changing value number of ");
1817 	      print_generic_expr (dump_file, from, 0);
1818 	      fprintf (dump_file, " from VARYING to ");
1819 	      print_generic_expr (dump_file, to, 0);
1820 	      fprintf (dump_file, "\n");
1821 	    }
1822 	  return false;
1823 	}
1824       else if (TREE_CODE (to) == SSA_NAME
1825 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1826 	to = from;
1827     }
1828 
1829   /* The only things we allow as value numbers are VN_TOP, SSA names
1830      and invariants.  So assert that here.  */
1831   gcc_assert (to != NULL_TREE
1832 	      && (to == VN_TOP
1833 		  || TREE_CODE (to) == SSA_NAME
1834 		  || is_gimple_min_invariant (to)));
1835 
1836   if (dump_file && (dump_flags & TDF_DETAILS))
1837     {
1838       fprintf (dump_file, "Setting value number of ");
1839       print_generic_expr (dump_file, from, 0);
1840       fprintf (dump_file, " to ");
1841       print_generic_expr (dump_file, to, 0);
1842     }
1843 
1844   if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1845     {
1846       VN_INFO (from)->valnum = to;
1847       if (dump_file && (dump_flags & TDF_DETAILS))
1848 	fprintf (dump_file, " (changed)\n");
1849       return true;
1850     }
1851   if (dump_file && (dump_flags & TDF_DETAILS))
1852     fprintf (dump_file, "\n");
1853   return false;
1854 }
1855 
1856 /* Value number all definitions in STMT to themselves.
1857    Return true if a value number changed.  */
1858 
1859 static bool
1860 defs_to_varying (gimple stmt)
1861 {
1862   bool changed = false;
1863   ssa_op_iter iter;
1864   def_operand_p defp;
1865 
1866   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1867     {
1868       tree def = DEF_FROM_PTR (defp);
1869 
1870       VN_INFO (def)->use_processed = true;
1871       changed |= set_ssa_val_to (def, def);
1872     }
1873   return changed;
1874 }
1875 
1876 static bool expr_has_constants (tree expr);
1877 static tree valueize_expr (tree expr);
1878 
1879 /* Visit a copy between LHS and RHS, return true if the value number
1880    changed.  */
1881 
1882 static bool
1883 visit_copy (tree lhs, tree rhs)
1884 {
1885   /* Follow chains of copies to their destination.  */
1886   while (TREE_CODE (rhs) == SSA_NAME
1887 	 && SSA_VAL (rhs) != rhs)
1888     rhs = SSA_VAL (rhs);
1889 
1890   /* The copy may have a more interesting constant-filled expression
1891      (we don't, since we know our RHS is just an SSA name).  */
1892   if (TREE_CODE (rhs) == SSA_NAME)
1893     {
1894       VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1895       VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1896     }
1897 
1898   return set_ssa_val_to (lhs, rhs);
1899 }
1900 
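/* For example (made-up names): given the copy chain

     b_2 = a_1;
     c_3 = b_2;

   visiting c_3 follows b_2 to a_1, so SSA_VAL (c_3) becomes a_1 and
   all three names end up with the same value number.  */
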
1901 /* Visit a unary operator RHS, value number it, and return true if the
1902    value number of LHS has changed as a result.  */
1903 
1904 static bool
1905 visit_unary_op (tree lhs, gimple stmt)
1906 {
1907   bool changed = false;
1908   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1909 
1910   if (result)
1911     {
1912       changed = set_ssa_val_to (lhs, result);
1913     }
1914   else
1915     {
1916       changed = set_ssa_val_to (lhs, lhs);
1917       vn_nary_op_insert_stmt (stmt, lhs);
1918     }
1919 
1920   return changed;
1921 }
1922 
1923 /* Visit a binary operator RHS, value number it, and return true if the
1924    value number of LHS has changed as a result.  */
1925 
1926 static bool
1927 visit_binary_op (tree lhs, gimple stmt)
1928 {
1929   bool changed = false;
1930   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1931 
1932   if (result)
1933     {
1934       changed = set_ssa_val_to (lhs, result);
1935     }
1936   else
1937     {
1938       changed = set_ssa_val_to (lhs, lhs);
1939       vn_nary_op_insert_stmt (stmt, lhs);
1940     }
1941 
1942   return changed;
1943 }
1944 
1945 /* Visit a call STMT storing into LHS.  Return true if the value number
1946    of the LHS has changed as a result.  */
1947 
1948 static bool
1949 visit_reference_op_call (tree lhs, gimple stmt)
1950 {
1951   bool changed = false;
1952   struct vn_reference_s vr1;
1953   tree result;
1954   tree vuse = gimple_vuse (stmt);
1955 
1956   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1957   vr1.operands = valueize_shared_reference_ops_from_call (stmt);
1958   vr1.type = gimple_expr_type (stmt);
1959   vr1.set = 0;
1960   vr1.hashcode = vn_reference_compute_hash (&vr1);
1961   result = vn_reference_lookup_1 (&vr1, NULL);
1962   if (result)
1963     {
1964       changed = set_ssa_val_to (lhs, result);
1965       if (TREE_CODE (result) == SSA_NAME
1966 	  && VN_INFO (result)->has_constants)
1967 	VN_INFO (lhs)->has_constants = true;
1968     }
1969   else
1970     {
1971       void **slot;
1972       vn_reference_t vr2;
1973       changed = set_ssa_val_to (lhs, lhs);
1974       vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1975       vr2->vuse = vr1.vuse;
1976       vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1977       vr2->type = vr1.type;
1978       vr2->set = vr1.set;
1979       vr2->hashcode = vr1.hashcode;
1980       vr2->result = lhs;
1981       slot = htab_find_slot_with_hash (current_info->references,
1982 				       vr2, vr2->hashcode, INSERT);
1983       if (*slot)
1984 	free_reference (*slot);
1985       *slot = vr2;
1986     }
1987 
1988   return changed;
1989 }
1990 
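/* An illustrative sketch (hypothetical names): for two calls to a
   const or pure function with the same arguments and the same
   valueized VUSE,

     x_2 = cos (y_1);
     ...
     z_4 = cos (y_1);

   the lookup for the second call finds the entry inserted for the
   first one, so z_4 is value numbered to x_2.  */
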
1991 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1992    and return true if the value number of the LHS has changed as a result.  */
1993 
1994 static bool
1995 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1996 {
1997   bool changed = false;
1998   tree last_vuse;
1999   tree result;
2000 
2001   last_vuse = gimple_vuse (stmt);
2002   last_vuse_ptr = &last_vuse;
2003   result = vn_reference_lookup (op, gimple_vuse (stmt),
2004 				default_vn_walk_kind, NULL);
2005   last_vuse_ptr = NULL;
2006 
2007   /* If we have a VCE, try looking up its operand as it might be stored in
2008      a different type.  */
2009   if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2010     result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2011     				  default_vn_walk_kind, NULL);
2012 
2013   /* We handle type-punning through unions by value-numbering based
2014      on offset and size of the access.  Be prepared to handle a
2015      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
2016   if (result
2017       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2018     {
2019       /* We will be setting the value number of lhs to the value number
2020 	 of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
2021 	 So first simplify and lookup this expression to see if it
2022 	 is already available.  */
2023       tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2024       if ((CONVERT_EXPR_P (val)
2025 	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2026 	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2027         {
2028 	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2029 	  if ((CONVERT_EXPR_P (tem)
2030 	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2031 	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2032 						    TREE_TYPE (val), tem)))
2033 	    val = tem;
2034 	}
2035       result = val;
2036       if (!is_gimple_min_invariant (val)
2037 	  && TREE_CODE (val) != SSA_NAME)
2038 	result = vn_nary_op_lookup (val, NULL);
2039       /* If the expression is not yet available, value-number lhs to
2040 	 a new SSA_NAME we create.  */
2041       if (!result && may_insert)
2042         {
2043 	  result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
2044 	  /* Initialize value-number information properly.  */
2045 	  VN_INFO_GET (result)->valnum = result;
2046 	  VN_INFO (result)->value_id = get_next_value_id ();
2047 	  VN_INFO (result)->expr = val;
2048 	  VN_INFO (result)->has_constants = expr_has_constants (val);
2049 	  VN_INFO (result)->needs_insertion = true;
2050 	  /* As all "inserted" statements are singleton SCCs, insert
2051 	     to the valid table.  This is strictly needed to
2052 	     avoid re-generating new value SSA_NAMEs for the same
2053 	     expression during SCC iteration over and over (the
2054 	     optimistic table gets cleared after each iteration).
2055 	     We do not need to insert into the optimistic table, as
2056 	     lookups there will fall back to the valid table.  */
2057 	  if (current_info == optimistic_info)
2058 	    {
2059 	      current_info = valid_info;
2060 	      vn_nary_op_insert (val, result);
2061 	      current_info = optimistic_info;
2062 	    }
2063 	  else
2064 	    vn_nary_op_insert (val, result);
2065 	  if (dump_file && (dump_flags & TDF_DETAILS))
2066 	    {
2067 	      fprintf (dump_file, "Inserting name ");
2068 	      print_generic_expr (dump_file, result, 0);
2069 	      fprintf (dump_file, " for expression ");
2070 	      print_generic_expr (dump_file, val, 0);
2071 	      fprintf (dump_file, "\n");
2072 	    }
2073 	}
2074     }
2075 
2076   if (result)
2077     {
2078       changed = set_ssa_val_to (lhs, result);
2079       if (TREE_CODE (result) == SSA_NAME
2080 	  && VN_INFO (result)->has_constants)
2081 	{
2082 	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2083 	  VN_INFO (lhs)->has_constants = true;
2084 	}
2085     }
2086   else
2087     {
2088       changed = set_ssa_val_to (lhs, lhs);
2089       vn_reference_insert (op, lhs, last_vuse);
2090     }
2091 
2092   return changed;
2093 }
2094 
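/* An illustrative example of the type-punning case handled above
   (made-up names): if a float is stored and loaded back as an int
   through a union,

     u.f = x_1;
     y_2 = u.i;

   the lookup finds the stored x_1 via offset and size, the types do
   not match, and the load is value numbered as the expression
   VIEW_CONVERT_EXPR <int> (x_1).  */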
2095 
2096 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2097    and return true if the value number of the LHS has changed as a result.  */
2098 
2099 static bool
2100 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2101 {
2102   bool changed = false;
2103   tree result;
2104   bool resultsame = false;
2105 
2106   /* First we want to look up using the *vuses* from the store and see
2107      whether the last store to this location with the same address
2108      had the same value.
2109 
2110      The vuses represent the memory state before the store.  If the
2111      memory state, address, and value of the store is the same as the
2112      last store to this location, then this store will produce the
2113      same memory state as that store.
2114 
2115      In this case the vdef versions for this store are value numbered to those
2116      vuse versions, since they represent the same memory state after
2117      this store.
2118 
2119      Otherwise, the vdefs for the store are used when inserting into
2120      the table, since the store generates a new memory state.  */
2121 
2122   result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2123 
2124   if (result)
2125     {
2126       if (TREE_CODE (result) == SSA_NAME)
2127 	result = SSA_VAL (result);
2128       if (TREE_CODE (op) == SSA_NAME)
2129 	op = SSA_VAL (op);
2130       resultsame = expressions_equal_p (result, op);
2131     }
2132 
2133   if (!result || !resultsame)
2134     {
2135       tree vdef;
2136 
2137       if (dump_file && (dump_flags & TDF_DETAILS))
2138 	{
2139 	  fprintf (dump_file, "No store match\n");
2140 	  fprintf (dump_file, "Value numbering store ");
2141 	  print_generic_expr (dump_file, lhs, 0);
2142 	  fprintf (dump_file, " to ");
2143 	  print_generic_expr (dump_file, op, 0);
2144 	  fprintf (dump_file, "\n");
2145 	}
2146       /* Have to set value numbers before insert, since insert is
2147 	 going to valueize the references in-place.  */
2148       if ((vdef = gimple_vdef (stmt)))
2149 	{
2150 	  VN_INFO (vdef)->use_processed = true;
2151 	  changed |= set_ssa_val_to (vdef, vdef);
2152 	}
2153 
2154       /* Do not insert structure copies into the tables.  */
2155       if (is_gimple_min_invariant (op)
2156 	  || is_gimple_reg (op))
2157         vn_reference_insert (lhs, op, vdef);
2158     }
2159   else
2160     {
2161       /* We had a match, so value number the vdef to have the value
2162 	 number of the vuse it came from.  */
2163       tree def, use;
2164 
2165       if (dump_file && (dump_flags & TDF_DETAILS))
2166 	fprintf (dump_file, "Store matched earlier value, "
2167 		 "value numbering store vdefs to matching vuses.\n");
2168 
2169       def = gimple_vdef (stmt);
2170       use = gimple_vuse (stmt);
2171 
2172       VN_INFO (def)->use_processed = true;
2173       changed |= set_ssa_val_to (def, SSA_VAL (use));
2174     }
2175 
2176   return changed;
2177 }
2178 
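/* For illustration (hypothetical names and virtual operands): in

     # .MEM_3 = VDEF <.MEM_2>
     a = x_1;
     # .MEM_4 = VDEF <.MEM_3>
     a = x_1;

   the second store finds that the last store to this location stored
   the same value, so .MEM_4 is value numbered to the value of .MEM_3
   and the second store can later be removed as redundant.  */
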
2179 /* Visit and value number PHI, return true if the value number
2180    changed.  */
2181 
2182 static bool
2183 visit_phi (gimple phi)
2184 {
2185   bool changed = false;
2186   tree result;
2187   tree sameval = VN_TOP;
2188   bool allsame = true;
2189   unsigned i;
2190 
2191   /* TODO: We could check for this in init_scc_vn, and replace this
2192      with a gcc_assert.  */
2193   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2194     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2195 
2196   /* See if all non-TOP arguments have the same value.  TOP is
2197      equivalent to everything, so we can ignore it.  */
2198   for (i = 0; i < gimple_phi_num_args (phi); i++)
2199     {
2200       tree def = PHI_ARG_DEF (phi, i);
2201 
2202       if (TREE_CODE (def) == SSA_NAME)
2203 	def = SSA_VAL (def);
2204       if (def == VN_TOP)
2205 	continue;
2206       if (sameval == VN_TOP)
2207 	{
2208 	  sameval = def;
2209 	}
2210       else
2211 	{
2212 	  if (!expressions_equal_p (def, sameval))
2213 	    {
2214 	      allsame = false;
2215 	      break;
2216 	    }
2217 	}
2218     }
2219 
2220   /* If all value numbered to the same value, the phi node has that
2221      value.  */
2222   if (allsame)
2223     {
2224       if (is_gimple_min_invariant (sameval))
2225 	{
2226 	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
2227 	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
2228 	}
2229       else
2230 	{
2231 	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
2232 	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
2233 	}
2234 
2235       if (TREE_CODE (sameval) == SSA_NAME)
2236 	return visit_copy (PHI_RESULT (phi), sameval);
2237 
2238       return set_ssa_val_to (PHI_RESULT (phi), sameval);
2239     }
2240 
2241   /* Otherwise, see if it is equivalent to a phi node in this block.  */
2242   result = vn_phi_lookup (phi);
2243   if (result)
2244     {
2245       if (TREE_CODE (result) == SSA_NAME)
2246 	changed = visit_copy (PHI_RESULT (phi), result);
2247       else
2248 	changed = set_ssa_val_to (PHI_RESULT (phi), result);
2249     }
2250   else
2251     {
2252       vn_phi_insert (phi, PHI_RESULT (phi));
2253       VN_INFO (PHI_RESULT (phi))->has_constants = false;
2254       VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2255       changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2256     }
2257 
2258   return changed;
2259 }
2260 
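/* Illustrative examples (hypothetical names): x_4 = PHI <a_1(2), a_1(3)>
   value numbers x_4 to a_1.  So does x_4 = PHI <a_1(2), VN_TOP(3)>,
   because VN_TOP arguments -- such as a not-yet-visited back edge
   value during the optimistic iteration -- are equivalent to
   everything.  */
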
2261 /* Return true if EXPR contains constants.  */
2262 
2263 static bool
2264 expr_has_constants (tree expr)
2265 {
2266   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2267     {
2268     case tcc_unary:
2269       return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2270 
2271     case tcc_binary:
2272       return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2273 	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2274       /* Constants inside reference ops are rarely interesting, but
2275 	 it can take a lot of looking to find them.  */
2276     case tcc_reference:
2277     case tcc_declaration:
2278       return false;
2279     default:
2280       return is_gimple_min_invariant (expr);
2281     }
2282   return false;
2283 }
2284 
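/* E.g. (illustrative): a_1 + 4 has constants, a_1 + b_2 does not, and
   references and declarations are never considered interesting.  */
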
2285 /* Return true if STMT contains constants.  */
2286 
2287 static bool
2288 stmt_has_constants (gimple stmt)
2289 {
2290   if (gimple_code (stmt) != GIMPLE_ASSIGN)
2291     return false;
2292 
2293   switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2294     {
2295     case GIMPLE_UNARY_RHS:
2296       return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2297 
2298     case GIMPLE_BINARY_RHS:
2299       return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2300 	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2301     case GIMPLE_SINGLE_RHS:
2302       /* Constants inside reference ops are rarely interesting, but
2303 	 it can take a lot of looking to find them.  */
2304       return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2305     default:
2306       gcc_unreachable ();
2307     }
2308   return false;
2309 }
2310 
2311 /* Replace SSA_NAMEs in EXPR with their value numbers and return the
2312    result.  This is performed in place.  */
2314 
2315 static tree
2316 valueize_expr (tree expr)
2317 {
2318   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2319     {
2320     case tcc_unary:
2321       if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2322 	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2323 	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2324       break;
2325     case tcc_binary:
2326       if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2327 	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2328 	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2329       if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2330 	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2331 	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2332       break;
2333     default:
2334       break;
2335     }
2336   return expr;
2337 }
2338 
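/* For example (made-up names): if SSA_VAL (b_2) is the constant 5,
   valueize_expr rewrites the expression a_1 + b_2 into a_1 + 5 in
   place, which is what enables the constant folding in the
   simplification routines below.  */
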
2339 /* Simplify the binary expression RHS, and return the result if
2340    simplified. */
2341 
2342 static tree
2343 simplify_binary_expression (gimple stmt)
2344 {
2345   tree result = NULL_TREE;
2346   tree op0 = gimple_assign_rhs1 (stmt);
2347   tree op1 = gimple_assign_rhs2 (stmt);
2348 
2349   /* This will not catch every single case we could combine, but will
2350      catch those with constants.  The goal here is to simultaneously
2351      combine constants between expressions, but avoid infinite
2352      expansion of expressions during simplification.  */
2353   if (TREE_CODE (op0) == SSA_NAME)
2354     {
2355       if (VN_INFO (op0)->has_constants
2356 	  || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2357 	op0 = valueize_expr (vn_get_expr_for (op0));
2358       else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2359 	op0 = SSA_VAL (op0);
2360     }
2361 
2362   if (TREE_CODE (op1) == SSA_NAME)
2363     {
2364       if (VN_INFO (op1)->has_constants)
2365 	op1 = valueize_expr (vn_get_expr_for (op1));
2366       else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2367 	op1 = SSA_VAL (op1);
2368     }
2369 
2370   /* Avoid folding if nothing changed.  */
2371   if (op0 == gimple_assign_rhs1 (stmt)
2372       && op1 == gimple_assign_rhs2 (stmt))
2373     return NULL_TREE;
2374 
2375   fold_defer_overflow_warnings ();
2376 
2377   result = fold_binary (gimple_assign_rhs_code (stmt),
2378 		        gimple_expr_type (stmt), op0, op1);
2379   if (result)
2380     STRIP_USELESS_TYPE_CONVERSION (result);
2381 
2382   fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2383 				  stmt, 0);
2384 
2385   /* Make sure result is not a complex expression consisting
2386      of operators of operators (i.e. (a + b) + (a + c)).
2387      Otherwise, we will end up with unbounded expressions if
2388      fold does anything at all.  */
2389   if (result && valid_gimple_rhs_p (result))
2390     return result;
2391 
2392   return NULL_TREE;
2393 }
2394 
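/* A sketch of the constant combining this enables (hypothetical
   names): given

     t_2 = x_1 + 7;
     y_3 = t_2 + 5;

   t_2 has a constant-filled expression, so for y_3 we hand
   (x_1 + 7) + 5 to fold_binary, which combines it to x_1 + 12.  */
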
2395 /* Simplify the unary expression RHS, and return the result if
2396    simplified. */
2397 
2398 static tree
2399 simplify_unary_expression (gimple stmt)
2400 {
2401   tree result = NULL_TREE;
2402   tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2403 
2404   /* We handle some tcc_reference codes here that are all
2405      GIMPLE_ASSIGN_SINGLE codes.  */
2406   if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2407       || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2408       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2409     op0 = TREE_OPERAND (op0, 0);
2410 
2411   if (TREE_CODE (op0) != SSA_NAME)
2412     return NULL_TREE;
2413 
2414   orig_op0 = op0;
2415   if (VN_INFO (op0)->has_constants)
2416     op0 = valueize_expr (vn_get_expr_for (op0));
2417   else if (gimple_assign_cast_p (stmt)
2418 	   || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2419 	   || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2420 	   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2421     {
2422       /* We want to do tree-combining on conversion-like expressions.
2423          Make sure we feed only SSA_NAMEs or constants to fold though.  */
2424       tree tem = valueize_expr (vn_get_expr_for (op0));
2425       if (UNARY_CLASS_P (tem)
2426 	  || BINARY_CLASS_P (tem)
2427 	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2428 	  || TREE_CODE (tem) == SSA_NAME
2429 	  || is_gimple_min_invariant (tem))
2430 	op0 = tem;
2431     }
2432 
2433   /* Avoid folding if nothing changed.  */
2434   if (op0 == orig_op0)
2435     return NULL_TREE;
2436 
2437   result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2438 				       gimple_expr_type (stmt), op0);
2439   if (result)
2440     {
2441       STRIP_USELESS_TYPE_CONVERSION (result);
2442       if (valid_gimple_rhs_p (result))
2443         return result;
2444     }
2445 
2446   return NULL_TREE;
2447 }
2448 
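/* A sketch of the conversion combining done above (hypothetical
   names, assuming int and unsigned int have the same precision):
   given

     u_2 = (unsigned int) i_1;
     j_3 = (int) u_2;

   the expression recorded for u_2 is fed to fold, which reduces
   (int) (unsigned int) i_1 to plain i_1.  */
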
2449 /* Try to simplify RHS using equivalences and constant folding.  */
2450 
2451 static tree
2452 try_to_simplify (gimple stmt)
2453 {
2454   tree tem;
2455 
2456   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
2457      in this case; there is no point in doing extra work.  */
2458   if (gimple_assign_copy_p (stmt)
2459       && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2460     return NULL_TREE;
2461 
2462   switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2463     {
2464     case tcc_declaration:
2465       tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2466       if (tem)
2467 	return tem;
2468       break;
2469 
2470     case tcc_reference:
2471       /* Do not do full-blown reference lookup here, but simplify
2472 	 reads from constant aggregates.  */
2473       tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2474       if (tem)
2475 	return tem;
2476 
2477       /* Fallthrough for some codes that can operate on registers.  */
2478       if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2479 	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2480 	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2481 	break;
2482       /* We could do a little more with unary ops, if they expand
2483 	 into binary ops, but it's debatable whether it is worth it. */
2484     case tcc_unary:
2485       return simplify_unary_expression (stmt);
2486       break;
2487     case tcc_comparison:
2488     case tcc_binary:
2489       return simplify_binary_expression (stmt);
2490       break;
2491     default:
2492       break;
2493     }
2494 
2495   return NULL_TREE;
2496 }
2497 
2498 /* Visit and value number USE, return true if the value number
2499    changed. */
2500 
2501 static bool
2502 visit_use (tree use)
2503 {
2504   bool changed = false;
2505   gimple stmt = SSA_NAME_DEF_STMT (use);
2506 
2507   VN_INFO (use)->use_processed = true;
2508 
2509   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2510   if (dump_file && (dump_flags & TDF_DETAILS)
2511       && !SSA_NAME_IS_DEFAULT_DEF (use))
2512     {
2513       fprintf (dump_file, "Value numbering ");
2514       print_generic_expr (dump_file, use, 0);
2515       fprintf (dump_file, " stmt = ");
2516       print_gimple_stmt (dump_file, stmt, 0, 0);
2517     }
2518 
2519   /* Handle uninitialized uses.  */
2520   if (SSA_NAME_IS_DEFAULT_DEF (use))
2521     changed = set_ssa_val_to (use, use);
2522   else
2523     {
2524       if (gimple_code (stmt) == GIMPLE_PHI)
2525 	changed = visit_phi (stmt);
2526       else if (!gimple_has_lhs (stmt)
2527 	       || gimple_has_volatile_ops (stmt)
2528 	       || stmt_could_throw_p (stmt))
2529 	changed = defs_to_varying (stmt);
2530       else if (is_gimple_assign (stmt))
2531 	{
2532 	  tree lhs = gimple_assign_lhs (stmt);
2533 	  tree simplified;
2534 
2535 	  /* Shortcut for copies. Simplifying copies is pointless,
2536 	     since we copy the expression and value they represent.  */
2537 	  if (gimple_assign_copy_p (stmt)
2538 	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2539 	      && TREE_CODE (lhs) == SSA_NAME)
2540 	    {
2541 	      changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2542 	      goto done;
2543 	    }
2544 	  simplified = try_to_simplify (stmt);
2545 	  if (simplified)
2546 	    {
2547 	      if (dump_file && (dump_flags & TDF_DETAILS))
2548 		{
2549 		  fprintf (dump_file, "RHS ");
2550 		  print_gimple_expr (dump_file, stmt, 0, 0);
2551 		  fprintf (dump_file, " simplified to ");
2552 		  print_generic_expr (dump_file, simplified, 0);
2553 		  if (TREE_CODE (lhs) == SSA_NAME)
2554 		    fprintf (dump_file, " has constants %d\n",
2555 			     expr_has_constants (simplified));
2556 		  else
2557 		    fprintf (dump_file, "\n");
2558 		}
2559 	    }
2560 	  /* Setting value numbers to constants will occasionally
2561 	     screw up phi congruence because constants are not
2562 	     uniquely associated with a single ssa name that can be
2563 	     looked up.  */
2564 	  if (simplified
2565 	      && is_gimple_min_invariant (simplified)
2566 	      && TREE_CODE (lhs) == SSA_NAME)
2567 	    {
2568 	      VN_INFO (lhs)->expr = simplified;
2569 	      VN_INFO (lhs)->has_constants = true;
2570 	      changed = set_ssa_val_to (lhs, simplified);
2571 	      goto done;
2572 	    }
2573 	  else if (simplified
2574 		   && TREE_CODE (simplified) == SSA_NAME
2575 		   && TREE_CODE (lhs) == SSA_NAME)
2576 	    {
2577 	      changed = visit_copy (lhs, simplified);
2578 	      goto done;
2579 	    }
2580 	  else if (simplified)
2581 	    {
2582 	      if (TREE_CODE (lhs) == SSA_NAME)
2583 		{
2584 		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2585 		  /* We have to unshare the expression or else
2586 		     valueizing may change the IL stream.  */
2587 		  VN_INFO (lhs)->expr = unshare_expr (simplified);
2588 		}
2589 	    }
2590 	  else if (stmt_has_constants (stmt)
2591 		   && TREE_CODE (lhs) == SSA_NAME)
2592 	    VN_INFO (lhs)->has_constants = true;
2593 	  else if (TREE_CODE (lhs) == SSA_NAME)
2594 	    {
2595 	      /* We reset expr and constantness here because we may
2596 		 have been value numbering optimistically, and
2597 		 iterating. They may become non-constant in this case,
2598 		 even if they were optimistically constant. */
2599 
2600 	      VN_INFO (lhs)->has_constants = false;
2601 	      VN_INFO (lhs)->expr = NULL_TREE;
2602 	    }
2603 
2604 	  if ((TREE_CODE (lhs) == SSA_NAME
2605 	       /* We can substitute SSA_NAMEs that are live over
2606 		  abnormal edges with their constant value.  */
2607 	       && !(gimple_assign_copy_p (stmt)
2608 		    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2609 	       && !(simplified
2610 		    && is_gimple_min_invariant (simplified))
2611 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2612 	      /* Stores or copies from SSA_NAMEs that are live over
2613 		 abnormal edges are a problem.  */
2614 	      || (gimple_assign_single_p (stmt)
2615 		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2616 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2617 	    changed = defs_to_varying (stmt);
2618 	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2619 	    {
2620 	      changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2621 	    }
2622 	  else if (TREE_CODE (lhs) == SSA_NAME)
2623 	    {
2624 	      if ((gimple_assign_copy_p (stmt)
2625 		   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2626 		  || (simplified
2627 		      && is_gimple_min_invariant (simplified)))
2628 		{
2629 		  VN_INFO (lhs)->has_constants = true;
2630 		  if (simplified)
2631 		    changed = set_ssa_val_to (lhs, simplified);
2632 		  else
2633 		    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2634 		}
2635 	      else
2636 		{
2637 		  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2638 		    {
2639 		    case GIMPLE_UNARY_RHS:
2640 		      changed = visit_unary_op (lhs, stmt);
2641 		      break;
2642 		    case GIMPLE_BINARY_RHS:
2643 		      changed = visit_binary_op (lhs, stmt);
2644 		      break;
2645 		    case GIMPLE_SINGLE_RHS:
2646 		      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2647 			{
2648 			case tcc_reference:
2649 			  /* VOP-less references can go through the unary case.  */
2650 			  if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2651 			       || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2652 			       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2653 			      && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2654 			    {
2655 			      changed = visit_unary_op (lhs, stmt);
2656 			      break;
2657 			    }
2658 			  /* Fallthrough.  */
2659 			case tcc_declaration:
2660 			  changed = visit_reference_op_load
2661 			      (lhs, gimple_assign_rhs1 (stmt), stmt);
2662 			  break;
2663 			case tcc_expression:
2664 			  if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2665 			    {
2666 			      changed = visit_unary_op (lhs, stmt);
2667 			      break;
2668 			    }
2669 			  /* Fallthrough.  */
2670 			default:
2671 			  changed = defs_to_varying (stmt);
2672 			}
2673 		      break;
2674 		    default:
2675 		      changed = defs_to_varying (stmt);
2676 		      break;
2677 		    }
2678 		}
2679 	    }
2680 	  else
2681 	    changed = defs_to_varying (stmt);
2682 	}
2683       else if (is_gimple_call (stmt))
2684 	{
2685 	  tree lhs = gimple_call_lhs (stmt);
2686 
2687 	  /* ???  We could try to simplify calls.  */
2688 
2689 	  if (stmt_has_constants (stmt)
2690 	      && TREE_CODE (lhs) == SSA_NAME)
2691 	    VN_INFO (lhs)->has_constants = true;
2692 	  else if (TREE_CODE (lhs) == SSA_NAME)
2693 	    {
2694 	      /* We reset expr and constantness here because we may
2695 		 have been value numbering optimistically, and
2696 		 iterating. They may become non-constant in this case,
2697 		 even if they were optimistically constant. */
2698 	      VN_INFO (lhs)->has_constants = false;
2699 	      VN_INFO (lhs)->expr = NULL_TREE;
2700 	    }
2701 
2702 	  if (TREE_CODE (lhs) == SSA_NAME
2703 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2704 	    changed = defs_to_varying (stmt);
2705 	  /* ???  We should handle stores from calls.  */
2706 	  else if (TREE_CODE (lhs) == SSA_NAME)
2707 	    {
2708 	      if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2709 		changed = visit_reference_op_call (lhs, stmt);
2710 	      else
2711 		changed = defs_to_varying (stmt);
2712 	    }
2713 	  else
2714 	    changed = defs_to_varying (stmt);
2715 	}
2716     }
2717  done:
2718   return changed;
2719 }
2720 
2721 /* Compare two operands by reverse postorder index.  */
2722 
2723 static int
2724 compare_ops (const void *pa, const void *pb)
2725 {
2726   const tree opa = *((const tree *)pa);
2727   const tree opb = *((const tree *)pb);
2728   gimple opstmta = SSA_NAME_DEF_STMT (opa);
2729   gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2730   basic_block bba;
2731   basic_block bbb;
2732 
2733   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2734     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2735   else if (gimple_nop_p (opstmta))
2736     return -1;
2737   else if (gimple_nop_p (opstmtb))
2738     return 1;
2739 
2740   bba = gimple_bb (opstmta);
2741   bbb = gimple_bb (opstmtb);
2742 
2743   if (!bba && !bbb)
2744     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2745   else if (!bba)
2746     return -1;
2747   else if (!bbb)
2748     return 1;
2749 
2750   if (bba == bbb)
2751     {
2752       if (gimple_code (opstmta) == GIMPLE_PHI
2753 	  && gimple_code (opstmtb) == GIMPLE_PHI)
2754 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2755       else if (gimple_code (opstmta) == GIMPLE_PHI)
2756 	return -1;
2757       else if (gimple_code (opstmtb) == GIMPLE_PHI)
2758 	return 1;
2759       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2760         return gimple_uid (opstmta) - gimple_uid (opstmtb);
2761       else
2762 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2763     }
2764   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2765 }
2766 
2767 /* Sort an array containing members of a strongly connected component
2768    SCC so that the members are ordered by RPO number.
2769    This means that when the sort is complete, iterating through the
2770    array will give you the members in RPO order.  */
2771 
2772 static void
2773 sort_scc (VEC (tree, heap) *scc)
2774 {
2775   qsort (VEC_address (tree, scc),
2776 	 VEC_length (tree, scc),
2777 	 sizeof (tree),
2778 	 compare_ops);
2779 }
2780 
2781 /* Insert the no-longer-used nary *ENTRY into the current hash table.  */
2782 
2783 static int
2784 copy_nary (void **entry, void *data ATTRIBUTE_UNUSED)
2785 {
2786   vn_nary_op_t onary = (vn_nary_op_t) *entry;
2787   size_t size = (sizeof (struct vn_nary_op_s)
2788 		 - sizeof (tree) * (4 - onary->length));
2789   vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
2790 						    size);
2791   void **slot;
2792   memcpy (nary, onary, size);
2793   slot = htab_find_slot_with_hash (current_info->nary, nary, nary->hashcode,
2794 				   INSERT);
2795   gcc_assert (!*slot);
2796   *slot = nary;
2797   return 1;
2798 }
2799 
2800 /* Insert the no-longer-used phi *ENTRY into the current hash table.  */
2801 
2802 static int
2803 copy_phis (void **entry, void *data ATTRIBUTE_UNUSED)
2804 {
2805   vn_phi_t ophi = (vn_phi_t) *entry;
2806   vn_phi_t phi = (vn_phi_t) pool_alloc (current_info->phis_pool);
2807   void **slot;
2808   memcpy (phi, ophi, sizeof (*phi));
2809   ophi->phiargs = NULL;
2810   slot = htab_find_slot_with_hash (current_info->phis, phi, phi->hashcode,
2811 				   INSERT);
2812   *slot = phi;
2813   return 1;
2814 }
2815 
2816 /* Insert the no-longer-used reference *ENTRY into the current hash table.  */
2817 
2818 static int
2819 copy_references (void **entry, void *data ATTRIBUTE_UNUSED)
2820 {
2821   vn_reference_t oref = (vn_reference_t) *entry;
2822   vn_reference_t ref;
2823   void **slot;
2824   ref = (vn_reference_t) pool_alloc (current_info->references_pool);
2825   memcpy (ref, oref, sizeof (*ref));
2826   oref->operands = NULL;
2827   slot = htab_find_slot_with_hash (current_info->references, ref, ref->hashcode,
2828 				   INSERT);
2829   if (*slot)
2830     free_reference (*slot);
2831   *slot = ref;
2832   return 1;
2833 }
2834 
2835 /* Process a strongly connected component in the SSA graph.  */
2836 
2837 static void
2838 process_scc (VEC (tree, heap) *scc)
2839 {
2840   /* If the SCC has a single member, just visit it.  */
2841 
2842   if (VEC_length (tree, scc) == 1)
2843     {
2844       tree use = VEC_index (tree, scc, 0);
2845       if (!VN_INFO (use)->use_processed)
2846 	visit_use (use);
2847     }
2848   else
2849     {
2850       tree var;
2851       unsigned int i;
2852       unsigned int iterations = 0;
2853       bool changed = true;
2854 
2855       /* Iterate over the SCC with the optimistic table until it stops
2856 	 changing.  */
2857       current_info = optimistic_info;
2858       while (changed)
2859 	{
2860 	  changed = false;
2861 	  iterations++;
2862 	  /* As we are value-numbering optimistically we have to
2863 	     clear the expression tables and the simplified expressions
2864 	     in each iteration until we converge.  */
2865 	  htab_empty (optimistic_info->nary);
2866 	  htab_empty (optimistic_info->phis);
2867 	  htab_empty (optimistic_info->references);
2868 	  obstack_free (&optimistic_info->nary_obstack, NULL);
2869 	  gcc_obstack_init (&optimistic_info->nary_obstack);
2870 	  empty_alloc_pool (optimistic_info->phis_pool);
2871 	  empty_alloc_pool (optimistic_info->references_pool);
2872 	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2873 	    VN_INFO (var)->expr = NULL_TREE;
2874 	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2875 	    changed |= visit_use (var);
2876 	}
2877 
2878       statistics_histogram_event (cfun, "SCC iterations", iterations);
2879 
2880       /* Finally, copy the contents of the no longer used optimistic
2881 	 table to the valid table.  */
2882       current_info = valid_info;
2883       htab_traverse (optimistic_info->nary, copy_nary, NULL);
2884       htab_traverse (optimistic_info->phis, copy_phis, NULL);
2885       htab_traverse (optimistic_info->references, copy_references, NULL);
2886     }
2887 }
2888 
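/* An illustrative iteration (hypothetical names): for the loop cycle

     i_1 = PHI <0(2), i_2(4)>
     i_2 = i_1 + 0;

   the first optimistic pass sees VN_TOP for the back edge argument
   and value numbers i_1 to 0, then i_2 to 0; the second pass
   re-visits the PHI with i_2's new value, nothing changes, and the
   converged result is copied to the valid table.  */
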
2889 DEF_VEC_O(ssa_op_iter);
2890 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2891 
2892 /* Pop the components of the found SCC for NAME off the SCC stack
2893    and process them.  Returns true if all went well, false if
2894    we run into resource limits.  */
2895 
2896 static bool
2897 extract_and_process_scc_for_name (tree name)
2898 {
2899   VEC (tree, heap) *scc = NULL;
2900   tree x;
2901 
2902   /* Found an SCC, pop the components off the SCC stack and
2903      process them.  */
2904   do
2905     {
2906       x = VEC_pop (tree, sccstack);
2907 
2908       VN_INFO (x)->on_sccstack = false;
2909       VEC_safe_push (tree, heap, scc, x);
2910     } while (x != name);
2911 
2912   /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
2913   if (VEC_length (tree, scc)
2914       > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2915     {
2916       if (dump_file)
2917 	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2918 		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2919 		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2920       return false;
2921     }
2922 
2923   if (VEC_length (tree, scc) > 1)
2924     sort_scc (scc);
2925 
2926   if (dump_file && (dump_flags & TDF_DETAILS))
2927     print_scc (dump_file, scc);
2928 
2929   process_scc (scc);
2930 
2931   VEC_free (tree, heap, scc);
2932 
2933   return true;
2934 }
2935 
2936 /* Depth first search on NAME to discover and process SCC's in the SSA
2937    graph.
2938    Execution of this algorithm relies on the fact that the SCC's are
2939    popped off the stack in topological order.
2940    Returns true if successful, false if we stopped processing SCC's due
2941    to resource constraints.  */
2942 
2943 static bool
2944 DFS (tree name)
2945 {
2946   VEC(ssa_op_iter, heap) *itervec = NULL;
2947   VEC(tree, heap) *namevec = NULL;
2948   use_operand_p usep = NULL;
2949   gimple defstmt;
2950   tree use;
2951   ssa_op_iter iter;
2952 
2953 start_over:
2954   /* SCC info */
2955   VN_INFO (name)->dfsnum = next_dfs_num++;
2956   VN_INFO (name)->visited = true;
2957   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2958 
2959   VEC_safe_push (tree, heap, sccstack, name);
2960   VN_INFO (name)->on_sccstack = true;
2961   defstmt = SSA_NAME_DEF_STMT (name);
2962 
2963   /* Recursively DFS on our operands, looking for SCC's.  */
2964   if (!gimple_nop_p (defstmt))
2965     {
2966       /* Push a new iterator.  */
2967       if (gimple_code (defstmt) == GIMPLE_PHI)
2968 	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2969       else
2970 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2971     }
2972   else
2973     clear_and_done_ssa_iter (&iter);
2974 
2975   while (1)
2976     {
2977       /* If we are done processing uses of a name, go up the stack
2978 	 of iterators and process SCCs as we found them.  */
2979       if (op_iter_done (&iter))
2980 	{
2981 	  /* See if we found an SCC.  */
2982 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2983 	    if (!extract_and_process_scc_for_name (name))
2984 	      {
2985 		VEC_free (tree, heap, namevec);
2986 		VEC_free (ssa_op_iter, heap, itervec);
2987 		return false;
2988 	      }
2989 
2990 	  /* Check if we are done.  */
2991 	  if (VEC_empty (tree, namevec))
2992 	    {
2993 	      VEC_free (tree, heap, namevec);
2994 	      VEC_free (ssa_op_iter, heap, itervec);
2995 	      return true;
2996 	    }
2997 
2998 	  /* Restore the last use walker and continue walking there.  */
2999 	  use = name;
3000 	  name = VEC_pop (tree, namevec);
3001 	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3002 		  sizeof (ssa_op_iter));
3003 	  VEC_pop (ssa_op_iter, itervec);
3004 	  goto continue_walking;
3005 	}
3006 
3007       use = USE_FROM_PTR (usep);
3008 
3009       /* Since we handle phi nodes, we will sometimes get
3010 	 invariants in the use expression.  */
3011       if (TREE_CODE (use) == SSA_NAME)
3012 	{
3013 	  if (! (VN_INFO (use)->visited))
3014 	    {
3015 	      /* Recurse by pushing the current use walking state on
3016 		 the stack and starting over.  */
3017 	      VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
3018 	      VEC_safe_push (tree, heap, namevec, name);
3019 	      name = use;
3020 	      goto start_over;
3021 
3022 continue_walking:
3023 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3024 					 VN_INFO (use)->low);
3025 	    }
3026 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3027 	      && VN_INFO (use)->on_sccstack)
3028 	    {
3029 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3030 					 VN_INFO (name)->low);
3031 	    }
3032 	}
3033 
3034       usep = op_iter_next_use (&iter);
3035     }
3036 }
3037 
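/* A sketch of the Tarjan walk above on the cycle (hypothetical names)

     i_1 = PHI <0(2), i_2(4)>
     i_2 = i_1 + 1;

   DFS (i_1) assigns dfsnum 1 and recurses into the use i_2 (dfsnum 2),
   which in turn uses i_1; since i_1 is still on the SCC stack,
   low (i_2) drops to 1.  Back at i_1, low == dfsnum, so {i_1, i_2} is
   popped and processed as a single SCC.  */
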
3038 /* Allocate a value number table.  */
3039 
3040 static void
3041 allocate_vn_table (vn_tables_t table)
3042 {
3043   table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3044   table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3045   table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3046 				   free_reference);
3047 
3048   gcc_obstack_init (&table->nary_obstack);
3049   table->phis_pool = create_alloc_pool ("VN phis",
3050 					sizeof (struct vn_phi_s),
3051 					30);
3052   table->references_pool = create_alloc_pool ("VN references",
3053 					      sizeof (struct vn_reference_s),
3054 					      30);
3055 }
3056 
3057 /* Free a value number table.  */
3058 
3059 static void
3060 free_vn_table (vn_tables_t table)
3061 {
3062   htab_delete (table->phis);
3063   htab_delete (table->nary);
3064   htab_delete (table->references);
3065   obstack_free (&table->nary_obstack, NULL);
3066   free_alloc_pool (table->phis_pool);
3067   free_alloc_pool (table->references_pool);
3068 }
3069 
3070 static void
3071 init_scc_vn (void)
3072 {
3073   size_t i;
3074   int j;
3075   int *rpo_numbers_temp;
3076 
3077   calculate_dominance_info (CDI_DOMINATORS);
3078   sccstack = NULL;
3079   constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3080 				  free);
3081 
3082   constant_value_ids = BITMAP_ALLOC (NULL);
3083 
3084   next_dfs_num = 1;
3085   next_value_id = 1;
3086 
3087   vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3088   /* VEC_alloc doesn't actually grow it to the right size; it just
3089      preallocates the space to do so.  */
3090   VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3091   gcc_obstack_init (&vn_ssa_aux_obstack);
3092 
3093   shared_lookup_phiargs = NULL;
3094   shared_lookup_references = NULL;
3095   rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3096   rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3097   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3098 
3099   /* rpo_numbers_temp is the RPO ordering: rpo_numbers_temp[i] = bb
3100      means that the i'th block in RPO order is bb.  We want to map
3101      block indices to RPO numbers, so we need to invert this array.  */
3102   for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3103     rpo_numbers[rpo_numbers_temp[j]] = j;
3104 
3105   XDELETE (rpo_numbers_temp);
3106 
3107   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3108 
3109   /* Create the VN_INFO structures, and initialize value numbers to
3110      TOP.  */
3111   for (i = 0; i < num_ssa_names; i++)
3112     {
3113       tree name = ssa_name (i);
3114       if (name)
3115 	{
3116 	  VN_INFO_GET (name)->valnum = VN_TOP;
3117 	  VN_INFO (name)->expr = NULL_TREE;
3118 	  VN_INFO (name)->value_id = 0;
3119 	}
3120     }
3121 
3122   renumber_gimple_stmt_uids ();
3123 
3124   /* Create the valid and optimistic value numbering tables.  */
3125   valid_info = XCNEW (struct vn_tables_s);
3126   allocate_vn_table (valid_info);
3127   optimistic_info = XCNEW (struct vn_tables_s);
3128   allocate_vn_table (optimistic_info);
3129 }
3130 
3131 void
3132 free_scc_vn (void)
3133 {
3134   size_t i;
3135 
3136   htab_delete (constant_to_value_id);
3137   BITMAP_FREE (constant_value_ids);
3138   VEC_free (tree, heap, shared_lookup_phiargs);
3139   VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3140   XDELETEVEC (rpo_numbers);
3141 
3142   for (i = 0; i < num_ssa_names; i++)
3143     {
3144       tree name = ssa_name (i);
3145       if (name
3146 	  && VN_INFO (name)->needs_insertion)
3147 	release_ssa_name (name);
3148     }
3149   obstack_free (&vn_ssa_aux_obstack, NULL);
3150   VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3151 
3152   VEC_free (tree, heap, sccstack);
3153   free_vn_table (valid_info);
3154   XDELETE (valid_info);
3155   free_vn_table (optimistic_info);
3156   XDELETE (optimistic_info);
3157 }
3158 
3159 /* Set the value ids in the valid hash tables.  */
3160 
3161 static void
3162 set_hashtable_value_ids (void)
3163 {
3164   htab_iterator hi;
3165   vn_nary_op_t vno;
3166   vn_reference_t vr;
3167   vn_phi_t vp;
3168 
3169   /* Now set the value ids of the things we had put in the hash
3170      table.  */
3171 
3172   FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3173 			 vno, vn_nary_op_t, hi)
3174     {
3175       if (vno->result)
3176 	{
3177 	  if (TREE_CODE (vno->result) == SSA_NAME)
3178 	    vno->value_id = VN_INFO (vno->result)->value_id;
3179 	  else if (is_gimple_min_invariant (vno->result))
3180 	    vno->value_id = get_or_alloc_constant_value_id (vno->result);
3181 	}
3182     }
3183 
3184   FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3185 			 vp, vn_phi_t, hi)
3186     {
3187       if (vp->result)
3188 	{
3189 	  if (TREE_CODE (vp->result) == SSA_NAME)
3190 	    vp->value_id = VN_INFO (vp->result)->value_id;
3191 	  else if (is_gimple_min_invariant (vp->result))
3192 	    vp->value_id = get_or_alloc_constant_value_id (vp->result);
3193 	}
3194     }
3195 
3196   FOR_EACH_HTAB_ELEMENT (valid_info->references,
3197 			 vr, vn_reference_t, hi)
3198     {
3199       if (vr->result)
3200 	{
3201 	  if (TREE_CODE (vr->result) == SSA_NAME)
3202 	    vr->value_id = VN_INFO (vr->result)->value_id;
3203 	  else if (is_gimple_min_invariant (vr->result))
3204 	    vr->value_id = get_or_alloc_constant_value_id (vr->result);
3205 	}
3206     }
3207 }
3208 
3209 /* Do SCCVN.  Returns true if it finished, false if we bailed out
3210    due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
3211    how the alias oracle is used when walking memory references
3212    during the VN process.  */
3212 
3213 bool
3214 run_scc_vn (bool may_insert_arg, vn_lookup_kind default_vn_walk_kind_)
3215 {
3216   size_t i;
3217   tree param;
3218   bool changed = true;
3219 
3220   may_insert = may_insert_arg;
3221   default_vn_walk_kind = default_vn_walk_kind_;
3222 
3223   init_scc_vn ();
3224   current_info = valid_info;
3225 
3226   for (param = DECL_ARGUMENTS (current_function_decl);
3227        param;
3228        param = TREE_CHAIN (param))
3229     {
3230       if (gimple_default_def (cfun, param) != NULL)
3231 	{
3232 	  tree def = gimple_default_def (cfun, param);
3233 	  VN_INFO (def)->valnum = def;
3234 	}
3235     }
3236 
3237   for (i = 1; i < num_ssa_names; ++i)
3238     {
3239       tree name = ssa_name (i);
3240       if (name
3241 	  && VN_INFO (name)->visited == false
3242 	  && !has_zero_uses (name))
3243 	if (!DFS (name))
3244 	  {
3245 	    free_scc_vn ();
3246 	    may_insert = false;
3247 	    return false;
3248 	  }
3249     }
3250 
3251   /* Initialize the value ids.  */
3252 
3253   for (i = 1; i < num_ssa_names; ++i)
3254     {
3255       tree name = ssa_name (i);
3256       vn_ssa_aux_t info;
3257       if (!name)
3258 	continue;
3259       info = VN_INFO (name);
3260       if (info->valnum == name
3261 	  || info->valnum == VN_TOP)
3262 	info->value_id = get_next_value_id ();
3263       else if (is_gimple_min_invariant (info->valnum))
3264 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
3265     }
3266 
3267   /* Propagate until they stop changing.  */
3268   while (changed)
3269     {
3270       changed = false;
3271       for (i = 1; i < num_ssa_names; ++i)
3272 	{
3273 	  tree name = ssa_name (i);
3274 	  vn_ssa_aux_t info;
3275 	  if (!name)
3276 	    continue;
3277 	  info = VN_INFO (name);
3278 	  if (TREE_CODE (info->valnum) == SSA_NAME
3279 	      && info->valnum != name
3280 	      && info->value_id != VN_INFO (info->valnum)->value_id)
3281 	    {
3282 	      changed = true;
3283 	      info->value_id = VN_INFO (info->valnum)->value_id;
3284 	    }
3285 	}
3286     }
3287 
3288   set_hashtable_value_ids ();
3289 
3290   if (dump_file && (dump_flags & TDF_DETAILS))
3291     {
3292       fprintf (dump_file, "Value numbers:\n");
3293       for (i = 0; i < num_ssa_names; i++)
3294 	{
3295 	  tree name = ssa_name (i);
3296 	  if (name
3297 	      && VN_INFO (name)->visited
3298 	      && SSA_VAL (name) != name)
3299 	    {
3300 	      print_generic_expr (dump_file, name, 0);
3301 	      fprintf (dump_file, " = ");
3302 	      print_generic_expr (dump_file, SSA_VAL (name), 0);
3303 	      fprintf (dump_file, "\n");
3304 	    }
3305 	}
3306     }
3307 
3308   may_insert = false;
3309   return true;
3310 }
3311 
3312 /* Return the maximum value id we have ever seen.  */
3313 
3314 unsigned int
3315 get_max_value_id (void)
3316 {
3317   return next_value_id;
3318 }
3319 
3320 /* Return the next unique value id.  */
3321 
3322 unsigned int
3323 get_next_value_id (void)
3324 {
3325   return next_value_id++;
3326 }
3327 
3328 
3329 /* Compare two expressions E1 and E2 and return true if they are equal.  */
3330 
3331 bool
3332 expressions_equal_p (tree e1, tree e2)
3333 {
3334   /* The obvious case.  */
3335   if (e1 == e2)
3336     return true;
3337 
3338   /* If only one of them is null, they cannot be equal.  */
3339   if (!e1 || !e2)
3340     return false;
3341 
3342   /* Now perform the actual comparison.  */
3343   if (TREE_CODE (e1) == TREE_CODE (e2)
3344       && operand_equal_p (e1, e2, OEP_PURE_SAME))
3345     return true;
3346 
3347   return false;
3348 }
3349 
3350 
3351 /* Return true if the nary operation NARY may trap.  This is a copy
3352    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
3353 
3354 bool
3355 vn_nary_may_trap (vn_nary_op_t nary)
3356 {
3357   tree type;
3358   tree rhs2 = NULL_TREE;
3359   bool honor_nans = false;
3360   bool honor_snans = false;
3361   bool fp_operation = false;
3362   bool honor_trapv = false;
3363   bool handled, ret;
3364   unsigned i;
3365 
3366   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3367       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3368       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3369     {
3370       type = nary->type;
3371       fp_operation = FLOAT_TYPE_P (type);
3372       if (fp_operation)
3373 	{
3374 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
3375 	  honor_snans = flag_signaling_nans != 0;
3376 	}
3377       else if (INTEGRAL_TYPE_P (type)
3378 	       && TYPE_OVERFLOW_TRAPS (type))
3379 	honor_trapv = true;
3380     }
3381   if (nary->length >= 2)
3382     rhs2 = nary->op[1];
3383   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3384 				       honor_trapv,
3385 				       honor_nans, honor_snans, rhs2,
3386 				       &handled);
3387   if (handled
3388       && ret)
3389     return true;
3390 
3391   for (i = 0; i < nary->length; ++i)
3392     if (tree_could_trap_p (nary->op[i]))
3393       return true;
3394 
3395   return false;
3396 }
3397