/* SCC value numbering for trees
   Copyright (C) 2006-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight-line code, it is equivalent to a regular hash-based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
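
/* Editorial illustration (not from the paper): consider the SSA cycle

     # i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 0;

   i_1 and i_2 form an SCC in the SSA graph.  Optimistically assuming
   i_2 == i_1 on the first iteration lets the PHI collapse to 0, after
   which i_2 also value-numbers to 0 and the iteration reaches a fixed
   point.  A single non-iterating hash-based pass would have to leave
   both names varying.  */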


static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
bitmap const_parms;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation VNO1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation VP1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure PHI.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VR.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation VR1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      tree tem = SSA_VAL (x);
      /* stmt walking can walk over a backedge and reach code we didn't
	 value-number yet.  */
      if (tem == VN_TOP)
	return x;
      x = tem;
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
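
/* For illustration (hypothetical SSA names): if elimination released
   the VDEF .MEM_7 after value-numbering it to its incoming VUSE .MEM_3,
   vuse_ssa_val (.MEM_7) chases the SSA_VAL chain past the name in the
   free list and yields .MEM_3; it stops early and returns its argument
   when it runs into a not-yet-visited VN_TOP value across a backedge.  */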

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection. */

static unsigned int next_dfs_num;
static vec<tree> sccstack;



/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
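
/* Roughly, the mapping implemented above (illustrative examples only):

     x_1 = y_2 + z_3;              -> VN_NARY
     x_1 = PHI <a_2, b_3>          -> VN_PHI
     x_1 = foo (y_2);              -> VN_REFERENCE
     x_1 = a.b;                    -> VN_REFERENCE
     x_1 = REALPART_EXPR <y_2>;    -> VN_NARY (VOP-less reference)
     x_1 = 42;                     -> VN_CONSTANT  */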

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in. */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
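
/* Note the hashing above is deliberately forgiving: runs of operands
   with known constant offsets are accumulated into a single offset
   before hashing, and an ADDR_EXPR feeding a dereference is hashed by
   its base object.  So, illustratively, MEM[&a + 4] and the decomposed
   a.f with f at byte offset 4 can receive the same hash and are then
   discriminated by vn_reference_eq below.  */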

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2))
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| cfun->after_inlining)
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (! temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
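
/* As an illustration of the decomposition above, a load from s.f with
   s a plain VAR_DECL yields (outermost operation first) roughly

     { COMPONENT_REF <FIELD_DECL f>, MEM_REF <0>, ADDR_EXPR <&s> }

   because the decl is canonicalized to MEM[&s], which lets it unify
   with a valueized MEM[p_1] where p_1 = &s.  */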

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
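
/* A sketch of how the above is consumed (hedged; see
   vn_reference_lookup_3 below for the real uses):

     ao_ref r;
     if (ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands)
	 && !stmt_may_clobber_ref_p_1 (def_stmt, &r))
       ;  // the store cannot affect the reference, keep walking

   i.e. the rebuilt ao_ref feeds the alias oracle when disambiguating a
   translated reference against a dominating store.  */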

/* Copy the operations present in call CALL into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If two calls have a different non-SSA lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  poly_offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      poly_int64 addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant, propagate
         the reference tree from the address operation in case the current
	 dereference isn't offset.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && known_eq (off, 0)
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  /* Make sure to preserve TBAA info.  The only objects not
	     wrapped in MEM_REFs that can have their address taken are
	     STRING_CSTs.  */
	  if (tem.length () >= 2
	      && tem[tem.length () - 2].opcode == MEM_REF)
	    {
	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
	      new_mem_op->op0
		= wide_int_to_tree (TREE_TYPE (mem_op->op0),
				    wi::to_poly_wide (new_mem_op->op0));
	    }
	  else
	    gcc_assert (tem.last ().opcode == STRING_CST);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF
	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return false;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}
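
/* Sketch of the forward propagation performed above, with hypothetical
   SSA names: given

     p_1 = &a.b;
     ... MEM[p_1 + 8] ...

   the SSA use p_1 is replaced by &a and the preceding MEM_REF offset
   becomes offsetof (b) + 8, so the ops describe MEM[&a + (off + 8)].
   The POINTER_PLUS_EXPR case likewise folds a constant pointer
   increment into the MEM_REF offset.  */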

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
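
/* Two hedged examples of what the above can fold: a read such as
   tab[1] from

     static const int tab[2] = { 1, 2 };

   becomes the constant 2 via ctor_for_folding/native_encode_expr, and
   a builtin call with constant arguments like __builtin_strlen ("abc")
   folds to 3 via build_call_expr.  */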

/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
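
/* Note on the hash fixup above: because the VUSE was combined into the
   hash additively (see vn_reference_compute_hash), swapping in a new
   VUSE only requires subtracting the old SSA version and adding the
   new one instead of rehashing the whole operand vector.  */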

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference whose
   value is VALUE, which is either a constant or an SSA name.  */
1625 
1626 static vn_reference_t
1627 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1628 					  alias_set_type set,
1629 					  tree type,
1630 					  vec<vn_reference_op_s,
1631 					        va_heap> operands,
1632 					  tree value)
1633 {
1634   vn_reference_s vr1;
1635   vn_reference_t result;
1636   unsigned value_id;
1637   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1638   vr1.operands = operands;
1639   vr1.type = type;
1640   vr1.set = set;
1641   vr1.hashcode = vn_reference_compute_hash (&vr1);
1642   if (vn_reference_lookup_1 (&vr1, &result))
1643     return result;
1644   if (TREE_CODE (value) == SSA_NAME)
1645     value_id = VN_INFO (value)->value_id;
1646   else
1647     value_id = get_or_alloc_constant_value_id (value);
1648   return vn_reference_insert_pieces (vuse, set, type,
1649 				     operands.copy (), value, value_id);
1650 }
1651 
1652 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1653 static unsigned mprts_hook_cnt;
1654 
1655 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
1656 
1657 static tree
1658 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1659 {
1660   if (!rcode.is_tree_code ())
1661     return NULL_TREE;
1662   tree *ops = ops_;
1663   unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1664   if (rcode == CONSTRUCTOR
1665       /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1666          and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
1667       && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1668     {
1669       length = CONSTRUCTOR_NELTS (ops_[0]);
1670       ops = XALLOCAVEC (tree, length);
1671       for (unsigned i = 0; i < length; ++i)
1672 	ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1673     }
1674   vn_nary_op_t vnresult = NULL;
1675   tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1676 				       type, ops, &vnresult);
1677   /* We can end up endlessly recursing simplifications if the lookup above
1678      presents us with a def-use chain that mirrors the original simplification.
1679      See PR80887 for an example.  Limit successful lookup artificially
1680      to 10 times if we are called as mprts_hook.  */
1681   if (res
1682       && mprts_hook
1683       && --mprts_hook_cnt == 0)
1684     {
1685       if (dump_file && (dump_flags & TDF_DETAILS))
1686 	fprintf (dump_file, "Resetting mprts_hook after too many "
1687 		 "invocations.\n");
1688       mprts_hook = NULL;
1689     }
1690   return res;
1691 }
1692 
1693 /* Return a value-number for RCODE OPS... either by looking up an existing
1694    value-number for the simplified result or by inserting the operation if
1695    INSERT is true.  */
1696 
1697 static tree
1698 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1699 			   bool insert)
1700 {
1701   tree result = NULL_TREE;
1702   /* We will be creating a value number for
1703        RCODE (OPS...).
1704      So first simplify and lookup this expression to see if it
1705      is already available.  */
1706   mprts_hook = vn_lookup_simplify_result;
1707   mprts_hook_cnt = 9;
1708   bool res = false;
1709   switch (TREE_CODE_LENGTH ((tree_code) rcode))
1710     {
1711     case 1:
1712       res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1713       break;
1714     case 2:
1715       res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1716       break;
1717     case 3:
1718       res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1719       break;
1720     }
1721   mprts_hook = NULL;
1722   gimple *new_stmt = NULL;
1723   if (res
1724       && gimple_simplified_result_is_gimple_val (rcode, ops))
1725     /* The expression is already available.  */
1726     result = ops[0];
1727   else
1728     {
1729       tree val = vn_lookup_simplify_result (rcode, type, ops);
1730       if (!val && insert)
1731 	{
1732 	  gimple_seq stmts = NULL;
1733 	  result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1734 	  if (result)
1735 	    {
1736 	      gcc_assert (gimple_seq_singleton_p (stmts));
1737 	      new_stmt = gimple_seq_first_stmt (stmts);
1738 	    }
1739 	}
1740       else
1741 	/* The expression is already available.  */
1742 	result = val;
1743     }
1744   if (new_stmt)
1745     {
1746       /* The expression is not yet available, value-number lhs to
1747 	 the new SSA_NAME we created.  */
1748       /* Initialize value-number information properly.  */
1749       VN_INFO_GET (result)->valnum = result;
1750       VN_INFO (result)->value_id = get_next_value_id ();
1751       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1752 					  new_stmt);
1753       VN_INFO (result)->needs_insertion = true;
1754       /* ???  PRE phi-translation inserts NARYs without corresponding
1755          SSA name result.  Re-use those but set their result according
1756 	 to the stmt we just built.  */
1757       vn_nary_op_t nary = NULL;
1758       vn_nary_op_lookup_stmt (new_stmt, &nary);
1759       if (nary)
1760 	{
1761 	  gcc_assert (nary->result == NULL_TREE);
1762 	  nary->result = gimple_assign_lhs (new_stmt);
1763 	}
1764       /* As all "inserted" statements are singleton SCCs, insert
1765 	 into the valid table.  This is strictly needed to
1766 	 avoid re-generating new value SSA_NAMEs for the same
1767 	 expression during SCC iteration over and over (the
1768 	 optimistic table gets cleared after each iteration).
1769 	 We do not need to insert into the optimistic table, as
1770 	 lookups there will fall back to the valid table.  */
1771       else if (current_info == optimistic_info)
1772 	{
1773 	  current_info = valid_info;
1774 	  vn_nary_op_insert_stmt (new_stmt, result);
1775 	  current_info = optimistic_info;
1776 	}
1777       else
1778 	vn_nary_op_insert_stmt (new_stmt, result);
1779       if (dump_file && (dump_flags & TDF_DETAILS))
1780 	{
1781 	  fprintf (dump_file, "Inserting name ");
1782 	  print_generic_expr (dump_file, result);
1783 	  fprintf (dump_file, " for expression ");
1784 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1785 	  fprintf (dump_file, "\n");
1786 	}
1787     }
1788   return result;
1789 }
1790 
1791 /* Return a value-number for RCODE OPS... either by looking up an existing
1792    value-number for the simplified result or by inserting the operation.  */
1793 
1794 static tree
1795 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1796 {
1797   return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1798 }
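
/* Editorial sketch (not part of the original sources): a caller asking for,
   say, BIT_FIELD_REF <w_1, 32, 0> first gives the match-and-simplify
   machinery a chance to fold it (e.g. to a constant if w_1 is one); failing
   that, the operation is looked up and, since INSERT is true here, a fresh
   SSA name is created and recorded so repeated queries return the same
   name.  */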
1799 
1800 /* Try to simplify the n-ary operation NARY and return its simplified
1801    value if one is available; no new statements are inserted.  */
1802 
1803 tree
1804 vn_nary_simplify (vn_nary_op_t nary)
1805 {
1806   if (nary->length > 3)
1807     return NULL_TREE;
1808   tree ops[3];
1809   memcpy (ops, nary->op, sizeof (tree) * nary->length);
1810   return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1811 }
1812 
1813 
1814 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1815    from the statement defining VUSE and if not successful tries to
1816    translate *REF and *VR through an aggregate copy at the definition
1817    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
1818    of *REF and *VR.  If only disambiguation was performed then
1819    *DISAMBIGUATE_ONLY is set to true.  */
1820 
1821 static void *
1822 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1823 		       bool *disambiguate_only)
1824 {
1825   vn_reference_t vr = (vn_reference_t)vr_;
1826   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1827   tree base = ao_ref_base (ref);
1828   HOST_WIDE_INT offseti, maxsizei;
1829   static vec<vn_reference_op_s> lhs_ops;
1830   ao_ref lhs_ref;
1831   bool lhs_ref_ok = false;
1832   poly_int64 copy_size;
1833 
1834   /* If the reference is based on a parameter that was determined to
1835      point to read-only memory, it doesn't change.  */
1836   if (TREE_CODE (base) == MEM_REF
1837       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1838       && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1839       && bitmap_bit_p (const_parms,
1840 		       SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1841     {
1842       *disambiguate_only = true;
1843       return NULL;
1844     }
1845 
1846   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1847   if (is_gimple_assign (def_stmt))
1848     {
1849       tree lhs = gimple_assign_lhs (def_stmt);
1850       bool valueized_anything = false;
1851       /* Avoid re-allocation overhead.  */
1852       lhs_ops.truncate (0);
1853       copy_reference_ops_from_ref (lhs, &lhs_ops);
1854       lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1855       if (valueized_anything)
1856 	{
1857 	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1858 						      get_alias_set (lhs),
1859 						      TREE_TYPE (lhs), lhs_ops);
1860 	  if (lhs_ref_ok
1861 	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1862 	    {
1863 	      *disambiguate_only = true;
1864 	      return NULL;
1865 	    }
1866 	}
1867       else
1868 	{
1869 	  ao_ref_init (&lhs_ref, lhs);
1870 	  lhs_ref_ok = true;
1871 	}
1872 
1873       /* If we reach a clobbering statement, try to skip it and see if
1874          we find a VN result with exactly the same value as the
1875 	 possible clobber.  In that case we can ignore the clobber
1876 	 and return the found value.
1877 	 Note that we don't need to worry about partially overlapping
1878 	 accesses as we then can use TBAA to disambiguate against the
1879 	 clobbering statement when looking up a load (thus the
1880 	 VN_WALKREWRITE guard).  */
1881       if (vn_walk_kind == VN_WALKREWRITE
1882 	  && is_gimple_reg_type (TREE_TYPE (lhs))
1883 	  && types_compatible_p (TREE_TYPE (lhs), vr->type))
1884 	{
1885 	  tree *saved_last_vuse_ptr = last_vuse_ptr;
1886 	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
1887 	  last_vuse_ptr = NULL;
1888 	  tree saved_vuse = vr->vuse;
1889 	  hashval_t saved_hashcode = vr->hashcode;
1890 	  void *res = vn_reference_lookup_2 (ref,
1891 					     gimple_vuse (def_stmt), 0, vr);
1892 	  /* Need to restore vr->vuse and vr->hashcode.  */
1893 	  vr->vuse = saved_vuse;
1894 	  vr->hashcode = saved_hashcode;
1895 	  last_vuse_ptr = saved_last_vuse_ptr;
1896 	  if (res && res != (void *)-1)
1897 	    {
1898 	      vn_reference_t vnresult = (vn_reference_t) res;
1899 	      if (vnresult->result
1900 		  && operand_equal_p (vnresult->result,
1901 				      gimple_assign_rhs1 (def_stmt), 0))
1902 		return res;
1903 	    }
1904 	}
1905     }
1906   else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1907 	   && gimple_call_num_args (def_stmt) <= 4)
1908     {
1909       /* For builtin calls, valueize their arguments and call the
1910          alias oracle again.  Valueization may improve the points-to
1911 	 info of pointers and constify size and position arguments.
1912 	 Originally this was motivated by PR61034, which has
1913 	 conditional calls to free falsely clobbering ref because
1914 	 of imprecise points-to info of the argument.  */
1915       tree oldargs[4];
1916       bool valueized_anything = false;
1917       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1918 	{
1919 	  oldargs[i] = gimple_call_arg (def_stmt, i);
1920 	  tree val = vn_valueize (oldargs[i]);
1921 	  if (val != oldargs[i])
1922 	    {
1923 	      gimple_call_set_arg (def_stmt, i, val);
1924 	      valueized_anything = true;
1925 	    }
1926 	}
1927       if (valueized_anything)
1928 	{
1929 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1930 					       ref);
1931 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1932 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
1933 	  if (!res)
1934 	    {
1935 	      *disambiguate_only = true;
1936 	      return NULL;
1937 	    }
1938 	}
1939     }
1940 
1941   if (*disambiguate_only)
1942     return (void *)-1;
1943 
1944   /* If we cannot constrain the size of the reference we cannot
1945      test if anything kills it.  */
1946   if (!ref->max_size_known_p ())
1947     return (void *)-1;
1948 
1949   poly_int64 offset = ref->offset;
1950   poly_int64 maxsize = ref->max_size;
1951 
1952   /* We can't deduce anything useful from clobbers.  */
1953   if (gimple_clobber_p (def_stmt))
1954     return (void *)-1;
1955 
1956   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1957      from that definition.
1958      1) Memset.  */
1959   if (is_gimple_reg_type (vr->type)
1960       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1961       && integer_zerop (gimple_call_arg (def_stmt, 1))
1962       && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1963       && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1964     {
1965       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1966       tree base2;
1967       poly_int64 offset2, size2, maxsize2;
1968       bool reverse;
1969       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1970 				       &reverse);
1971       tree len = gimple_call_arg (def_stmt, 2);
1972       if (known_size_p (maxsize2)
1973 	  && operand_equal_p (base, base2, 0)
1974 	  && known_subrange_p (offset, maxsize, offset2,
1975 			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
1976 	{
1977 	  tree val = build_zero_cst (vr->type);
1978 	  return vn_reference_lookup_or_insert_for_pieces
1979 	           (vuse, vr->set, vr->type, vr->operands, val);
1980 	}
1981     }
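
  /* Editorial example (a sketch, not from the sources): given

       struct S { int i; float f; } s;
       memset (&s, 0, sizeof (s));
       int x = s.i;

     the load from s.i lies within the zeroed range, so the lookup above
     returns a zero constant of the load's type without reading memory.  */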
1982 
1983   /* 2) Assignment from an empty CONSTRUCTOR.  */
1984   else if (is_gimple_reg_type (vr->type)
1985 	   && gimple_assign_single_p (def_stmt)
1986 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1987 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1988     {
1989       tree base2;
1990       poly_int64 offset2, size2, maxsize2;
1991       bool reverse;
1992       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1993 				       &offset2, &size2, &maxsize2, &reverse);
1994       if (known_size_p (maxsize2)
1995 	  && operand_equal_p (base, base2, 0)
1996 	  && known_subrange_p (offset, maxsize, offset2, size2))
1997 	{
1998 	  tree val = build_zero_cst (vr->type);
1999 	  return vn_reference_lookup_or_insert_for_pieces
2000 	           (vuse, vr->set, vr->type, vr->operands, val);
2001 	}
2002     }
2003 
2004   /* 3) Assignment from a constant.  We can use fold's native encode/interpret
2005      routines to extract the assigned bits.  */
2006   else if (known_eq (ref->size, maxsize)
2007 	   && is_gimple_reg_type (vr->type)
2008 	   && !contains_storage_order_barrier_p (vr->operands)
2009 	   && gimple_assign_single_p (def_stmt)
2010 	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2011 	   /* native_encode and native_decode operate on arrays of bytes
2012 	      and so fundamentally need a compile-time size and offset.  */
2013 	   && maxsize.is_constant (&maxsizei)
2014 	   && maxsizei % BITS_PER_UNIT == 0
2015 	   && offset.is_constant (&offseti)
2016 	   && offseti % BITS_PER_UNIT == 0
2017 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2018 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2019 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2020     {
2021       tree base2;
2022       HOST_WIDE_INT offset2, size2;
2023       bool reverse;
2024       base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2025 					   &offset2, &size2, &reverse);
2026       if (base2
2027 	  && !reverse
2028 	  && size2 % BITS_PER_UNIT == 0
2029 	  && offset2 % BITS_PER_UNIT == 0
2030 	  && operand_equal_p (base, base2, 0)
2031 	  && known_subrange_p (offseti, maxsizei, offset2, size2))
2032 	{
2033 	  /* We support up to 512-bit values (for V8DFmode).  */
2034 	  unsigned char buffer[64];
2035 	  int len;
2036 
2037 	  tree rhs = gimple_assign_rhs1 (def_stmt);
2038 	  if (TREE_CODE (rhs) == SSA_NAME)
2039 	    rhs = SSA_VAL (rhs);
2040 	  len = native_encode_expr (rhs,
2041 				    buffer, sizeof (buffer),
2042 				    (offseti - offset2) / BITS_PER_UNIT);
2043 	  if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2044 	    {
2045 	      tree type = vr->type;
2046 	      /* Make sure to interpret in a type that has a range
2047 	         covering the whole access size.  */
2048 	      if (INTEGRAL_TYPE_P (vr->type)
2049 		  && maxsizei != TYPE_PRECISION (vr->type))
2050 		type = build_nonstandard_integer_type (maxsizei,
2051 						       TYPE_UNSIGNED (type));
2052 	      tree val = native_interpret_expr (type, buffer,
2053 						maxsizei / BITS_PER_UNIT);
2054 	      /* If we chop off bits because the type's precision doesn't
2055 		 match the memory access size, this is OK when optimizing
2056 		 reads but not when called from the DSE code during
2057 		 elimination.  */
2058 	      if (val
2059 		  && type != vr->type)
2060 		{
2061 		  if (! int_fits_type_p (val, vr->type))
2062 		    val = NULL_TREE;
2063 		  else
2064 		    val = fold_convert (vr->type, val);
2065 		}
2066 
2067 	      if (val)
2068 		return vn_reference_lookup_or_insert_for_pieces
2069 			 (vuse, vr->set, vr->type, vr->operands, val);
2070 	    }
2071 	}
2072     }
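
  /* Editorial example (a sketch, not from the sources): given

       union U { double d; long long l; } u;
       u.d = 1.0;
       long long x = u.l;

     the constant store is byte-encoded via native_encode_expr and the
     overlapping load re-interpreted via native_interpret_expr, yielding
     x == 0x3ff0000000000000 for IEEE double.  */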
2073 
2074   /* 4) Assignment from an SSA name whose definition we may be able
2075      to access pieces from.  */
2076   else if (known_eq (ref->size, maxsize)
2077 	   && is_gimple_reg_type (vr->type)
2078 	   && !contains_storage_order_barrier_p (vr->operands)
2079 	   && gimple_assign_single_p (def_stmt)
2080 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2081     {
2082       tree base2;
2083       poly_int64 offset2, size2, maxsize2;
2084       bool reverse;
2085       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2086 				       &offset2, &size2, &maxsize2,
2087 				       &reverse);
2088       if (!reverse
2089 	  && known_size_p (maxsize2)
2090 	  && known_eq (maxsize2, size2)
2091 	  && operand_equal_p (base, base2, 0)
2092 	  && known_subrange_p (offset, maxsize, offset2, size2)
2093 	  /* ???  We can't handle bitfield precision extracts without
2094 	     either using an alternate type for the BIT_FIELD_REF and
2095 	     then doing a conversion or possibly adjusting the offset
2096 	     according to endianness.  */
2097 	  && (! INTEGRAL_TYPE_P (vr->type)
2098 	      || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2099 	  && multiple_p (ref->size, BITS_PER_UNIT))
2100 	{
2101 	  code_helper rcode = BIT_FIELD_REF;
2102 	  tree ops[3];
2103 	  ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2104 	  ops[1] = bitsize_int (ref->size);
2105 	  ops[2] = bitsize_int (offset - offset2);
2106 	  tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2107 	  if (val
2108 	      && (TREE_CODE (val) != SSA_NAME
2109 		  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2110 	    {
2111 	      vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2112 		  (vuse, vr->set, vr->type, vr->operands, val);
2113 	      return res;
2114 	    }
2115 	}
2116     }
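
  /* Editorial example (a sketch, not from the sources): given a
     register-typed store from an SSA name followed by a partial read,

       _Complex float c = c_1;
       float r = __real c;

     the load of the real part is rewritten to BIT_FIELD_REF <c_1, 32, 0>
     and value-numbered through vn_nary_build_or_lookup above.  */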
2117 
2118   /* 5) For aggregate copies translate the reference through them if
2119      the copy kills ref.  */
2120   else if (vn_walk_kind == VN_WALKREWRITE
2121 	   && gimple_assign_single_p (def_stmt)
2122 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
2123 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2124 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2125     {
2126       tree base2;
2127       int i, j, k;
2128       auto_vec<vn_reference_op_s> rhs;
2129       vn_reference_op_t vro;
2130       ao_ref r;
2131 
2132       if (!lhs_ref_ok)
2133 	return (void *)-1;
2134 
2135       /* See if the assignment kills REF.  */
2136       base2 = ao_ref_base (&lhs_ref);
2137       if (!lhs_ref.max_size_known_p ()
2138 	  || (base != base2
2139 	      && (TREE_CODE (base) != MEM_REF
2140 		  || TREE_CODE (base2) != MEM_REF
2141 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2142 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2143 					  TREE_OPERAND (base2, 1))))
2144 	  || !stmt_kills_ref_p (def_stmt, ref))
2145 	return (void *)-1;
2146 
2147       /* Find the common base of ref and the lhs.  lhs_ops already
2148          contains valueized operands for the lhs.  */
2149       i = vr->operands.length () - 1;
2150       j = lhs_ops.length () - 1;
2151       while (j >= 0 && i >= 0
2152 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2153 	{
2154 	  i--;
2155 	  j--;
2156 	}
2157 
2158       /* ???  The innermost op should always be a MEM_REF and we already
2159          checked that the assignment to the lhs kills vr.  Thus for
2160 	 aggregate copies using char[] types the vn_reference_op_eq
2161 	 may fail when comparing types for compatibility.  But we really
2162 	 don't care here; further lookups with the rewritten operands
2163 	 will simply fail if we messed up types too badly.  */
2164       poly_int64 extra_off = 0;
2165       if (j == 0 && i >= 0
2166 	  && lhs_ops[0].opcode == MEM_REF
2167 	  && maybe_ne (lhs_ops[0].off, -1))
2168 	{
2169 	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2170 	    i--, j--;
2171 	  else if (vr->operands[i].opcode == MEM_REF
2172 		   && maybe_ne (vr->operands[i].off, -1))
2173 	    {
2174 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
2175 	      i--, j--;
2176 	    }
2177 	}
2178 
2179       /* i now points to the first additional op.
2180 	 ???  LHS may not be completely contained in VR, one or more
2181 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
2182 	 try handling outermost VIEW_CONVERT_EXPRs.  */
2183       if (j != -1)
2184 	return (void *)-1;
2185 
2186       /* Punt if the additional ops contain a storage order barrier.  */
2187       for (k = i; k >= 0; k--)
2188 	{
2189 	  vro = &vr->operands[k];
2190 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2191 	    return (void *)-1;
2192 	}
2193 
2194       /* Now re-write REF to be based on the rhs of the assignment.  */
2195       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2196 
2197       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
2198       if (maybe_ne (extra_off, 0))
2199 	{
2200 	  if (rhs.length () < 2
2201 	      || rhs[0].opcode != MEM_REF
2202 	      || known_eq (rhs[0].off, -1))
2203 	    return (void *)-1;
2204 	  rhs[0].off += extra_off;
2205 	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2206 					build_int_cst (TREE_TYPE (rhs[0].op0),
2207 						       extra_off));
2208 	}
2209 
2210       /* We need to prepend vr->operands[0..i] to rhs.  */
2211       vec<vn_reference_op_s> old = vr->operands;
2212       if (i + 1 + rhs.length () > vr->operands.length ())
2213 	vr->operands.safe_grow (i + 1 + rhs.length ());
2214       else
2215 	vr->operands.truncate (i + 1 + rhs.length ());
2216       FOR_EACH_VEC_ELT (rhs, j, vro)
2217 	vr->operands[i + 1 + j] = *vro;
2218       vr->operands = valueize_refs (vr->operands);
2219       if (old == shared_lookup_references)
2220 	shared_lookup_references = vr->operands;
2221       vr->hashcode = vn_reference_compute_hash (vr);
2222 
2223       /* Try folding the new reference to a constant.  */
2224       tree val = fully_constant_vn_reference_p (vr);
2225       if (val)
2226 	return vn_reference_lookup_or_insert_for_pieces
2227 		 (vuse, vr->set, vr->type, vr->operands, val);
2228 
2229       /* Adjust *ref from the new operands.  */
2230       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2231 	return (void *)-1;
2232       /* This can happen with bitfields.  */
2233       if (maybe_ne (ref->size, r.size))
2234 	return (void *)-1;
2235       *ref = r;
2236 
2237       /* Do not update last seen VUSE after translating.  */
2238       last_vuse_ptr = NULL;
2239 
2240       /* Keep looking for the adjusted *REF / VR pair.  */
2241       return NULL;
2242     }
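
  /* Editorial example (a sketch, not from the sources): given

       struct S a, b;
       a = b;
       int x = a.i;

     the load a.i is rewritten to operands describing b.i and the walk
     continues from the copy's VUSE, so an earlier store to b.i can
     provide the value.  */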
2243 
2244   /* 6) For memcpy copies translate the reference through them if
2245      the copy kills ref.  */
2246   else if (vn_walk_kind == VN_WALKREWRITE
2247 	   && is_gimple_reg_type (vr->type)
2248 	   /* ???  Handle BCOPY as well.  */
2249 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2250 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2251 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2252 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2253 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2254 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2255 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2256 	   && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2257     {
2258       tree lhs, rhs;
2259       ao_ref r;
2260       poly_int64 rhs_offset, lhs_offset;
2261       vn_reference_op_s op;
2262       poly_uint64 mem_offset;
2263       poly_int64 at, byte_maxsize;
2264 
2265       /* Only handle non-variable, addressable refs.  */
2266       if (maybe_ne (ref->size, maxsize)
2267 	  || !multiple_p (offset, BITS_PER_UNIT, &at)
2268 	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2269 	return (void *)-1;
2270 
2271       /* Extract a pointer base and an offset for the destination.  */
2272       lhs = gimple_call_arg (def_stmt, 0);
2273       lhs_offset = 0;
2274       if (TREE_CODE (lhs) == SSA_NAME)
2275 	{
2276 	  lhs = SSA_VAL (lhs);
2277 	  if (TREE_CODE (lhs) == SSA_NAME)
2278 	    {
2279 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2280 	      if (gimple_assign_single_p (def_stmt)
2281 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2282 		lhs = gimple_assign_rhs1 (def_stmt);
2283 	    }
2284 	}
2285       if (TREE_CODE (lhs) == ADDR_EXPR)
2286 	{
2287 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2288 						    &lhs_offset);
2289 	  if (!tem)
2290 	    return (void *)-1;
2291 	  if (TREE_CODE (tem) == MEM_REF
2292 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2293 	    {
2294 	      lhs = TREE_OPERAND (tem, 0);
2295 	      if (TREE_CODE (lhs) == SSA_NAME)
2296 		lhs = SSA_VAL (lhs);
2297 	      lhs_offset += mem_offset;
2298 	    }
2299 	  else if (DECL_P (tem))
2300 	    lhs = build_fold_addr_expr (tem);
2301 	  else
2302 	    return (void *)-1;
2303 	}
2304       if (TREE_CODE (lhs) != SSA_NAME
2305 	  && TREE_CODE (lhs) != ADDR_EXPR)
2306 	return (void *)-1;
2307 
2308       /* Extract a pointer base and an offset for the source.  */
2309       rhs = gimple_call_arg (def_stmt, 1);
2310       rhs_offset = 0;
2311       if (TREE_CODE (rhs) == SSA_NAME)
2312 	rhs = SSA_VAL (rhs);
2313       if (TREE_CODE (rhs) == ADDR_EXPR)
2314 	{
2315 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2316 						    &rhs_offset);
2317 	  if (!tem)
2318 	    return (void *)-1;
2319 	  if (TREE_CODE (tem) == MEM_REF
2320 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2321 	    {
2322 	      rhs = TREE_OPERAND (tem, 0);
2323 	      rhs_offset += mem_offset;
2324 	    }
2325 	  else if (DECL_P (tem)
2326 		   || TREE_CODE (tem) == STRING_CST)
2327 	    rhs = build_fold_addr_expr (tem);
2328 	  else
2329 	    return (void *)-1;
2330 	}
2331       if (TREE_CODE (rhs) != SSA_NAME
2332 	  && TREE_CODE (rhs) != ADDR_EXPR)
2333 	return (void *)-1;
2334 
2335       /* The bases of the destination and the reference have to agree.  */
2336       if (TREE_CODE (base) == MEM_REF)
2337 	{
2338 	  if (TREE_OPERAND (base, 0) != lhs
2339 	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2340 	    return (void *) -1;
2341 	  at += mem_offset;
2342 	}
2343       else if (!DECL_P (base)
2344 	       || TREE_CODE (lhs) != ADDR_EXPR
2345 	       || TREE_OPERAND (lhs, 0) != base)
2346 	return (void *)-1;
2347 
2348       /* If the access is completely outside of the memcpy destination
2349 	 area there is no aliasing.  */
2350       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2351 	return NULL;
2352       /* And the access has to be contained within the memcpy destination.  */
2353       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2354 	return (void *)-1;
2355 
2356       /* Make room for 2 operands in the new reference.  */
2357       if (vr->operands.length () < 2)
2358 	{
2359 	  vec<vn_reference_op_s> old = vr->operands;
2360 	  vr->operands.safe_grow_cleared (2);
2361 	  if (old == shared_lookup_references)
2362 	    shared_lookup_references = vr->operands;
2363 	}
2364       else
2365 	vr->operands.truncate (2);
2366 
2367       /* The looked-through reference is a simple MEM_REF.  */
2368       memset (&op, 0, sizeof (op));
2369       op.type = vr->type;
2370       op.opcode = MEM_REF;
2371       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2372       op.off = at - lhs_offset + rhs_offset;
2373       vr->operands[0] = op;
2374       op.type = TREE_TYPE (rhs);
2375       op.opcode = TREE_CODE (rhs);
2376       op.op0 = rhs;
2377       op.off = -1;
2378       vr->operands[1] = op;
2379       vr->hashcode = vn_reference_compute_hash (vr);
2380 
2381       /* Try folding the new reference to a constant.  */
2382       tree val = fully_constant_vn_reference_p (vr);
2383       if (val)
2384 	return vn_reference_lookup_or_insert_for_pieces
2385 		 (vuse, vr->set, vr->type, vr->operands, val);
2386 
2387       /* Adjust *ref from the new operands.  */
2388       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2389 	return (void *)-1;
2390       /* This can happen with bitfields.  */
2391       if (maybe_ne (ref->size, r.size))
2392 	return (void *)-1;
2393       *ref = r;
2394 
2395       /* Do not update last seen VUSE after translating.  */
2396       last_vuse_ptr = NULL;
2397 
2398       /* Keep looking for the adjusted *REF / VR pair.  */
2399       return NULL;
2400     }
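
  /* Editorial example (a sketch, not from the sources): given

       struct S d, s;
       memcpy (&d, &s, sizeof (d));
       int x = d.i;

     a load fully covered by the copy is rewritten to a MEM_REF based on
     &s at the corresponding byte offset and the walk continues past the
     memcpy, exactly as in the aggregate-copy case above.  */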
2401 
2402   /* Bail out and stop walking.  */
2403   return (void *)-1;
2404 }
2405 
2406 /* Return a reference op vector from OP that can be used for
2407    vn_reference_lookup_pieces.  The caller is responsible for releasing
2408    the vector.  */
2409 
2410 vec<vn_reference_op_s>
2411 vn_reference_operands_for_lookup (tree op)
2412 {
2413   bool valueized;
2414   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2415 }
2416 
2417 /* Lookup a reference operation by its parts in the current hash table.
2418    Returns the resulting value number if it exists in the hash table,
2419    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
2420    vn_reference_t stored in the hashtable if something is found.  */
2421 
2422 tree
2423 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2424 			    vec<vn_reference_op_s> operands,
2425 			    vn_reference_t *vnresult, vn_lookup_kind kind)
2426 {
2427   struct vn_reference_s vr1;
2428   vn_reference_t tmp;
2429   tree cst;
2430 
2431   if (!vnresult)
2432     vnresult = &tmp;
2433   *vnresult = NULL;
2434 
2435   vr1.vuse = vuse_ssa_val (vuse);
2436   shared_lookup_references.truncate (0);
2437   shared_lookup_references.safe_grow (operands.length ());
2438   memcpy (shared_lookup_references.address (),
2439 	  operands.address (),
2440 	  sizeof (vn_reference_op_s)
2441 	  * operands.length ());
2442   vr1.operands = operands = shared_lookup_references
2443     = valueize_refs (shared_lookup_references);
2444   vr1.type = type;
2445   vr1.set = set;
2446   vr1.hashcode = vn_reference_compute_hash (&vr1);
2447   if ((cst = fully_constant_vn_reference_p (&vr1)))
2448     return cst;
2449 
2450   vn_reference_lookup_1 (&vr1, vnresult);
2451   if (!*vnresult
2452       && kind != VN_NOWALK
2453       && vr1.vuse)
2454     {
2455       ao_ref r;
2456       vn_walk_kind = kind;
2457       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2458 	*vnresult =
2459 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2460 						  vn_reference_lookup_2,
2461 						  vn_reference_lookup_3,
2462 						  vuse_ssa_val, &vr1);
2463       gcc_checking_assert (vr1.operands == shared_lookup_references);
2464     }
2465 
2466   if (*vnresult)
2467      return (*vnresult)->result;
2468 
2469   return NULL_TREE;
2470 }
2471 
2472 /* Lookup OP in the current hash table, and return the resulting value
2473    number if it exists in the hash table.  Return NULL_TREE if it does
2474    not exist in the hash table or if the result field of the structure
2475    was NULL.  VNRESULT will be filled in with the vn_reference_t
2476    stored in the hashtable if one exists.  When TBAA_P is false assume
2477    we are looking up a store and treat it as having alias-set zero.  */
2478 
2479 tree
2480 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2481 		     vn_reference_t *vnresult, bool tbaa_p)
2482 {
2483   vec<vn_reference_op_s> operands;
2484   struct vn_reference_s vr1;
2485   tree cst;
2486   bool valueized_anything;
2487 
2488   if (vnresult)
2489     *vnresult = NULL;
2490 
2491   vr1.vuse = vuse_ssa_val (vuse);
2492   vr1.operands = operands
2493     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2494   vr1.type = TREE_TYPE (op);
2495   vr1.set = tbaa_p ? get_alias_set (op) : 0;
2496   vr1.hashcode = vn_reference_compute_hash (&vr1);
2497   if ((cst = fully_constant_vn_reference_p (&vr1)))
2498     return cst;
2499 
2500   if (kind != VN_NOWALK
2501       && vr1.vuse)
2502     {
2503       vn_reference_t wvnresult;
2504       ao_ref r;
2505       /* Make sure to use a valueized reference if we valueized anything.
2506          Otherwise preserve the full reference for advanced TBAA.  */
2507       if (!valueized_anything
2508 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2509 					     vr1.operands))
2510 	ao_ref_init (&r, op);
2511       if (! tbaa_p)
2512 	r.ref_alias_set = r.base_alias_set = 0;
2513       vn_walk_kind = kind;
2514       wvnresult =
2515 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2516 						vn_reference_lookup_2,
2517 						vn_reference_lookup_3,
2518 						vuse_ssa_val, &vr1);
2519       gcc_checking_assert (vr1.operands == shared_lookup_references);
2520       if (wvnresult)
2521 	{
2522 	  if (vnresult)
2523 	    *vnresult = wvnresult;
2524 	  return wvnresult->result;
2525 	}
2526 
2527       return NULL_TREE;
2528     }
2529 
2530   return vn_reference_lookup_1 (&vr1, vnresult);
2531 }
2532 
2533 /* Lookup CALL in the current hash table and return the entry in
2534    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
2535 
2536 void
2537 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2538 			  vn_reference_t vr)
2539 {
2540   if (vnresult)
2541     *vnresult = NULL;
2542 
2543   tree vuse = gimple_vuse (call);
2544 
2545   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2546   vr->operands = valueize_shared_reference_ops_from_call (call);
2547   vr->type = gimple_expr_type (call);
2548   vr->set = 0;
2549   vr->hashcode = vn_reference_compute_hash (vr);
2550   vn_reference_lookup_1 (vr, vnresult);
2551 }
2552 
2553 /* Insert OP into the current hash table with a value number of
2554    RESULT, and return the resulting reference structure we created.  */
2555 
2556 static vn_reference_t
2557 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2558 {
2559   vn_reference_s **slot;
2560   vn_reference_t vr1;
2561   bool tem;
2562 
2563   vr1 = current_info->references_pool->allocate ();
2564   if (TREE_CODE (result) == SSA_NAME)
2565     vr1->value_id = VN_INFO (result)->value_id;
2566   else
2567     vr1->value_id = get_or_alloc_constant_value_id (result);
2568   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2569   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2570   vr1->type = TREE_TYPE (op);
2571   vr1->set = get_alias_set (op);
2572   vr1->hashcode = vn_reference_compute_hash (vr1);
2573   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2574   vr1->result_vdef = vdef;
2575 
2576   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2577 							INSERT);
2578 
2579   /* Because we look up stores using vuses, and value-number failures
2580      using the vdefs (see visit_reference_op_store for how and why),
2581      it's possible that on failure we may try to insert an already
2582      inserted store.  This is not wrong; there is no SSA name for a
2583      store that we could use as a differentiator anyway.  Thus, unlike
2584      the other lookup functions, you cannot gcc_assert (!*slot)
2585      here.  */
2586 
2587   /* But free the old slot in case of a collision.  */
2588   if (*slot)
2589     free_reference (*slot);
2590 
2591   *slot = vr1;
2592   return vr1;
2593 }
2594 
2595 /* Insert a reference by its pieces into the current hash table with
2596    a value number of RESULT.  Return the resulting reference
2597    structure we created.  */
2598 
2599 vn_reference_t
2600 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2601 			    vec<vn_reference_op_s> operands,
2602 			    tree result, unsigned int value_id)
2603 
2604 {
2605   vn_reference_s **slot;
2606   vn_reference_t vr1;
2607 
2608   vr1 = current_info->references_pool->allocate ();
2609   vr1->value_id = value_id;
2610   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2611   vr1->operands = valueize_refs (operands);
2612   vr1->type = type;
2613   vr1->set = set;
2614   vr1->hashcode = vn_reference_compute_hash (vr1);
2615   if (result && TREE_CODE (result) == SSA_NAME)
2616     result = SSA_VAL (result);
2617   vr1->result = result;
2618 
2619   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2620 							INSERT);
2621 
2622   /* At this point we should have all the things inserted that we have
2623      seen before, and we should never try inserting something that
2624      already exists.  */
2625   gcc_assert (!*slot);
2626   if (*slot)
2627     free_reference (*slot);
2628 
2629   *slot = vr1;
2630   return vr1;
2631 }
2632 
2633 /* Compute and return the hash value for nary operation VNO1.  */
2634 
2635 static hashval_t
2636 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2637 {
2638   inchash::hash hstate;
2639   unsigned i;
2640 
2641   for (i = 0; i < vno1->length; ++i)
2642     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2643       vno1->op[i] = SSA_VAL (vno1->op[i]);
2644 
2645   if (((vno1->length == 2
2646 	&& commutative_tree_code (vno1->opcode))
2647        || (vno1->length == 3
2648 	   && commutative_ternary_tree_code (vno1->opcode)))
2649       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2650     std::swap (vno1->op[0], vno1->op[1]);
2651   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2652 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2653     {
2654       std::swap (vno1->op[0], vno1->op[1]);
2655       vno1->opcode = swap_tree_comparison (vno1->opcode);
2656     }
2657 
2658   hstate.add_int (vno1->opcode);
2659   for (i = 0; i < vno1->length; ++i)
2660     inchash::add_expr (vno1->op[i], hstate);
2661 
2662   return hstate.end ();
2663 }
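
/* Editorial note (a sketch, not from the sources): the canonicalization
   above means b_2 + a_1 is hashed in the same operand order as a_1 + b_2,
   and b_2 < a_1 is turned into a_1 > b_2 via swap_tree_comparison, so both
   spellings of a commutative operation or comparison land in the same
   hash slot.  */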
2664 
2665 /* Compare nary operations VNO1 and VNO2 and return true if they are
2666    equivalent.  */
2667 
2668 bool
2669 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2670 {
2671   unsigned i;
2672 
2673   if (vno1->hashcode != vno2->hashcode)
2674     return false;
2675 
2676   if (vno1->length != vno2->length)
2677     return false;
2678 
2679   if (vno1->opcode != vno2->opcode
2680       || !types_compatible_p (vno1->type, vno2->type))
2681     return false;
2682 
2683   for (i = 0; i < vno1->length; ++i)
2684     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2685       return false;
2686 
2687   /* BIT_INSERT_EXPR has an implicit operand as the type precision
2688      of op1.  Need to check to make sure they are the same.  */
2689   if (vno1->opcode == BIT_INSERT_EXPR
2690       && TREE_CODE (vno1->op[1]) == INTEGER_CST
2691       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2692 	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2693     return false;
2694 
2695   return true;
2696 }
2697 
2698 /* Initialize VNO from the pieces provided.  */
2699 
2700 static void
2701 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2702 			     enum tree_code code, tree type, tree *ops)
2703 {
2704   vno->opcode = code;
2705   vno->length = length;
2706   vno->type = type;
2707   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2708 }
2709 
2710 /* Initialize VNO from OP.  */
2711 
2712 static void
2713 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2714 {
2715   unsigned i;
2716 
2717   vno->opcode = TREE_CODE (op);
2718   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2719   vno->type = TREE_TYPE (op);
2720   for (i = 0; i < vno->length; ++i)
2721     vno->op[i] = TREE_OPERAND (op, i);
2722 }
2723 
2724 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2725 
2726 static unsigned int
2727 vn_nary_length_from_stmt (gimple *stmt)
2728 {
2729   switch (gimple_assign_rhs_code (stmt))
2730     {
2731     case REALPART_EXPR:
2732     case IMAGPART_EXPR:
2733     case VIEW_CONVERT_EXPR:
2734       return 1;
2735 
2736     case BIT_FIELD_REF:
2737       return 3;
2738 
2739     case CONSTRUCTOR:
2740       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2741 
2742     default:
2743       return gimple_num_ops (stmt) - 1;
2744     }
2745 }
2746 
2747 /* Initialize VNO from STMT.  */
2748 
2749 static void
2750 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2751 {
2752   unsigned i;
2753 
2754   vno->opcode = gimple_assign_rhs_code (stmt);
2755   vno->type = gimple_expr_type (stmt);
2756   switch (vno->opcode)
2757     {
2758     case REALPART_EXPR:
2759     case IMAGPART_EXPR:
2760     case VIEW_CONVERT_EXPR:
2761       vno->length = 1;
2762       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2763       break;
2764 
2765     case BIT_FIELD_REF:
2766       vno->length = 3;
2767       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2768       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2769       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2770       break;
2771 
2772     case CONSTRUCTOR:
2773       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2774       for (i = 0; i < vno->length; ++i)
2775 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2776       break;
2777 
2778     default:
2779       gcc_checking_assert (!gimple_assign_single_p (stmt));
2780       vno->length = gimple_num_ops (stmt) - 1;
2781       for (i = 0; i < vno->length; ++i)
2782 	vno->op[i] = gimple_op (stmt, i + 1);
2783     }
2784 }
2785 
2786 /* Compute the hashcode for VNO and look for it in the hash table;
2787    return the resulting value number if it exists in the hash table.
2788    Return NULL_TREE if it does not exist in the hash table or if the
2789    result field of the operation is NULL.  VNRESULT will contain the
2790    vn_nary_op_t from the hashtable if it exists.  */
2791 
2792 static tree
2793 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2794 {
2795   vn_nary_op_s **slot;
2796 
2797   if (vnresult)
2798     *vnresult = NULL;
2799 
2800   vno->hashcode = vn_nary_op_compute_hash (vno);
2801   slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2802 						  NO_INSERT);
2803   if (!slot && current_info == optimistic_info)
2804     slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2805 						  NO_INSERT);
2806   if (!slot)
2807     return NULL_TREE;
2808   if (vnresult)
2809     *vnresult = *slot;
2810   return (*slot)->result;
2811 }
2812 
2813 /* Lookup an n-ary operation by its pieces and return the resulting value
2814    number if it exists in the hash table.  Return NULL_TREE if it does
2815    not exist in the hash table or if the result field of the operation
2816    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2817    if it exists.  */
2818 
2819 tree
2820 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2821 			  tree type, tree *ops, vn_nary_op_t *vnresult)
2822 {
2823   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2824 				  sizeof_vn_nary_op (length));
2825   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2826   return vn_nary_op_lookup_1 (vno1, vnresult);
2827 }
2828 
2829 /* Lookup OP in the current hash table, and return the resulting value
2830    number if it exists in the hash table.  Return NULL_TREE if it does
2831    not exist in the hash table or if the result field of the operation
2832    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2833    if it exists.  */
2834 
2835 tree
2836 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2837 {
2838   vn_nary_op_t vno1
2839     = XALLOCAVAR (struct vn_nary_op_s,
2840 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2841   init_vn_nary_op_from_op (vno1, op);
2842   return vn_nary_op_lookup_1 (vno1, vnresult);
2843 }
2844 
2845 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2846    value number if it exists in the hash table.  Return NULL_TREE if
2847    it does not exist in the hash table.  VNRESULT will contain the
2848    vn_nary_op_t from the hashtable if it exists.  */
2849 
2850 tree
2851 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2852 {
2853   vn_nary_op_t vno1
2854     = XALLOCAVAR (struct vn_nary_op_s,
2855 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2856   init_vn_nary_op_from_stmt (vno1, stmt);
2857   return vn_nary_op_lookup_1 (vno1, vnresult);
2858 }
2859 
2860 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2861 
2862 static vn_nary_op_t
2863 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2864 {
2865   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2866 }
2867 
2868 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2869    obstack.  */
2870 
2871 static vn_nary_op_t
2872 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2873 {
2874   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2875 					       &current_info->nary_obstack);
2876 
2877   vno1->value_id = value_id;
2878   vno1->length = length;
2879   vno1->result = result;
2880 
2881   return vno1;
2882 }
2883 
2884 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2885    VNO->HASHCODE first.  */
2886 
2887 static vn_nary_op_t
2888 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2889 			bool compute_hash)
2890 {
2891   vn_nary_op_s **slot;
2892 
2893   if (compute_hash)
2894     vno->hashcode = vn_nary_op_compute_hash (vno);
2895 
2896   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2897   /* While we do not want to insert things twice, it's awkward to
2898      avoid it in the case where visit_nary_op pattern-matches stuff
2899      and ends up simplifying the replacement to itself.  We then
2900      get two inserts, one from visit_nary_op and one from
2901      vn_nary_build_or_lookup.
2902      So allow inserts with the same value number.  */
2903   if (*slot && (*slot)->result == vno->result)
2904     return *slot;
2905 
2906   gcc_assert (!*slot);
2907 
2908   *slot = vno;
2909   return vno;
2910 }
2911 
2912 /* Insert an n-ary operation into the current hash table using its
2913    pieces.  Return the vn_nary_op_t structure we created and put in
2914    the hashtable.  */
2915 
2916 vn_nary_op_t
2917 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2918 			  tree type, tree *ops,
2919 			  tree result, unsigned int value_id)
2920 {
2921   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2922   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2923   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2924 }
2925 
2926 /* Insert OP into the current hash table with a value number of
2927    RESULT.  Return the vn_nary_op_t structure we created and put in
2928    the hashtable.  */
2929 
2930 vn_nary_op_t
2931 vn_nary_op_insert (tree op, tree result)
2932 {
2933   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2934   vn_nary_op_t vno1;
2935 
2936   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2937   init_vn_nary_op_from_op (vno1, op);
2938   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2939 }
2940 
2941 /* Insert the rhs of STMT into the current hash table with a value number of
2942    RESULT.  */
2943 
2944 static vn_nary_op_t
2945 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2946 {
2947   vn_nary_op_t vno1
2948     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2949 			result, VN_INFO (result)->value_id);
2950   init_vn_nary_op_from_stmt (vno1, stmt);
2951   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2952 }
2953 
2954 /* Compute a hashcode for PHI operation VP1 and return it.  */
2955 
2956 static inline hashval_t
2957 vn_phi_compute_hash (vn_phi_t vp1)
2958 {
2959   inchash::hash hstate (vp1->phiargs.length () > 2
2960 			? vp1->block->index : vp1->phiargs.length ());
2961   tree phi1op;
2962   tree type;
2963   edge e;
2964   edge_iterator ei;
2965 
2966   /* If all PHI arguments are constants we need to distinguish
2967      the PHI node via its type.  */
2968   type = vp1->type;
2969   hstate.merge_hash (vn_hash_type (type));
2970 
2971   FOR_EACH_EDGE (e, ei, vp1->block->preds)
2972     {
2973       /* Don't hash backedge values; they need to be handled as VN_TOP
2974          for optimistic value-numbering.  */
2975       if (e->flags & EDGE_DFS_BACK)
2976 	continue;
2977 
2978       phi1op = vp1->phiargs[e->dest_idx];
2979       if (phi1op == VN_TOP)
2980 	continue;
2981       inchash::add_expr (phi1op, hstate);
2982     }
2983 
2984   return hstate.end ();
2985 }
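
/* Editorial note (a sketch, not from the sources): for a loop-header PHI
   like i_1 = PHI <0(preheader), i_5(latch)> the backedge argument i_5 is
   skipped when hashing; it still changes between optimistic iterations,
   and hashing it would make otherwise equal PHIs hash differently.  */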
2986 
2987 
2988 /* Return true if COND1 and COND2 represent the same condition, setting
2989    *INVERTED_P if one needs to be inverted to make it the same as
2990    the other.  */
2991 
2992 static bool
2993 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2994 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2995 {
2996   enum tree_code code1 = gimple_cond_code (cond1);
2997   enum tree_code code2 = gimple_cond_code (cond2);
2998 
2999   *inverted_p = false;
3000   if (code1 == code2)
3001     ;
3002   else if (code1 == swap_tree_comparison (code2))
3003     std::swap (lhs2, rhs2);
3004   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3005     *inverted_p = true;
3006   else if (code1 == invert_tree_comparison
3007 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3008     {
3009       std::swap (lhs2, rhs2);
3010       *inverted_p = true;
3011     }
3012   else
3013     return false;
3014 
3015   return ((expressions_equal_p (lhs1, lhs2)
3016 	   && expressions_equal_p (rhs1, rhs2))
3017 	  || (commutative_tree_code (code1)
3018 	      && expressions_equal_p (lhs1, rhs2)
3019 	      && expressions_equal_p (rhs1, lhs2)));
3020 }
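
/* Editorial example (a sketch, not from the sources): a_1 < b_2 matches
   b_2 > a_1 directly after swapping the operands, and matches a_1 >= b_2
   with *INVERTED_P set, since the latter is the logical negation of the
   former (modulo NaNs, which the HONOR_NANS checks above guard).  */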
3021 
3022 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
3023 
3024 static int
3025 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3026 {
3027   if (vp1->hashcode != vp2->hashcode)
3028     return false;
3029 
3030   if (vp1->block != vp2->block)
3031     {
3032       if (vp1->phiargs.length () != vp2->phiargs.length ())
3033 	return false;
3034 
3035       switch (vp1->phiargs.length ())
3036 	{
3037 	case 1:
3038 	  /* Single-arg PHIs are just copies.  */
3039 	  break;
3040 
3041 	case 2:
3042 	  {
3043 	    /* Rule out backedges into the PHI.  */
3044 	    if (vp1->block->loop_father->header == vp1->block
3045 		|| vp2->block->loop_father->header == vp2->block)
3046 	      return false;
3047 
3048 	    /* If the PHI nodes do not have compatible types
3049 	       they are not the same.  */
3050 	    if (!types_compatible_p (vp1->type, vp2->type))
3051 	      return false;
3052 
3053 	    basic_block idom1
3054 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3055 	    basic_block idom2
3056 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3057 	    /* If the immediate dominators end in switch stmts, multiple
3058 	       values may end up in the same PHI arg via intermediate
3059 	       CFG merges.  */
3060 	    if (EDGE_COUNT (idom1->succs) != 2
3061 		|| EDGE_COUNT (idom2->succs) != 2)
3062 	      return false;
3063 
3064 	    /* Verify the controlling stmt is the same.  */
3065 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3066 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3067 	    if (! last1 || ! last2)
3068 	      return false;
3069 	    bool inverted_p;
3070 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3071 				      last2, vp2->cclhs, vp2->ccrhs,
3072 				      &inverted_p))
3073 	      return false;
3074 
3075 	    /* Get at true/false controlled edges into the PHI.  */
3076 	    edge te1, te2, fe1, fe2;
3077 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
3078 						       &te1, &fe1)
3079 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
3080 							  &te2, &fe2))
3081 	      return false;
3082 
3083 	    /* Swap edges if the second condition is the inverted of the
3084 	       first.  */
3085 	    if (inverted_p)
3086 	      std::swap (te2, fe2);
3087 
3088 	    /* ???  Handle VN_TOP specially.  */
3089 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3090 				       vp2->phiargs[te2->dest_idx])
3091 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3092 					  vp2->phiargs[fe2->dest_idx]))
3093 	      return false;
3094 
3095 	    return true;
3096 	  }
3097 
3098 	default:
3099 	  return false;
3100 	}
3101     }
3102 
3103   /* If the PHI nodes do not have compatible types
3104      they are not the same.  */
3105   if (!types_compatible_p (vp1->type, vp2->type))
3106     return false;
3107 
3108   /* Any phi in the same block will have its arguments in the
3109      same edge order, because of how we store phi nodes.  */
3110   int i;
3111   tree phi1op;
3112   FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3113     {
3114       tree phi2op = vp2->phiargs[i];
3115       if (phi1op == VN_TOP || phi2op == VN_TOP)
3116 	continue;
3117       if (!expressions_equal_p (phi1op, phi2op))
3118 	return false;
3119     }
3120 
3121   return true;
3122 }
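
/* Editorial example (a sketch, not from the sources): two diamonds

     if (a_1 < b_2) x_3 = c_4; else x_3 = d_5;
     ...
     if (b_2 > a_1) y_6 = c_4; else y_6 = d_5;

   yield PHIs in different blocks that nevertheless compare equal: the
   controlling conditions match via cond_stmts_equal_p and the arguments
   agree on the corresponding true/false edges.  */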
3123 
3124 static vec<tree> shared_lookup_phiargs;
3125 
3126 /* Lookup PHI in the current hash table, and return the resulting
3127    value number if it exists in the hash table.  Return NULL_TREE if
3128    it does not exist in the hash table. */
3129 
3130 static tree
3131 vn_phi_lookup (gimple *phi)
3132 {
3133   vn_phi_s **slot;
3134   struct vn_phi_s vp1;
3135   edge e;
3136   edge_iterator ei;
3137 
3138   shared_lookup_phiargs.truncate (0);
3139   shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3140 
3141   /* Canonicalize the SSA_NAME's to their value number.  */
3142   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3143     {
3144       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3145       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3146       shared_lookup_phiargs[e->dest_idx] = def;
3147     }
3148   vp1.type = TREE_TYPE (gimple_phi_result (phi));
3149   vp1.phiargs = shared_lookup_phiargs;
3150   vp1.block = gimple_bb (phi);
3151   /* Extract values of the controlling condition.  */
3152   vp1.cclhs = NULL_TREE;
3153   vp1.ccrhs = NULL_TREE;
3154   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3155   if (EDGE_COUNT (idom1->succs) == 2)
3156     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3157       {
3158 	vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3159 	vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3160       }
3161   vp1.hashcode = vn_phi_compute_hash (&vp1);
3162   slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3163 						  NO_INSERT);
3164   if (!slot && current_info == optimistic_info)
3165     slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3166 						  NO_INSERT);
3167   if (!slot)
3168     return NULL_TREE;
3169   return (*slot)->result;
3170 }
3171 
3172 /* Insert PHI into the current hash table with a value number of
3173    RESULT.  */
3174 
3175 static vn_phi_t
3176 vn_phi_insert (gimple *phi, tree result)
3177 {
3178   vn_phi_s **slot;
3179   vn_phi_t vp1 = current_info->phis_pool->allocate ();
3180   vec<tree> args = vNULL;
3181   edge e;
3182   edge_iterator ei;
3183 
3184   args.safe_grow (gimple_phi_num_args (phi));
3185 
3186   /* Canonicalize the SSA_NAME's to their value number.  */
3187   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3188     {
3189       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3190       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3191       args[e->dest_idx] = def;
3192     }
3193   vp1->value_id = VN_INFO (result)->value_id;
3194   vp1->type = TREE_TYPE (gimple_phi_result (phi));
3195   vp1->phiargs = args;
3196   vp1->block = gimple_bb (phi);
3197   /* Extract values of the controlling condition.  */
3198   vp1->cclhs = NULL_TREE;
3199   vp1->ccrhs = NULL_TREE;
3200   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3201   if (EDGE_COUNT (idom1->succs) == 2)
3202     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3203       {
3204 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3205 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3206       }
3207   vp1->result = result;
3208   vp1->hashcode = vn_phi_compute_hash (vp1);
3209 
3210   slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3211 
3212   /* Because we iterate over phi operations more than once, it's
3213      possible the slot might already exist here, hence no assert.  */
3214   *slot = vp1;
3215   return vp1;
3216 }
3217 
3218 
3219 /* Print set of components in strongly connected component SCC to OUT. */
3220 
3221 static void
3222 print_scc (FILE *out, vec<tree> scc)
3223 {
3224   tree var;
3225   unsigned int i;
3226 
3227   fprintf (out, "SCC consists of %u:", scc.length ());
3228   FOR_EACH_VEC_ELT (scc, i, var)
3229     {
3230       fprintf (out, " ");
3231       print_generic_expr (out, var);
3232     }
3233   fprintf (out, "\n");
3234 }
3235 
3236 /* Return true if BB1 is dominated by BB2 taking into account edges
3237    that are not executable.  */
3238 
3239 static bool
3240 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3241 {
3242   edge_iterator ei;
3243   edge e;
3244 
3245   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3246     return true;
3247 
3248   /* Before iterating we'd like to know if there exists an
3249      (executable) path from bb2 to bb1 at all; if not, we can
3250      directly return false.  For now simply iterate once.  */
3251 
3252   /* Iterate to the single executable bb1 predecessor.  */
3253   if (EDGE_COUNT (bb1->preds) > 1)
3254     {
3255       edge prede = NULL;
3256       FOR_EACH_EDGE (e, ei, bb1->preds)
3257 	if (e->flags & EDGE_EXECUTABLE)
3258 	  {
3259 	    if (prede)
3260 	      {
3261 		prede = NULL;
3262 		break;
3263 	      }
3264 	    prede = e;
3265 	  }
3266       if (prede)
3267 	{
3268 	  bb1 = prede->src;
3269 
3270 	  /* Re-do the dominance check with changed bb1.  */
3271 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3272 	    return true;
3273 	}
3274     }
3275 
3276   /* Iterate to the single executable bb2 successor.  */
3277   edge succe = NULL;
3278   FOR_EACH_EDGE (e, ei, bb2->succs)
3279     if (e->flags & EDGE_EXECUTABLE)
3280       {
3281 	if (succe)
3282 	  {
3283 	    succe = NULL;
3284 	    break;
3285 	  }
3286 	succe = e;
3287       }
3288   if (succe)
3289     {
3290       /* Verify the reached block is only reached through succe.
3291 	 If there is only one edge we can spare us the dominator
3292 	 check and iterate directly.  */
3293       if (EDGE_COUNT (succe->dest->preds) > 1)
3294 	{
3295 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
3296 	    if (e != succe
3297 		&& (e->flags & EDGE_EXECUTABLE))
3298 	      {
3299 		succe = NULL;
3300 		break;
3301 	      }
3302 	}
3303       if (succe)
3304 	{
3305 	  bb2 = succe->dest;
3306 
3307 	  /* Re-do the dominance check with changed bb2.  */
3308 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3309 	    return true;
3310 	}
3311     }
3312 
3313   /* We could now iterate updating bb1 / bb2.  */
3314   return false;
3315 }
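
/* Editorial example (a sketch, not from the sources): if bb1 has several
   predecessors but only one executable incoming edge, the walk above
   retries the dominance query from that predecessor; likewise if bb2 has
   a single executable successor.  This catches blocks that are dominated
   in the executable subgraph even though not-executable edges keep them
   off the dominator-tree path.  */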
3316 
3317 /* Set the value number of FROM to TO, return true if it has changed
3318    as a result.  */
3319 
3320 static inline bool
3321 set_ssa_val_to (tree from, tree to)
3322 {
3323   tree currval = SSA_VAL (from);
3324   poly_int64 toff, coff;
3325 
3326   /* The only things we allow as value numbers are ssa_names
3327      and invariants.  So assert that here.  We don't allow VN_TOP
3328      as visiting a stmt should produce a value number other than
3329      that.
3330      ???  Still VN_TOP can happen for unreachable code, so force
3331      it to varying in that case.  Not all code is prepared to
3332      get VN_TOP on valueization.  */
3333   if (to == VN_TOP)
3334     {
3335       if (dump_file && (dump_flags & TDF_DETAILS))
3336 	fprintf (dump_file, "Forcing value number to varying on "
3337 		 "receiving VN_TOP\n");
3338       to = from;
3339     }
3340 
3341   gcc_assert (to != NULL_TREE
3342 	      && ((TREE_CODE (to) == SSA_NAME
3343 		   && (to == from || SSA_VAL (to) == to))
3344 		  || is_gimple_min_invariant (to)));
3345 
3346   if (from != to)
3347     {
3348       if (currval == from)
3349 	{
3350 	  if (dump_file && (dump_flags & TDF_DETAILS))
3351 	    {
3352 	      fprintf (dump_file, "Not changing value number of ");
3353 	      print_generic_expr (dump_file, from);
3354 	      fprintf (dump_file, " from VARYING to ");
3355 	      print_generic_expr (dump_file, to);
3356 	      fprintf (dump_file, "\n");
3357 	    }
3358 	  return false;
3359 	}
3360       else if (currval != VN_TOP
3361 	       && ! is_gimple_min_invariant (currval)
3362 	       && is_gimple_min_invariant (to))
3363 	{
3364 	  if (dump_file && (dump_flags & TDF_DETAILS))
3365 	    {
3366 	      fprintf (dump_file, "Forcing VARYING instead of changing "
3367 		       "value number of ");
3368 	      print_generic_expr (dump_file, from);
3369 	      fprintf (dump_file, " from ");
3370 	      print_generic_expr (dump_file, currval);
3371 	      fprintf (dump_file, " (non-constant) to ");
3372 	      print_generic_expr (dump_file, to);
3373 	      fprintf (dump_file, " (constant)\n");
3374 	    }
3375 	  to = from;
3376 	}
3377       else if (TREE_CODE (to) == SSA_NAME
3378 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3379 	to = from;
3380     }
3381 
3382   if (dump_file && (dump_flags & TDF_DETAILS))
3383     {
3384       fprintf (dump_file, "Setting value number of ");
3385       print_generic_expr (dump_file, from);
3386       fprintf (dump_file, " to ");
3387       print_generic_expr (dump_file, to);
3388     }
3389 
3390   if (currval != to
3391       && !operand_equal_p (currval, to, 0)
3392       /* Different undefined SSA names are not actually different.  See
3393          PR82320 for a testcase where we'd otherwise not terminate iteration.  */
3394       && !(TREE_CODE (currval) == SSA_NAME
3395 	   && TREE_CODE (to) == SSA_NAME
3396 	   && ssa_undefined_value_p (currval, false)
3397 	   && ssa_undefined_value_p (to, false))
3398       /* ???  For addresses involving volatile objects or types operand_equal_p
3399          does not reliably detect ADDR_EXPRs as equal.  We know we are only
3400 	 getting invariant gimple addresses here, so can use
3401 	 get_addr_base_and_unit_offset to do this comparison.  */
3402       && !(TREE_CODE (currval) == ADDR_EXPR
3403 	   && TREE_CODE (to) == ADDR_EXPR
3404 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3405 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3406 	   && known_eq (coff, toff)))
3407     {
3408       if (dump_file && (dump_flags & TDF_DETAILS))
3409 	fprintf (dump_file, " (changed)\n");
3410 
3411       /* If we equate two SSA names we have to make the side-band info
3412          of the leader conservative (and remember whatever original value
3413 	 was present).  */
3414       if (TREE_CODE (to) == SSA_NAME)
3415 	{
3416 	  if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3417 	      && SSA_NAME_RANGE_INFO (to))
3418 	    {
3419 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3420 		  || dominated_by_p_w_unex
3421 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3422 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3423 		/* Keep the info from the dominator.  */
3424 		;
3425 	      else
3426 		{
3427 		  /* Save old info.  */
3428 		  if (! VN_INFO (to)->info.range_info)
3429 		    {
3430 		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3431 		      VN_INFO (to)->range_info_anti_range_p
3432 			= SSA_NAME_ANTI_RANGE_P (to);
3433 		    }
3434 		  /* Rather than allocating memory and unioning the info
3435 		     just clear it.  */
3436 		  if (dump_file && (dump_flags & TDF_DETAILS))
3437 		    {
3438 		      fprintf (dump_file, "clearing range info of ");
3439 		      print_generic_expr (dump_file, to);
3440 		      fprintf (dump_file, "\n");
3441 		    }
3442 		  SSA_NAME_RANGE_INFO (to) = NULL;
3443 		}
3444 	    }
3445 	  else if (POINTER_TYPE_P (TREE_TYPE (to))
3446 		   && SSA_NAME_PTR_INFO (to))
3447 	    {
3448 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3449 		  || dominated_by_p_w_unex
3450 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3451 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3452 		/* Keep the info from the dominator.  */
3453 		;
3454 	      else if (! SSA_NAME_PTR_INFO (from)
3455 		       /* Handle the case of trivially equivalent info.  */
3456 		       || memcmp (SSA_NAME_PTR_INFO (to),
3457 				  SSA_NAME_PTR_INFO (from),
3458 				  sizeof (ptr_info_def)) != 0)
3459 		{
3460 		  /* Save old info.  */
3461 		  if (! VN_INFO (to)->info.ptr_info)
3462 		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3463 		  /* Rather than allocating memory and unioning the info
3464 		     just clear it.  */
3465 		  if (dump_file && (dump_flags & TDF_DETAILS))
3466 		    {
3467 		      fprintf (dump_file, "clearing points-to info of ");
3468 		      print_generic_expr (dump_file, to);
3469 		      fprintf (dump_file, "\n");
3470 		    }
3471 		  SSA_NAME_PTR_INFO (to) = NULL;
3472 		}
3473 	    }
3474 	}
3475 
3476       VN_INFO (from)->valnum = to;
3477       return true;
3478     }
3479   if (dump_file && (dump_flags & TDF_DETAILS))
3480     fprintf (dump_file, "\n");
3481   return false;
3482 }
3483 
3484 /* Mark as processed all the definitions in the defining stmt of USE, or
3485    the USE itself.  */
3486 
3487 static void
3488 mark_use_processed (tree use)
3489 {
3490   ssa_op_iter iter;
3491   def_operand_p defp;
3492   gimple *stmt = SSA_NAME_DEF_STMT (use);
3493 
3494   if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3495     {
3496       VN_INFO (use)->use_processed = true;
3497       return;
3498     }
3499 
3500   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3501     {
3502       tree def = DEF_FROM_PTR (defp);
3503 
3504       VN_INFO (def)->use_processed = true;
3505     }
3506 }
3507 
3508 /* Value number all definitions in STMT to themselves.
3509    Return true if a value number changed.  */
3510 
3511 static bool
3512 defs_to_varying (gimple *stmt)
3513 {
3514   bool changed = false;
3515   ssa_op_iter iter;
3516   def_operand_p defp;
3517 
3518   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3519     {
3520       tree def = DEF_FROM_PTR (defp);
3521       changed |= set_ssa_val_to (def, def);
3522     }
3523   return changed;
3524 }
3525 
3526 /* Visit a copy between LHS and RHS, return true if the value number
3527    changed.  */
3528 
3529 static bool
3530 visit_copy (tree lhs, tree rhs)
3531 {
3532   /* Valueize.  */
3533   rhs = SSA_VAL (rhs);
3534 
3535   return set_ssa_val_to (lhs, rhs);
3536 }
3537 
3538 /* Lookup a value for OP in type WIDE_TYPE where the value in the type
3539    of OP is the same.  */
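/* For example (a sketch with hypothetical SSA names), for WIDE_TYPE
   int and OP a short _1, either the widening _2 = (int) _1 is already
   in the tables and _2 is returned, or _1 itself was created by a
   truncation _1 = (short) _3 from an int _3, in which case _3 is
   returned.  INTEGER_CSTs are simply extended.  */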
3540 
3541 static tree
3542 valueized_wider_op (tree wide_type, tree op)
3543 {
3544   if (TREE_CODE (op) == SSA_NAME)
3545     op = SSA_VAL (op);
3546 
3547   /* Either we have the widened op available.  */
3548   tree ops[3] = {};
3549   ops[0] = op;
3550   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3551 				       wide_type, ops, NULL);
3552   if (tem)
3553     return tem;
3554 
3555   /* Or the op is truncated from some existing value.  */
3556   if (TREE_CODE (op) == SSA_NAME)
3557     {
3558       gimple *def = SSA_NAME_DEF_STMT (op);
3559       if (is_gimple_assign (def)
3560 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3561 	{
3562 	  tem = gimple_assign_rhs1 (def);
3563 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3564 	    {
3565 	      if (TREE_CODE (tem) == SSA_NAME)
3566 		tem = SSA_VAL (tem);
3567 	      return tem;
3568 	    }
3569 	}
3570     }
3571 
3572   /* For constants simply extend it.  */
3573   if (TREE_CODE (op) == INTEGER_CST)
3574     return wide_int_to_tree (wide_type, wi::to_wide (op));
3575 
3576   return NULL_TREE;
3577 }
3578 
3579 /* Visit a nary operator RHS, value number it, and return true if the
3580    value number of LHS has changed as a result.  */
3581 
3582 static bool
3583 visit_nary_op (tree lhs, gassign *stmt)
3584 {
3585   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3586   if (result)
3587     return set_ssa_val_to (lhs, result);
3588 
3589   /* Do some special pattern matching for redundancies of operations
3590      in different types.  */
3591   enum tree_code code = gimple_assign_rhs_code (stmt);
3592   tree type = TREE_TYPE (lhs);
3593   tree rhs1 = gimple_assign_rhs1 (stmt);
3594   switch (code)
3595     {
3596     CASE_CONVERT:
3597       /* Match arithmetic done in a different type where we can easily
3598          substitute the result from some earlier sign-changed or widened
3599 	 operation.  */
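      /* For example (a sketch with hypothetical SSA names, assuming a
	 16-bit short), given
	   _3 = (int) _1 + (int) _2;   /* unsigned short _1, _2  */
	 a later
	   _4 = _1 + _2;
	   _5 = (int) _4;
	 can value number _5 to _3 & 0xffff, or to _3 directly for a
	 pure sign-change of the same precision.  */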
3600       if (INTEGRAL_TYPE_P (type)
3601 	  && TREE_CODE (rhs1) == SSA_NAME
3602 	  /* We only handle sign-changes or zero-extension -> & mask.  */
3603 	  && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3604 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3605 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3606 	{
3607 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3608 	  if (def
3609 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
3610 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
3611 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
3612 	    {
3613 	      tree ops[3] = {};
3614 	      /* Either we have the widened op available.  */
3615 	      ops[0] = valueized_wider_op (type,
3616 					   gimple_assign_rhs1 (def));
3617 	      if (ops[0])
3618 		ops[1] = valueized_wider_op (type,
3619 					     gimple_assign_rhs2 (def));
3620 	      if (ops[0] && ops[1])
3621 		{
3622 		  ops[0] = vn_nary_op_lookup_pieces
3623 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
3624 		  /* We have wider operation available.  */
3625 		  if (ops[0])
3626 		    {
3627 		      unsigned lhs_prec = TYPE_PRECISION (type);
3628 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3629 		      if (lhs_prec == rhs_prec)
3630 			{
3631 			  ops[1] = NULL_TREE;
3632 			  result = vn_nary_build_or_lookup (NOP_EXPR,
3633 							    type, ops);
3634 			  if (result)
3635 			    {
3636 			      bool changed = set_ssa_val_to (lhs, result);
3637 			      vn_nary_op_insert_stmt (stmt, result);
3638 			      return changed;
3639 			    }
3640 			}
3641 		      else
3642 			{
3643 			  ops[1] = wide_int_to_tree (type,
3644 						     wi::mask (rhs_prec, false,
3645 							       lhs_prec));
3646 			  result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3647 							    TREE_TYPE (lhs),
3648 							    ops);
3649 			  if (result)
3650 			    {
3651 			      bool changed = set_ssa_val_to (lhs, result);
3652 			      vn_nary_op_insert_stmt (stmt, result);
3653 			      return changed;
3654 			    }
3655 			}
3656 		    }
3657 		}
3658 	    }
3659 	}
3660     default:;
3661     }
3662 
3663   bool changed = set_ssa_val_to (lhs, lhs);
3664   vn_nary_op_insert_stmt (stmt, lhs);
3665   return changed;
3666 }
3667 
3668 /* Visit a call STMT storing into LHS.  Return true if the value number
3669    of the LHS has changed as a result.  */
3670 
3671 static bool
3672 visit_reference_op_call (tree lhs, gcall *stmt)
3673 {
3674   bool changed = false;
3675   struct vn_reference_s vr1;
3676   vn_reference_t vnresult = NULL;
3677   tree vdef = gimple_vdef (stmt);
3678 
3679   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
3680   if (lhs && TREE_CODE (lhs) != SSA_NAME)
3681     lhs = NULL_TREE;
3682 
3683   vn_reference_lookup_call (stmt, &vnresult, &vr1);
3684   if (vnresult)
3685     {
3686       if (vnresult->result_vdef && vdef)
3687 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3688       else if (vdef)
3689 	/* If the call was discovered to be pure or const reflect
3690 	   that as far as possible.  */
3691 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3692 
3693       if (!vnresult->result && lhs)
3694 	vnresult->result = lhs;
3695 
3696       if (vnresult->result && lhs)
3697 	changed |= set_ssa_val_to (lhs, vnresult->result);
3698     }
3699   else
3700     {
3701       vn_reference_t vr2;
3702       vn_reference_s **slot;
3703       tree vdef_val = vdef;
3704       if (vdef)
3705 	{
3706 	  /* If we value numbered an indirect call's function to one
3707 	     not clobbering memory, value number its VDEF to its
3708 	     VUSE.  */
3709 	  tree fn = gimple_call_fn (stmt);
3710 	  if (fn && TREE_CODE (fn) == SSA_NAME)
3711 	    {
3712 	      fn = SSA_VAL (fn);
3713 	      if (TREE_CODE (fn) == ADDR_EXPR
3714 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3715 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3716 		      & (ECF_CONST | ECF_PURE)))
3717 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3718 	    }
3719 	  changed |= set_ssa_val_to (vdef, vdef_val);
3720 	}
3721       if (lhs)
3722 	changed |= set_ssa_val_to (lhs, lhs);
3723       vr2 = current_info->references_pool->allocate ();
3724       vr2->vuse = vr1.vuse;
3725       /* As we are not walking the virtual operand chain we know the
3726 	 shared_lookup_references are still original so we can re-use
3727 	 them here.  */
3728       vr2->operands = vr1.operands.copy ();
3729       vr2->type = vr1.type;
3730       vr2->set = vr1.set;
3731       vr2->hashcode = vr1.hashcode;
3732       vr2->result = lhs;
3733       vr2->result_vdef = vdef_val;
3734       slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3735 							    INSERT);
3736       gcc_assert (!*slot);
3737       *slot = vr2;
3738     }
3739 
3740   return changed;
3741 }
3742 
3743 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3744    and return true if the value number of the LHS has changed as a result.  */
3745 
3746 static bool
3747 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3748 {
3749   bool changed = false;
3750   tree last_vuse;
3751   tree result;
3752 
3753   last_vuse = gimple_vuse (stmt);
3754   last_vuse_ptr = &last_vuse;
3755   result = vn_reference_lookup (op, gimple_vuse (stmt),
3756 				default_vn_walk_kind, NULL, true);
3757   last_vuse_ptr = NULL;
3758 
3759   /* We handle type-punning through unions by value-numbering based
3760      on offset and size of the access.  Be prepared to handle a
3761      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
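  /* For example (a sketch): a float load from a union member that was
     last stored as an int with the same offset and size is
     value-numbered as VIEW_CONVERT_EXPR <float> of the stored int
     value.  */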
3762   if (result
3763       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3764     {
3765       /* We will be setting the value number of lhs to the value number
3766 	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3767 	 So first simplify and lookup this expression to see if it
3768 	 is already available.  */
3769       code_helper rcode = VIEW_CONVERT_EXPR;
3770       tree ops[3] = { result };
3771       result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3772     }
3773 
3774   if (result)
3775     changed = set_ssa_val_to (lhs, result);
3776   else
3777     {
3778       changed = set_ssa_val_to (lhs, lhs);
3779       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3780     }
3781 
3782   return changed;
3783 }
3784 
3785 
3786 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3787    and return true if the value number of the LHS has changed as a result.  */
3788 
3789 static bool
3790 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3791 {
3792   bool changed = false;
3793   vn_reference_t vnresult = NULL;
3794   tree assign;
3795   bool resultsame = false;
3796   tree vuse = gimple_vuse (stmt);
3797   tree vdef = gimple_vdef (stmt);
3798 
3799   if (TREE_CODE (op) == SSA_NAME)
3800     op = SSA_VAL (op);
3801 
3802   /* First we want to lookup using the *vuses* from the store and see
3803      whether the last store to this location with the same address
3804      had the same value.
3805 
3806      The vuses represent the memory state before the store.  If the
3807      memory state, address, and value of the store is the same as the
3808      last store to this location, then this store will produce the
3809      same memory state as that store.
3810 
3811      In this case the vdef versions for this store are value numbered to those
3812      vuse versions, since they represent the same memory state after
3813      this store.
3814 
3815      Otherwise, the vdefs for the store are used when inserting into
3816      the table, since the store generates a new memory state.  */
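  /* For example (a sketch with hypothetical virtual operands), in
       # .MEM_4 = VDEF <.MEM_3>
       *p_1 = x_2;
       ...
       # .MEM_6 = VDEF <.MEM_4>
       *p_1 = x_2;
     the second store looks up its VUSE .MEM_4 and finds the earlier
     store of the same value to the same address, so .MEM_6 is value
     numbered to .MEM_4 and the store can later be elided.  */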
3817 
3818   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3819   if (vnresult
3820       && vnresult->result)
3821     {
3822       tree result = vnresult->result;
3823       if (TREE_CODE (result) == SSA_NAME)
3824 	result = SSA_VAL (result);
3825       resultsame = expressions_equal_p (result, op);
3826       if (resultsame)
3827 	{
3828 	  /* If the TBAA state isn't compatible for downstream reads
3829 	     we cannot value-number the VDEFs the same.  */
3830 	  alias_set_type set = get_alias_set (lhs);
3831 	  if (vnresult->set != set
3832 	      && ! alias_set_subset_of (set, vnresult->set))
3833 	    resultsame = false;
3834 	}
3835     }
3836 
3837   if (!resultsame)
3838     {
3839       /* Only perform the following when being called from PRE
3840 	 which embeds tail merging.  */
3841       if (default_vn_walk_kind == VN_WALK)
3842 	{
3843 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3844 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3845 	  if (vnresult)
3846 	    {
3847 	      VN_INFO (vdef)->use_processed = true;
3848 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
3849 	    }
3850 	}
3851 
3852       if (dump_file && (dump_flags & TDF_DETAILS))
3853 	{
3854 	  fprintf (dump_file, "No store match\n");
3855 	  fprintf (dump_file, "Value numbering store ");
3856 	  print_generic_expr (dump_file, lhs);
3857 	  fprintf (dump_file, " to ");
3858 	  print_generic_expr (dump_file, op);
3859 	  fprintf (dump_file, "\n");
3860 	}
3861       /* Have to set value numbers before insert, since insert is
3862 	 going to valueize the references in-place.  */
3863       if (vdef)
3864 	changed |= set_ssa_val_to (vdef, vdef);
3865 
3866       /* Do not insert structure copies into the tables.  */
3867       if (is_gimple_min_invariant (op)
3868 	  || is_gimple_reg (op))
3869         vn_reference_insert (lhs, op, vdef, NULL);
3870 
3871       /* Only perform the following when being called from PRE
3872 	 which embeds tail merging.  */
3873       if (default_vn_walk_kind == VN_WALK)
3874 	{
3875 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3876 	  vn_reference_insert (assign, lhs, vuse, vdef);
3877 	}
3878     }
3879   else
3880     {
3881       /* We had a match, so value number the vdef to have the value
3882 	 number of the vuse it came from.  */
3883 
3884       if (dump_file && (dump_flags & TDF_DETAILS))
3885 	fprintf (dump_file, "Store matched earlier value, "
3886 		 "value numbering store vdefs to matching vuses.\n");
3887 
3888       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3889     }
3890 
3891   return changed;
3892 }
3893 
3894 /* Visit and value number PHI, return true if the value number
3895    changed.  */
3896 
3897 static bool
3898 visit_phi (gimple *phi)
3899 {
3900   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3901   unsigned n_executable = 0;
3902   bool allsame = true;
3903   edge_iterator ei;
3904   edge e;
3905 
3906   /* TODO: We could check for this in init_scc_vn, and replace this
3907      with a gcc_assert.  */
3908   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3909     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3910 
3911   /* See if all non-TOP arguments have the same value.  TOP is
3912      equivalent to everything, so we can ignore it.  */
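  /* For example (a sketch with hypothetical SSA names), in
       x_4 = PHI <x_1(2), x_3(3), x_1(4)>
     where the edge from block 3 is not executable, or x_3 still
     values to VN_TOP, the remaining arguments all value to x_1 and
     the PHI result gets value number x_1.  */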
3913   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3914     if (e->flags & EDGE_EXECUTABLE)
3915       {
3916 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3917 
3918 	++n_executable;
3919 	if (TREE_CODE (def) == SSA_NAME)
3920 	  def = SSA_VAL (def);
3921 	if (def == VN_TOP)
3922 	  ;
3923 	/* Ignore undefined defs for sameval but record one.  */
3924 	else if (TREE_CODE (def) == SSA_NAME
3925 		 && ssa_undefined_value_p (def, false))
3926 	  seen_undef = def;
3927 	else if (sameval == VN_TOP)
3928 	  sameval = def;
3929 	else if (!expressions_equal_p (def, sameval))
3930 	  {
3931 	    allsame = false;
3932 	    break;
3933 	  }
3934       }
3935 
3936 
3937   /* If none of the edges was executable keep the value-number at VN_TOP;
3938      if only a single edge is executable use its value.  */
3939   if (n_executable <= 1)
3940     result = seen_undef ? seen_undef : sameval;
3941   /* If we saw only undefined values and VN_TOP use one of the
3942      undefined values.  */
3943   else if (sameval == VN_TOP)
3944     result = seen_undef ? seen_undef : sameval;
3945   /* First see if it is equivalent to a phi node in this block.  We prefer
3946      this as it allows IV elimination - see PRs 66502 and 67167.  */
3947   else if ((result = vn_phi_lookup (phi)))
3948     ;
3949   /* If all values are the same use that, unless we've seen undefined
3950      values as well and the value isn't constant.
3951      CCP/copyprop have the same restriction to not remove uninit warnings.  */
3952   else if (allsame
3953 	   && (! seen_undef || is_gimple_min_invariant (sameval)))
3954     result = sameval;
3955   else
3956     {
3957       result = PHI_RESULT (phi);
3958       /* Only insert PHIs that are varying; for constant value numbers
3959          we would mess up equivalences otherwise, as we are only
3960 	 comparing the immediate controlling predicates.  */
3961       vn_phi_insert (phi, result);
3962     }
3963 
3964   return set_ssa_val_to (PHI_RESULT (phi), result);
3965 }
3966 
3967 /* Try to simplify RHS using equivalences and constant folding.  */
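/* For example (a sketch with hypothetical SSA names): if the current
   lattice records SSA_VAL (a_1) == 5, the RHS of x_2 = a_1 + 3
   constant-folds to 8.  */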
3968 
3969 static tree
3970 try_to_simplify (gassign *stmt)
3971 {
3972   enum tree_code code = gimple_assign_rhs_code (stmt);
3973   tree tem;
3974 
3975   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
3976      in this case, there is no point in doing extra work.  */
3977   if (code == SSA_NAME)
3978     return NULL_TREE;
3979 
3980   /* First try constant folding based on our current lattice.  */
3981   mprts_hook = vn_lookup_simplify_result;
3982   mprts_hook_cnt = 9;
3983   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3984   mprts_hook = NULL;
3985   if (tem
3986       && (TREE_CODE (tem) == SSA_NAME
3987 	  || is_gimple_min_invariant (tem)))
3988     return tem;
3989 
3990   return NULL_TREE;
3991 }
3992 
3993 /* Visit and value number USE, return true if the value number
3994    changed.  */
3995 
3996 static bool
3997 visit_use (tree use)
3998 {
3999   bool changed = false;
4000   gimple *stmt = SSA_NAME_DEF_STMT (use);
4001 
4002   mark_use_processed (use);
4003 
4004   gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4005 	      && !SSA_NAME_IS_DEFAULT_DEF (use));
4006 
4007   if (dump_file && (dump_flags & TDF_DETAILS))
4008     {
4009       fprintf (dump_file, "Value numbering ");
4010       print_generic_expr (dump_file, use);
4011       fprintf (dump_file, " stmt = ");
4012       print_gimple_stmt (dump_file, stmt, 0);
4013     }
4014 
4015   if (gimple_code (stmt) == GIMPLE_PHI)
4016     changed = visit_phi (stmt);
4017   else if (gimple_has_volatile_ops (stmt))
4018     changed = defs_to_varying (stmt);
4019   else if (gassign *ass = dyn_cast <gassign *> (stmt))
4020     {
4021       enum tree_code code = gimple_assign_rhs_code (ass);
4022       tree lhs = gimple_assign_lhs (ass);
4023       tree rhs1 = gimple_assign_rhs1 (ass);
4024       tree simplified;
4025 
4026       /* Shortcut for copies. Simplifying copies is pointless,
4027 	 since we copy the expression and value they represent.  */
4028       if (code == SSA_NAME
4029 	  && TREE_CODE (lhs) == SSA_NAME)
4030 	{
4031 	  changed = visit_copy (lhs, rhs1);
4032 	  goto done;
4033 	}
4034       simplified = try_to_simplify (ass);
4035       if (simplified)
4036 	{
4037 	  if (dump_file && (dump_flags & TDF_DETAILS))
4038 	    {
4039 	      fprintf (dump_file, "RHS ");
4040 	      print_gimple_expr (dump_file, ass, 0);
4041 	      fprintf (dump_file, " simplified to ");
4042 	      print_generic_expr (dump_file, simplified);
4043 	      fprintf (dump_file, "\n");
4044 	    }
4045 	}
4046       /* Setting value numbers to constants will occasionally
4047 	 screw up phi congruence because constants are not
4048 	 uniquely associated with a single ssa name that can be
4049 	 looked up.  */
4050       if (simplified
4051 	  && is_gimple_min_invariant (simplified)
4052 	  && TREE_CODE (lhs) == SSA_NAME)
4053 	{
4054 	  changed = set_ssa_val_to (lhs, simplified);
4055 	  goto done;
4056 	}
4057       else if (simplified
4058 	       && TREE_CODE (simplified) == SSA_NAME
4059 	       && TREE_CODE (lhs) == SSA_NAME)
4060 	{
4061 	  changed = visit_copy (lhs, simplified);
4062 	  goto done;
4063 	}
4064 
4065       if ((TREE_CODE (lhs) == SSA_NAME
4066 	   /* We can substitute SSA_NAMEs that are live over
4067 	      abnormal edges with their constant value.  */
4068 	   && !(gimple_assign_copy_p (ass)
4069 		&& is_gimple_min_invariant (rhs1))
4070 	   && !(simplified
4071 		&& is_gimple_min_invariant (simplified))
4072 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4073 	  /* Stores or copies from SSA_NAMEs that are live over
4074 	     abnormal edges are a problem.  */
4075 	  || (code == SSA_NAME
4076 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4077 	changed = defs_to_varying (ass);
4078       else if (REFERENCE_CLASS_P (lhs)
4079 	       || DECL_P (lhs))
4080 	changed = visit_reference_op_store (lhs, rhs1, ass);
4081       else if (TREE_CODE (lhs) == SSA_NAME)
4082 	{
4083 	  if ((gimple_assign_copy_p (ass)
4084 	       && is_gimple_min_invariant (rhs1))
4085 	      || (simplified
4086 		  && is_gimple_min_invariant (simplified)))
4087 	    {
4088 	      if (simplified)
4089 		changed = set_ssa_val_to (lhs, simplified);
4090 	      else
4091 		changed = set_ssa_val_to (lhs, rhs1);
4092 	    }
4093 	  else
4094 	    {
4095 	      /* Visit the original statement.  */
4096 	      switch (vn_get_stmt_kind (ass))
4097 		{
4098 		case VN_NARY:
4099 		changed = visit_nary_op (lhs, ass);
4100 		break;
4101 		case VN_REFERENCE:
4102 		changed = visit_reference_op_load (lhs, rhs1, ass);
4103 		break;
4104 		default:
4105 		changed = defs_to_varying (ass);
4106 		break;
4107 		}
4108 	    }
4109 	}
4110       else
4111 	changed = defs_to_varying (ass);
4112     }
4113   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4114     {
4115       tree lhs = gimple_call_lhs (call_stmt);
4116       if (lhs && TREE_CODE (lhs) == SSA_NAME)
4117 	{
4118 	  /* Try constant folding based on our current lattice.  */
4119 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4120 							    vn_valueize);
4121 	  if (simplified)
4122 	    {
4123 	      if (dump_file && (dump_flags & TDF_DETAILS))
4124 		{
4125 		  fprintf (dump_file, "call ");
4126 		  print_gimple_expr (dump_file, call_stmt, 0);
4127 		  fprintf (dump_file, " simplified to ");
4128 		  print_generic_expr (dump_file, simplified);
4129 		  fprintf (dump_file, "\n");
4130 		}
4131 	    }
4132 	  /* Setting value numbers to constants will occasionally
4133 	     screw up phi congruence because constants are not
4134 	     uniquely associated with a single ssa name that can be
4135 	     looked up.  */
4136 	  if (simplified
4137 	      && is_gimple_min_invariant (simplified))
4138 	    {
4139 	      changed = set_ssa_val_to (lhs, simplified);
4140 	      if (gimple_vdef (call_stmt))
4141 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4142 					   SSA_VAL (gimple_vuse (call_stmt)));
4143 	      goto done;
4144 	    }
4145 	  else if (simplified
4146 		   && TREE_CODE (simplified) == SSA_NAME)
4147 	    {
4148 	      changed = visit_copy (lhs, simplified);
4149 	      if (gimple_vdef (call_stmt))
4150 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4151 					   SSA_VAL (gimple_vuse (call_stmt)));
4152 	      goto done;
4153 	    }
4154 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4155 	    {
4156 	      changed = defs_to_varying (call_stmt);
4157 	      goto done;
4158 	    }
4159 	}
4160 
4161       /* Pick up flags from a devirtualization target.  */
4162       tree fn = gimple_call_fn (stmt);
4163       int extra_fnflags = 0;
4164       if (fn && TREE_CODE (fn) == SSA_NAME)
4165 	{
4166 	  fn = SSA_VAL (fn);
4167 	  if (TREE_CODE (fn) == ADDR_EXPR
4168 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4169 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4170 	}
4171       if (!gimple_call_internal_p (call_stmt)
4172 	  && (/* Calls to the same function with the same vuse
4173 		 and the same operands do not necessarily return the same
4174 		 value, unless they're pure or const.  */
4175 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
4176 	       & (ECF_PURE | ECF_CONST))
4177 	      /* If calls have a vdef, subsequent calls won't have
4178 		 the same incoming vuse.  So, if 2 calls with vdef have the
4179 		 same vuse, we know they're not subsequent.
4180 		 We can value number 2 such non-subsequent calls to the
4181 		 same function with the same vuse and the same operands
4182 		 the same, because there is no code in the program that
4183 		 can compare the 2 values...  */
4184 	      || (gimple_vdef (call_stmt)
4185 		  /* ... unless the call returns a pointer which does
4186 		     not alias with anything else.  In which case the
4187 		     information that the values are distinct are encoded
4188 		     in the IL.  */
4189 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4190 		  /* Only perform the following when being called from PRE
4191 		     which embeds tail merging.  */
4192 		  && default_vn_walk_kind == VN_WALK)))
4193 	changed = visit_reference_op_call (lhs, call_stmt);
4194       else
4195 	changed = defs_to_varying (call_stmt);
4196     }
4197   else
4198     changed = defs_to_varying (stmt);
4199  done:
4200   return changed;
4201 }
4202 
4203 /* Compare two operands by reverse postorder index */
4204 
4205 static int
4206 compare_ops (const void *pa, const void *pb)
4207 {
4208   const tree opa = *((const tree *)pa);
4209   const tree opb = *((const tree *)pb);
4210   gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4211   gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4212   basic_block bba;
4213   basic_block bbb;
4214 
4215   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4216     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4217   else if (gimple_nop_p (opstmta))
4218     return -1;
4219   else if (gimple_nop_p (opstmtb))
4220     return 1;
4221 
4222   bba = gimple_bb (opstmta);
4223   bbb = gimple_bb (opstmtb);
4224 
4225   if (!bba && !bbb)
4226     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4227   else if (!bba)
4228     return -1;
4229   else if (!bbb)
4230     return 1;
4231 
4232   if (bba == bbb)
4233     {
4234       if (gimple_code (opstmta) == GIMPLE_PHI
4235 	  && gimple_code (opstmtb) == GIMPLE_PHI)
4236 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4237       else if (gimple_code (opstmta) == GIMPLE_PHI)
4238 	return -1;
4239       else if (gimple_code (opstmtb) == GIMPLE_PHI)
4240 	return 1;
4241       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4242         return gimple_uid (opstmta) - gimple_uid (opstmtb);
4243       else
4244 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4245     }
4246   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4247 }
4248 
4249 /* Sort an array containing members of a strongly connected component
4250    SCC so that the members are ordered by RPO number.
4251    This means that when the sort is complete, iterating through the
4252    array will give you the members in RPO order.  */
4253 
4254 static void
4255 sort_scc (vec<tree> scc)
4256 {
4257   scc.qsort (compare_ops);
4258 }
4259 
4260 /* Insert the no longer used nary ONARY into the hash INFO.  */
4261 
4262 static void
4263 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4264 {
4265   size_t size = sizeof_vn_nary_op (onary->length);
4266   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4267 					       &info->nary_obstack);
4268   memcpy (nary, onary, size);
4269   vn_nary_op_insert_into (nary, info->nary, false);
4270 }
4271 
4272 /* Insert the no longer used phi OPHI into the hash INFO.  */
4273 
4274 static void
4275 copy_phi (vn_phi_t ophi, vn_tables_t info)
4276 {
4277   vn_phi_t phi = info->phis_pool->allocate ();
4278   vn_phi_s **slot;
4279   memcpy (phi, ophi, sizeof (*phi));
4280   ophi->phiargs.create (0);
4281   slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4282   gcc_assert (!*slot);
4283   *slot = phi;
4284 }
4285 
4286 /* Insert the no longer used reference OREF into the hash INFO.  */
4287 
4288 static void
4289 copy_reference (vn_reference_t oref, vn_tables_t info)
4290 {
4291   vn_reference_t ref;
4292   vn_reference_s **slot;
4293   ref = info->references_pool->allocate ();
4294   memcpy (ref, oref, sizeof (*ref));
4295   oref->operands.create (0);
4296   slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4297   if (*slot)
4298     free_reference (*slot);
4299   *slot = ref;
4300 }
4301 
4302 /* Process a strongly connected component in the SSA graph.  */
4303 
4304 static void
4305 process_scc (vec<tree> scc)
4306 {
4307   tree var;
4308   unsigned int i;
4309   unsigned int iterations = 0;
4310   bool changed = true;
4311   vn_nary_op_iterator_type hin;
4312   vn_phi_iterator_type hip;
4313   vn_reference_iterator_type hir;
4314   vn_nary_op_t nary;
4315   vn_phi_t phi;
4316   vn_reference_t ref;
4317 
4318   /* If the SCC has a single member, just visit it.  */
4319   if (scc.length () == 1)
4320     {
4321       tree use = scc[0];
4322       if (VN_INFO (use)->use_processed)
4323 	return;
4324       /* We need to make sure it doesn't form a cycle itself, which can
4325 	 happen for self-referential PHI nodes.  In that case we would
4326 	 end up inserting an expression with VN_TOP operands into the
4327 	 valid table which makes us derive bogus equivalences later.
4328 	 The cheapest way to check this is to assume it for all PHI nodes.  */
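      /* For example (a sketch with hypothetical SSA names)
	   i_1 = PHI <0(2), i_1(3)>
	 is a single-member SCC that references itself and thus still
	 needs iteration with the optimistic table.  */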
4329       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4330 	/* Fallthru to iteration.  */ ;
4331       else
4332 	{
4333 	  visit_use (use);
4334 	  return;
4335 	}
4336     }
4337 
4338   if (dump_file && (dump_flags & TDF_DETAILS))
4339     print_scc (dump_file, scc);
4340 
4341   /* Iterate over the SCC with the optimistic table until it stops
4342      changing.  */
4343   current_info = optimistic_info;
4344   while (changed)
4345     {
4346       changed = false;
4347       iterations++;
4348       if (dump_file && (dump_flags & TDF_DETAILS))
4349 	fprintf (dump_file, "Starting iteration %d\n", iterations);
4350       /* As we are value-numbering optimistically we have to
4351 	 clear the expression tables and the simplified expressions
4352 	 in each iteration until we converge.  */
4353       optimistic_info->nary->empty ();
4354       optimistic_info->phis->empty ();
4355       optimistic_info->references->empty ();
4356       obstack_free (&optimistic_info->nary_obstack, NULL);
4357       gcc_obstack_init (&optimistic_info->nary_obstack);
4358       optimistic_info->phis_pool->release ();
4359       optimistic_info->references_pool->release ();
4360       FOR_EACH_VEC_ELT (scc, i, var)
4361 	gcc_assert (!VN_INFO (var)->needs_insertion
4362 		    && VN_INFO (var)->expr == NULL);
4363       FOR_EACH_VEC_ELT (scc, i, var)
4364 	changed |= visit_use (var);
4365     }
4366 
4367   if (dump_file && (dump_flags & TDF_DETAILS))
4368     fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4369   statistics_histogram_event (cfun, "SCC iterations", iterations);
4370 
4371   /* Finally, copy the contents of the no longer used optimistic
4372      table to the valid table.  */
4373   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4374     copy_nary (nary, valid_info);
4375   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4376     copy_phi (phi, valid_info);
4377   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4378 			       ref, vn_reference_t, hir)
4379     copy_reference (ref, valid_info);
4380 
4381   current_info = valid_info;
4382 }
4383 
4384 
4385 /* Pop the components of the found SCC for NAME off the SCC stack
4386    and process them.  If the SCC exceeds the size limit its members
4387    are dropped to varying instead.  */
4388 
4389 static void
4390 extract_and_process_scc_for_name (tree name)
4391 {
4392   auto_vec<tree> scc;
4393   tree x;
4394 
4395   /* Found an SCC, pop the components off the SCC stack and
4396      process them.  */
4397   do
4398     {
4399       x = sccstack.pop ();
4400 
4401       VN_INFO (x)->on_sccstack = false;
4402       scc.safe_push (x);
4403     } while (x != name);
4404 
4405   /* Drop all defs in the SCC to varying in case an SCC turns out to be
4406      incredibly large.
4407      ???  Just switch to a non-optimistic mode that avoids any iteration.  */
4408   if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4409     {
4410       if (dump_file)
4411 	{
4412 	  print_scc (dump_file, scc);
4413 	  fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4414 		   "size %u exceeding %u\n", scc.length (),
4415 		   (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4416 	}
4417       tree var;
4418       unsigned i;
4419       FOR_EACH_VEC_ELT (scc, i, var)
4420 	{
4421 	  gimple *def = SSA_NAME_DEF_STMT (var);
4422 	  mark_use_processed (var);
4423 	  if (SSA_NAME_IS_DEFAULT_DEF (var)
4424 	      || gimple_code (def) == GIMPLE_PHI)
4425 	    set_ssa_val_to (var, var);
4426 	  else
4427 	    defs_to_varying (def);
4428 	}
4429       return;
4430     }
4431 
4432   if (scc.length () > 1)
4433     sort_scc (scc);
4434 
4435   process_scc (scc);
4436 }
4437 
4438 /* Depth first search on NAME to discover and process SCC's in the SSA
4439    graph.
4440    Execution of this algorithm relies on the fact that the SCC's are
4441    popped off the stack in topological order.  */
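/* This is Tarjan's SCC algorithm made iterative.  A sketch of the
   invariant: VN_INFO (N)->low tracks the smallest DFS number known to
   be reachable from N; when it still equals N's own DFS number after
   all operands were walked, N is the root of an SCC consisting of N
   and everything above it on the SCC stack.  */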
4444 
4445 static void
4446 DFS (tree name)
4447 {
4448   auto_vec<ssa_op_iter> itervec;
4449   auto_vec<tree> namevec;
4450   use_operand_p usep = NULL;
4451   gimple *defstmt;
4452   tree use;
4453   ssa_op_iter iter;
4454 
4455 start_over:
4456   /* SCC info */
4457   VN_INFO (name)->dfsnum = next_dfs_num++;
4458   VN_INFO (name)->visited = true;
4459   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4460 
4461   sccstack.safe_push (name);
4462   VN_INFO (name)->on_sccstack = true;
4463   defstmt = SSA_NAME_DEF_STMT (name);
4464 
4465   /* Recursively DFS on our operands, looking for SCC's.  */
4466   if (!gimple_nop_p (defstmt))
4467     {
4468       /* Push a new iterator.  */
4469       if (gphi *phi = dyn_cast <gphi *> (defstmt))
4470 	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4471       else
4472 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4473     }
4474   else
4475     clear_and_done_ssa_iter (&iter);
4476 
4477   while (1)
4478     {
4479       /* If we are done processing uses of a name, go up the stack
4480 	 of iterators and process SCCs as we found them.  */
4481       if (op_iter_done (&iter))
4482 	{
4483 	  /* See if we found an SCC.  */
4484 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4485 	    extract_and_process_scc_for_name (name);
4486 
4487 	  /* Check if we are done.  */
4488 	  if (namevec.is_empty ())
4489 	    return;
4490 
4491 	  /* Restore the last use walker and continue walking there.  */
4492 	  use = name;
4493 	  name = namevec.pop ();
4494 	  memcpy (&iter, &itervec.last (),
4495 		  sizeof (ssa_op_iter));
4496 	  itervec.pop ();
4497 	  goto continue_walking;
4498 	}
4499 
4500       use = USE_FROM_PTR (usep);
4501 
4502       /* Since we handle phi nodes, we will sometimes get
4503 	 invariants in the use expression.  */
4504       if (TREE_CODE (use) == SSA_NAME)
4505 	{
4506 	  if (! (VN_INFO (use)->visited))
4507 	    {
4508 	      /* Recurse by pushing the current use walking state on
4509 		 the stack and starting over.  */
4510 	      itervec.safe_push (iter);
4511 	      namevec.safe_push (name);
4512 	      name = use;
4513 	      goto start_over;
4514 
4515 continue_walking:
4516 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4517 					 VN_INFO (use)->low);
4518 	    }
4519 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4520 	      && VN_INFO (use)->on_sccstack)
4521 	    {
4522 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4523 					 VN_INFO (name)->low);
4524 	    }
4525 	}
4526 
4527       usep = op_iter_next_use (&iter);
4528     }
4529 }
4530 
4531 /* Allocate a value number table.  */
4532 
4533 static void
4534 allocate_vn_table (vn_tables_t table)
4535 {
4536   table->phis = new vn_phi_table_type (23);
4537   table->nary = new vn_nary_op_table_type (23);
4538   table->references = new vn_reference_table_type (23);
4539 
4540   gcc_obstack_init (&table->nary_obstack);
4541   table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4542   table->references_pool = new object_allocator<vn_reference_s>
4543     ("VN references");
4544 }
4545 
4546 /* Free a value number table.  */
4547 
4548 static void
4549 free_vn_table (vn_tables_t table)
4550 {
4551   delete table->phis;
4552   table->phis = NULL;
4553   delete table->nary;
4554   table->nary = NULL;
4555   delete table->references;
4556   table->references = NULL;
4557   obstack_free (&table->nary_obstack, NULL);
4558   delete table->phis_pool;
4559   delete table->references_pool;
4560 }
4561 
4562 static void
4563 init_scc_vn (void)
4564 {
4565   int j;
4566   int *rpo_numbers_temp;
4567 
4568   calculate_dominance_info (CDI_DOMINATORS);
4569   mark_dfs_back_edges ();
4570 
4571   sccstack.create (0);
4572   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4573 
4574   constant_value_ids = BITMAP_ALLOC (NULL);
4575 
4576   next_dfs_num = 1;
4577   next_value_id = 1;
4578 
4579   vn_ssa_aux_table.create (num_ssa_names + 1);
4580   /* create () doesn't actually grow it to the right size, it just
4581      preallocates the space to do so.  */
4582   vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4583   gcc_obstack_init (&vn_ssa_aux_obstack);
4584 
4585   shared_lookup_phiargs.create (0);
4586   shared_lookup_references.create (0);
4587   rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4588   rpo_numbers_temp =
4589     XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4590   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4591 
4592   /* rpo_numbers_temp is indexed by RPO position: rpo_numbers_temp[i]
4593      is the index of the i'th block in RPO order.  We want to map bb
4594      indices to RPO numbers, so we need to invert this array.  */
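  /* For example (a sketch): with rpo_numbers_temp == { 2, 4, 3 } the
     loop below yields rpo_numbers[2] == 0, rpo_numbers[4] == 1 and
     rpo_numbers[3] == 2.  */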
4595   for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4596     rpo_numbers[rpo_numbers_temp[j]] = j;
4597 
4598   XDELETE (rpo_numbers_temp);
4599 
4600   VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4601 		       get_identifier ("VN_TOP"), error_mark_node);
4602 
4603   renumber_gimple_stmt_uids ();
4604 
4605   /* Create the valid and optimistic value numbering tables.  */
4606   valid_info = XCNEW (struct vn_tables_s);
4607   allocate_vn_table (valid_info);
4608   optimistic_info = XCNEW (struct vn_tables_s);
4609   allocate_vn_table (optimistic_info);
4610   current_info = valid_info;
4611 
4612   /* Create the VN_INFO structures, and initialize value numbers to
4613      TOP or VARYING for parameters.  */
4614   size_t i;
4615   tree name;
4616 
4617   FOR_EACH_SSA_NAME (i, name, cfun)
4618     {
4619       VN_INFO_GET (name)->valnum = VN_TOP;
4620       VN_INFO (name)->needs_insertion = false;
4621       VN_INFO (name)->expr = NULL;
4622       VN_INFO (name)->value_id = 0;
4623 
4624       if (!SSA_NAME_IS_DEFAULT_DEF (name))
4625 	continue;
4626 
4627       switch (TREE_CODE (SSA_NAME_VAR (name)))
4628 	{
4629 	case VAR_DECL:
4630 	  /* All undefined vars are VARYING.  */
4631 	  VN_INFO (name)->valnum = name;
4632 	  VN_INFO (name)->visited = true;
4633 	  break;
4634 
4635 	case PARM_DECL:
4636 	  /* Parameters are VARYING but we can record a condition
4637 	     if we know it is a non-NULL pointer.  */
4638 	  VN_INFO (name)->visited = true;
4639 	  VN_INFO (name)->valnum = name;
4640 	  if (POINTER_TYPE_P (TREE_TYPE (name))
4641 	      && nonnull_arg_p (SSA_NAME_VAR (name)))
4642 	    {
4643 	      tree ops[2];
4644 	      ops[0] = name;
4645 	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
4646 	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4647 					boolean_true_node, 0);
4648 	      if (dump_file && (dump_flags & TDF_DETAILS))
4649 		{
4650 		  fprintf (dump_file, "Recording ");
4651 		  print_generic_expr (dump_file, name, TDF_SLIM);
4652 		  fprintf (dump_file, " != 0\n");
4653 		}
4654 	    }
4655 	  break;
4656 
4657 	case RESULT_DECL:
4658 	  /* If the result is passed by invisible reference the default
4659 	     def is initialized, otherwise it's uninitialized.  Still,
4660 	     undefined is varying.  */
4661 	  VN_INFO (name)->visited = true;
4662 	  VN_INFO (name)->valnum = name;
4663 	  break;
4664 
4665 	default:
4666 	  gcc_unreachable ();
4667 	}
4668     }
4669 }
4670 
4671 /* Restore SSA info that has been reset on value leaders.  */
4672 
4673 void
4674 scc_vn_restore_ssa_info (void)
4675 {
4676   unsigned i;
4677   tree name;
4678 
4679   FOR_EACH_SSA_NAME (i, name, cfun)
4680     {
4681       if (has_VN_INFO (name))
4682 	{
4683 	  if (VN_INFO (name)->needs_insertion)
4684 	    ;
4685 	  else if (POINTER_TYPE_P (TREE_TYPE (name))
4686 		   && VN_INFO (name)->info.ptr_info)
4687 	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4688 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4689 		   && VN_INFO (name)->info.range_info)
4690 	    {
4691 	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4692 	      SSA_NAME_ANTI_RANGE_P (name)
4693 		= VN_INFO (name)->range_info_anti_range_p;
4694 	    }
4695 	}
4696     }
4697 }
4698 
4699 void
4700 free_scc_vn (void)
4701 {
4702   size_t i;
4703   tree name;
4704 
4705   delete constant_to_value_id;
4706   constant_to_value_id = NULL;
4707   BITMAP_FREE (constant_value_ids);
4708   shared_lookup_phiargs.release ();
4709   shared_lookup_references.release ();
4710   XDELETEVEC (rpo_numbers);
4711 
4712   FOR_EACH_SSA_NAME (i, name, cfun)
4713     {
4714       if (has_VN_INFO (name)
4715 	  && VN_INFO (name)->needs_insertion)
4716 	release_ssa_name (name);
4717     }
4718   obstack_free (&vn_ssa_aux_obstack, NULL);
4719   vn_ssa_aux_table.release ();
4720 
4721   sccstack.release ();
4722   free_vn_table (valid_info);
4723   XDELETE (valid_info);
4724   free_vn_table (optimistic_info);
4725   XDELETE (optimistic_info);
4726 
4727   BITMAP_FREE (const_parms);
4728 }
4729 
4730 /* Set *ID according to RESULT.  */
4731 
4732 static void
4733 set_value_id_for_result (tree result, unsigned int *id)
4734 {
4735   if (result && TREE_CODE (result) == SSA_NAME)
4736     *id = VN_INFO (result)->value_id;
4737   else if (result && is_gimple_min_invariant (result))
4738     *id = get_or_alloc_constant_value_id (result);
4739   else
4740     *id = get_next_value_id ();
4741 }
4742 
4743 /* Set the value ids in the valid hash tables.  */
4744 
4745 static void
4746 set_hashtable_value_ids (void)
4747 {
4748   vn_nary_op_iterator_type hin;
4749   vn_phi_iterator_type hip;
4750   vn_reference_iterator_type hir;
4751   vn_nary_op_t vno;
4752   vn_reference_t vr;
4753   vn_phi_t vp;
4754 
4755   /* Now set the value ids of the things we had put in the hash
4756      table.  */
4757 
4758   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4759     set_value_id_for_result (vno->result, &vno->value_id);
4760 
4761   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4762     set_value_id_for_result (vp->result, &vp->value_id);
4763 
4764   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4765 			       hir)
4766     set_value_id_for_result (vr->result, &vr->value_id);
4767 }
4768 
4769 class sccvn_dom_walker : public dom_walker
4770 {
4771 public:
4772   sccvn_dom_walker ()
4773     : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4774 
4775   virtual edge before_dom_children (basic_block);
4776   virtual void after_dom_children (basic_block);
4777 
4778   void record_cond (basic_block,
4779 		    enum tree_code code, tree lhs, tree rhs, bool value);
4780   void record_conds (basic_block,
4781 		     enum tree_code code, tree lhs, tree rhs, bool value);
4782 
4783   auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4784     cond_stack;
4785 };
4786 
4787 /* Record a temporary condition for the BB and its dominated blocks.  */
4788 
4789 void
4790 sccvn_dom_walker::record_cond (basic_block bb,
4791 			       enum tree_code code, tree lhs, tree rhs,
4792 			       bool value)
4793 {
4794   tree ops[2] = { lhs, rhs };
4795   vn_nary_op_t old = NULL;
4796   if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4797     current_info->nary->remove_elt_with_hash (old, old->hashcode);
4798   vn_nary_op_t cond
4799     = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4800 				value
4801 				? boolean_true_node
4802 				: boolean_false_node, 0);
4803   if (dump_file && (dump_flags & TDF_DETAILS))
4804     {
4805       fprintf (dump_file, "Recording temporarily ");
4806       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4807       fprintf (dump_file, " %s ", get_tree_code_name (code));
4808       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4809       fprintf (dump_file, " == %s%s\n",
4810 	       value ? "true" : "false",
4811 	       old ? " (old entry saved)" : "");
4812     }
4813   cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4814 }
4815 
4816 /* Record temporary conditions for the BB and its dominated blocks
4817    according to LHS CODE RHS == VALUE and its dominated conditions.  */
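/* For example (a sketch with hypothetical SSA names): on an edge
   taking a_1 < b_2 as true this records a_1 <= b_2 == true,
   a_1 != b_2 == true and a_1 == b_2 == false in addition to the
   original condition.  */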
4818 
4819 void
4820 sccvn_dom_walker::record_conds (basic_block bb,
4821 				enum tree_code code, tree lhs, tree rhs,
4822 				bool value)
4823 {
4824   /* Record the original condition.  */
4825   record_cond (bb, code, lhs, rhs, value);
4826 
4827   if (!value)
4828     return;
4829 
4830   /* Record dominated conditions if the condition is true.  Note that
4831      the inversion is already recorded.  */
4832   switch (code)
4833     {
4834     case LT_EXPR:
4835     case GT_EXPR:
4836       record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4837       record_cond (bb, NE_EXPR, lhs, rhs, true);
4838       record_cond (bb, EQ_EXPR, lhs, rhs, false);
4839       break;
4840 
4841     case EQ_EXPR:
4842       record_cond (bb, LE_EXPR, lhs, rhs, true);
4843       record_cond (bb, GE_EXPR, lhs, rhs, true);
4844       record_cond (bb, LT_EXPR, lhs, rhs, false);
4845       record_cond (bb, GT_EXPR, lhs, rhs, false);
4846       break;
4847 
4848     default:
4849       break;
4850     }
4851 }
4852 
4853 /* Restore expressions and values derived from conditionals.  */
4854 
4855 void
4856 sccvn_dom_walker::after_dom_children (basic_block bb)
4857 {
4858   while (!cond_stack.is_empty ()
4859 	 && cond_stack.last ().first == bb)
4860     {
4861       vn_nary_op_t cond = cond_stack.last ().second.first;
4862       vn_nary_op_t old = cond_stack.last ().second.second;
4863       current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4864       if (old)
4865 	vn_nary_op_insert_into (old, current_info->nary, false);
4866       cond_stack.pop ();
4867     }
4868 }
4869 
4870 /* Value number all statements in BB.  */
4871 
4872 edge
4873 sccvn_dom_walker::before_dom_children (basic_block bb)
4874 {
4875   if (dump_file && (dump_flags & TDF_DETAILS))
4876     fprintf (dump_file, "Visiting BB %d\n", bb->index);
4877 
4878   /* If we have a single predecessor record the equivalence from a
4879      possible condition on the predecessor edge.  */
4880   edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4881   if (pred_e)
4882     {
4883       /* Check if there are multiple executable successor edges in
4884 	 the source block.  Otherwise there is no additional info
4885 	 to be recorded.  */
4886       edge_iterator ei;
4887       edge e2;
4888       FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4889 	if (e2 != pred_e
4890 	    && e2->flags & EDGE_EXECUTABLE)
4891 	  break;
4892       if (e2 && (e2->flags & EDGE_EXECUTABLE))
4893 	{
4894 	  gimple *stmt = last_stmt (pred_e->src);
4895 	  if (stmt
4896 	      && gimple_code (stmt) == GIMPLE_COND)
4897 	    {
4898 	      enum tree_code code = gimple_cond_code (stmt);
4899 	      tree lhs = gimple_cond_lhs (stmt);
4900 	      tree rhs = gimple_cond_rhs (stmt);
4901 	      record_conds (bb, code, lhs, rhs,
4902 			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4903 	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
4904 	      if (code != ERROR_MARK)
4905 		record_conds (bb, code, lhs, rhs,
4906 			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4907 	    }
4908 	}
4909     }
4910 
4911   /* Value-number all defs in the basic-block.  */
4912   for (gphi_iterator gsi = gsi_start_phis (bb);
4913        !gsi_end_p (gsi); gsi_next (&gsi))
4914     {
4915       gphi *phi = gsi.phi ();
4916       tree res = PHI_RESULT (phi);
4917       if (!VN_INFO (res)->visited)
4918 	DFS (res);
4919     }
4920   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4921        !gsi_end_p (gsi); gsi_next (&gsi))
4922     {
4923       ssa_op_iter i;
4924       tree op;
4925       FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4926 	if (!VN_INFO (op)->visited)
4927 	  DFS (op);
4928     }
4929 
4930   /* Finally look at the last stmt.  */
4931   gimple *stmt = last_stmt (bb);
4932   if (!stmt)
4933     return NULL;
4934 
4935   enum gimple_code code = gimple_code (stmt);
4936   if (code != GIMPLE_COND
4937       && code != GIMPLE_SWITCH
4938       && code != GIMPLE_GOTO)
4939     return NULL;
4940 
4941   if (dump_file && (dump_flags & TDF_DETAILS))
4942     {
4943       fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4944       print_gimple_stmt (dump_file, stmt, 0);
4945     }
4946 
4947   /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
4948      if value-numbering can prove they are not reachable.  Handling
4949      computed gotos is also possible.  */
4950   tree val;
4951   switch (code)
4952     {
4953     case GIMPLE_COND:
4954       {
4955 	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4956 	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4957 	val = gimple_simplify (gimple_cond_code (stmt),
4958 			       boolean_type_node, lhs, rhs,
4959 			       NULL, vn_valueize);
4960 	/* If that didn't simplify to a constant see if we have recorded
4961 	   temporary expressions from taken edges.  */
4962 	if (!val || TREE_CODE (val) != INTEGER_CST)
4963 	  {
4964 	    tree ops[2];
4965 	    ops[0] = lhs;
4966 	    ops[1] = rhs;
4967 	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4968 					    boolean_type_node, ops, NULL);
4969 	  }
4970 	break;
4971       }
4972     case GIMPLE_SWITCH:
4973       val = gimple_switch_index (as_a <gswitch *> (stmt));
4974       break;
4975     case GIMPLE_GOTO:
4976       val = gimple_goto_dest (stmt);
4977       break;
4978     default:
4979       gcc_unreachable ();
4980     }
4981   if (!val)
4982     return NULL;
4983 
4984   edge taken = find_taken_edge (bb, vn_valueize (val));
4985   if (!taken)
4986     return NULL;
4987 
4988   if (dump_file && (dump_flags & TDF_DETAILS))
4989     fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4990 	     "not executable\n", bb->index, bb->index, taken->dest->index);
4991 
4992   return taken;
4993 }
4994 
4995 /* Do SCCVN.  DEFAULT_VN_WALK_KIND_ specifies how we use the alias
4996    oracle walking during the VN process.  */
4998 
4999 void
5000 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5001 {
5002   size_t i;
5003 
5004   default_vn_walk_kind = default_vn_walk_kind_;
5005 
5006   init_scc_vn ();
5007 
5008   /* Collect pointers we know point to readonly memory.  */
5009   const_parms = BITMAP_ALLOC (NULL);
5010   tree fnspec = lookup_attribute ("fn spec",
5011 				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5012   if (fnspec)
5013     {
5014       fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5015       i = 1;
5016       for (tree arg = DECL_ARGUMENTS (cfun->decl);
5017 	   arg; arg = DECL_CHAIN (arg), ++i)
5018 	{
5019 	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5020 	    break;
5021 	  if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5022 	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
5023 	    {
5024 	      tree name = ssa_default_def (cfun, arg);
5025 	      if (name)
5026 		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5027 	    }
5028 	}
5029     }
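  /* For example, with a "fn spec" string of ".R" the first parameter
     is known to point to memory that is only read, so its default
     definition ends up in const_parms.  */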
5030 
5031   /* Walk all blocks in dominator order, value-numbering the SSA defs
5032      of each stmt and deciding which outgoing edges are not executable.  */
5033   sccvn_dom_walker walker;
5034   walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5035 
5036   /* Initialize the value ids and prune out remaining VN_TOPs
5037      from dead code.  */
5038   tree name;
5039   FOR_EACH_SSA_NAME (i, name, cfun)
5040     {
5041       vn_ssa_aux_t info = VN_INFO (name);
5042       if (!info->visited
5043 	  || info->valnum == VN_TOP)
5044 	info->valnum = name;
5045       if (info->valnum == name)
5046 	info->value_id = get_next_value_id ();
5047       else if (is_gimple_min_invariant (info->valnum))
5048 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
5049     }
5050 
5051   /* Propagate.  */
5052   FOR_EACH_SSA_NAME (i, name, cfun)
5053     {
5054       vn_ssa_aux_t info = VN_INFO (name);
5055       if (TREE_CODE (info->valnum) == SSA_NAME
5056 	  && info->valnum != name
5057 	  && info->value_id != VN_INFO (info->valnum)->value_id)
5058 	info->value_id = VN_INFO (info->valnum)->value_id;
5059     }
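  /* For example, if x_3 was value-numbered to y_1, x_3 inherits y_1's
     value id here so both names denote the same value in the value-id
     based bitmap sets used by clients such as PRE.  */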
5060 
5061   set_hashtable_value_ids ();
5062 
5063   if (dump_file && (dump_flags & TDF_DETAILS))
5064     {
5065       fprintf (dump_file, "Value numbers:\n");
5066       FOR_EACH_SSA_NAME (i, name, cfun)
5067 	{
5068 	  if (VN_INFO (name)->visited
5069 	      && SSA_VAL (name) != name)
5070 	    {
5071 	      print_generic_expr (dump_file, name);
5072 	      fprintf (dump_file, " = ");
5073 	      print_generic_expr (dump_file, SSA_VAL (name));
5074 	      fprintf (dump_file, "\n");
5075 	    }
5076 	}
5077     }
5078 }
5079 
5080 /* Return the maximum value id we have ever seen.  */
5081 
5082 unsigned int
5083 get_max_value_id (void)
5084 {
5085   return next_value_id;
5086 }
5087 
5088 /* Return the next unique value id.  */
5089 
5090 unsigned int
5091 get_next_value_id (void)
5092 {
5093   return next_value_id++;
5094 }
5095 
5096 
5097 /* Compare two expressions E1 and E2 and return true if they are equal.  */
5098 
5099 bool
5100 expressions_equal_p (tree e1, tree e2)
5101 {
5102   /* The obvious case.  */
5103   if (e1 == e2)
5104     return true;
5105 
5106   /* If either one is VN_TOP consider them equal.  */
5107   if (e1 == VN_TOP || e2 == VN_TOP)
5108     return true;
5109 
5110   /* If only one of them is null, they cannot be equal.  */
5111   if (!e1 || !e2)
5112     return false;
5113 
5114   /* Now perform the actual comparison.  */
5115   if (TREE_CODE (e1) == TREE_CODE (e2)
5116       && operand_equal_p (e1, e2, OEP_PURE_SAME))
5117     return true;
5118 
5119   return false;
5120 }
5121 
5122 
5123 /* Return true if the nary operation NARY may trap.  This is a copy
5124    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
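/* For example, a TRUNC_DIV_EXPR whose divisor is not a nonzero constant
   may trap on division by zero, and additions in signed types with
   TYPE_OVERFLOW_TRAPS set may trap on overflow.  */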
5125 
5126 bool
5127 vn_nary_may_trap (vn_nary_op_t nary)
5128 {
5129   tree type;
5130   tree rhs2 = NULL_TREE;
5131   bool honor_nans = false;
5132   bool honor_snans = false;
5133   bool fp_operation = false;
5134   bool honor_trapv = false;
5135   bool handled, ret;
5136   unsigned i;
5137 
5138   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5139       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5140       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5141     {
5142       type = nary->type;
5143       fp_operation = FLOAT_TYPE_P (type);
5144       if (fp_operation)
5145 	{
5146 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
5147 	  honor_snans = flag_signaling_nans != 0;
5148 	}
5149       else if (INTEGRAL_TYPE_P (type)
5150 	       && TYPE_OVERFLOW_TRAPS (type))
5151 	honor_trapv = true;
5152     }
5153   if (nary->length >= 2)
5154     rhs2 = nary->op[1];
5155   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5156 				       honor_trapv,
5157 				       honor_nans, honor_snans, rhs2,
5158 				       &handled);
5159   if (handled
5160       && ret)
5161     return true;
5162 
5163   for (i = 0; i < nary->length; ++i)
5164     if (tree_could_trap_p (nary->op[i]))
5165       return true;
5166 
5167   return false;
5168 }
5169 
5170 
5171 class eliminate_dom_walker : public dom_walker
5172 {
5173 public:
5174   eliminate_dom_walker (cdi_direction, bitmap);
5175   ~eliminate_dom_walker ();
5176 
5177   virtual edge before_dom_children (basic_block);
5178   virtual void after_dom_children (basic_block);
5179 
5180   tree eliminate_avail (tree op);
5181   void eliminate_push_avail (tree op);
5182   tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5183 
5184   bool do_pre;
5185   unsigned int el_todo;
5186   unsigned int eliminations;
5187   unsigned int insertions;
5188 
5189   /* When do_pre is set, the SSA names whose defs were inserted by PRE.  */
5190   bitmap inserted_exprs;
5191 
5192   /* Blocks with statements that have had their EH properties changed.  */
5193   bitmap need_eh_cleanup;
5194 
5195   /* Blocks with statements that have had their AB properties changed.  */
5196   bitmap need_ab_cleanup;
5197 
5198   auto_vec<gimple *> to_remove;
5199   auto_vec<gimple *> to_fixup;
5200   auto_vec<tree> avail;
5201   auto_vec<tree> avail_stack;
5202 };
5203 
5204 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5205 					    bitmap inserted_exprs_)
5206   : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5207     el_todo (0), eliminations (0), insertions (0),
5208     inserted_exprs (inserted_exprs_)
5209 {
5210   need_eh_cleanup = BITMAP_ALLOC (NULL);
5211   need_ab_cleanup = BITMAP_ALLOC (NULL);
5212 }
5213 
5214 eliminate_dom_walker::~eliminate_dom_walker ()
5215 {
5216   BITMAP_FREE (need_eh_cleanup);
5217   BITMAP_FREE (need_ab_cleanup);
5218 }
5219 
5220 /* Return a leader for OP that is available at the current point of the
5221    eliminate domwalk.  */
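/* For example, if OP's value number is x_3 and a dominating definition
   with that value was made available by eliminate_push_avail, the AVAIL
   vector yields that name as the leader; constants are their own
   leaders.  */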
5222 
5223 tree
5224 eliminate_dom_walker::eliminate_avail (tree op)
5225 {
5226   tree valnum = VN_INFO (op)->valnum;
5227   if (TREE_CODE (valnum) == SSA_NAME)
5228     {
5229       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5230 	return valnum;
5231       if (avail.length () > SSA_NAME_VERSION (valnum))
5232 	return avail[SSA_NAME_VERSION (valnum)];
5233     }
5234   else if (is_gimple_min_invariant (valnum))
5235     return valnum;
5236   return NULL_TREE;
5237 }
5238 
5239 /* At the current point of the eliminate domwalk make OP available.  */
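/* Any previous leader for OP's value is saved on AVAIL_STACK so that
   after_dom_children can restore it once the walk leaves the current
   block's dominator subtree.  */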
5240 
5241 void
5242 eliminate_dom_walker::eliminate_push_avail (tree op)
5243 {
5244   tree valnum = VN_INFO (op)->valnum;
5245   if (TREE_CODE (valnum) == SSA_NAME)
5246     {
5247       if (avail.length () <= SSA_NAME_VERSION (valnum))
5248 	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5249       tree pushop = op;
5250       if (avail[SSA_NAME_VERSION (valnum)])
5251 	pushop = avail[SSA_NAME_VERSION (valnum)];
5252       avail_stack.safe_push (pushop);
5253       avail[SSA_NAME_VERSION (valnum)] = op;
5254     }
5255 }
5256 
5257 /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
5258    the leader for the expression if insertion was successful.  */
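/* For example, if VAL was recorded as the conversion (int) x_5 and x_5
   has an available leader y_9, this inserts, say, tem_6 = (int) y_9
   before *GSI and returns tem_6.  */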
5259 
5260 tree
5261 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5262 {
5263   /* We can insert a sequence with a single assignment only.  */
5264   gimple_seq stmts = VN_INFO (val)->expr;
5265   if (!gimple_seq_singleton_p (stmts))
5266     return NULL_TREE;
5267   gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5268   if (!stmt
5269       || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5270 	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5271 	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5272 	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5273 	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5274     return NULL_TREE;
5275 
5276   tree op = gimple_assign_rhs1 (stmt);
5277   if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5278       || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5279     op = TREE_OPERAND (op, 0);
5280   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5281   if (!leader)
5282     return NULL_TREE;
5283 
5284   tree res;
5285   stmts = NULL;
5286   if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5287     res = gimple_build (&stmts, BIT_FIELD_REF,
5288 			TREE_TYPE (val), leader,
5289 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5290 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5291   else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5292     res = gimple_build (&stmts, BIT_AND_EXPR,
5293 			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5294   else
5295     res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5296 			TREE_TYPE (val), leader);
5297   if (TREE_CODE (res) != SSA_NAME
5298       || SSA_NAME_IS_DEFAULT_DEF (res)
5299       || gimple_bb (SSA_NAME_DEF_STMT (res)))
5300     {
5301       gimple_seq_discard (stmts);
5302 
5303       /* During propagation we have to treat SSA info conservatively
5304          and thus we can end up simplifying the inserted expression
5305 	 at elimination time to something not defined in stmts.  */
5306       /* But then this is a redundancy we failed to detect.  Which means
5307          res now has two values.  That doesn't play well with how
5308 	 we track availability here, so give up.  */
5309       if (dump_file && (dump_flags & TDF_DETAILS))
5310 	{
5311 	  if (TREE_CODE (res) == SSA_NAME)
5312 	    res = eliminate_avail (res);
5313 	  if (res)
5314 	    {
5315 	      fprintf (dump_file, "Failed to insert expression for value ");
5316 	      print_generic_expr (dump_file, val);
5317 	      fprintf (dump_file, " which is really fully redundant to ");
5318 	      print_generic_expr (dump_file, res);
5319 	      fprintf (dump_file, "\n");
5320 	    }
5321 	}
5322 
5323       return NULL_TREE;
5324     }
5325   else
5326     {
5327       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5328       VN_INFO_GET (res)->valnum = val;
5329     }
5330 
5331   insertions++;
5332   if (dump_file && (dump_flags & TDF_DETAILS))
5333     {
5334       fprintf (dump_file, "Inserted ");
5335       print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5336     }
5337 
5338   return res;
5339 }
5340 
5341 
5342 
5343 /* Perform elimination for the basic-block B during the domwalk.  */
5344 
5345 edge
5346 eliminate_dom_walker::before_dom_children (basic_block b)
5347 {
5348   /* Mark new bb.  */
5349   avail_stack.safe_push (NULL_TREE);
5350 
5351   /* Skip blocks marked unreachable during the SCCVN domwalk.  */
5352   edge_iterator ei;
5353   edge e;
5354   FOR_EACH_EDGE (e, ei, b->preds)
5355     if (e->flags & EDGE_EXECUTABLE)
5356       break;
5357   if (! e)
5358     return NULL;
5359 
5360   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5361     {
5362       gphi *phi = gsi.phi ();
5363       tree res = PHI_RESULT (phi);
5364 
5365       if (virtual_operand_p (res))
5366 	{
5367 	  gsi_next (&gsi);
5368 	  continue;
5369 	}
5370 
5371       tree sprime = eliminate_avail (res);
5372       if (sprime
5373 	  && sprime != res)
5374 	{
5375 	  if (dump_file && (dump_flags & TDF_DETAILS))
5376 	    {
5377 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
5378 	      print_generic_expr (dump_file, res);
5379 	      fprintf (dump_file, " with ");
5380 	      print_generic_expr (dump_file, sprime);
5381 	      fprintf (dump_file, "\n");
5382 	    }
5383 
5384 	  /* If we inserted this PHI node ourselves, it's not an elimination.  */
5385 	  if (! inserted_exprs
5386 	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5387 	    eliminations++;
5388 
5389 	  /* If we will propagate into all uses, don't bother to do
5390 	     anything.  */
5391 	  if (may_propagate_copy (res, sprime))
5392 	    {
5393 	      /* Mark the PHI for removal.  */
5394 	      to_remove.safe_push (phi);
5395 	      gsi_next (&gsi);
5396 	      continue;
5397 	    }
5398 
5399 	  remove_phi_node (&gsi, false);
5400 
5401 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5402 	    sprime = fold_convert (TREE_TYPE (res), sprime);
5403 	  gimple *stmt = gimple_build_assign (res, sprime);
5404 	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5405 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5406 	  continue;
5407 	}
5408 
5409       eliminate_push_avail (res);
5410       gsi_next (&gsi);
5411     }
5412 
5413   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5414        !gsi_end_p (gsi);
5415        gsi_next (&gsi))
5416     {
5417       tree sprime = NULL_TREE;
5418       gimple *stmt = gsi_stmt (gsi);
5419       tree lhs = gimple_get_lhs (stmt);
5420       if (lhs && TREE_CODE (lhs) == SSA_NAME
5421 	  && !gimple_has_volatile_ops (stmt)
5422 	  /* See PR43491.  Do not replace a global register variable when
5423 	     it is the RHS of an assignment.  Do replace local register
5424 	     variables since gcc does not guarantee a local variable will
5425 	     be allocated in a register.
5426 	     ???  The fix isn't effective here.  This should instead
5427 	     be ensured by not value-numbering them the same but treating
5428 	     them like volatiles?  */
5429 	  && !(gimple_assign_single_p (stmt)
5430 	       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5431 		   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5432 		   && is_global_var (gimple_assign_rhs1 (stmt)))))
5433 	{
5434 	  sprime = eliminate_avail (lhs);
5435 	  if (!sprime)
5436 	    {
5437 	      /* If there is no existing usable leader but SCCVN thinks
5438 		 it has an expression it wants to use as replacement,
5439 		 insert that.  */
5440 	      tree val = VN_INFO (lhs)->valnum;
5441 	      if (val != VN_TOP
5442 		  && TREE_CODE (val) == SSA_NAME
5443 		  && VN_INFO (val)->needs_insertion
5444 		  && VN_INFO (val)->expr != NULL
5445 		  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5446 		eliminate_push_avail (sprime);
5447 	    }
5448 
5449 	  /* If this now constitutes a copy, duplicate points-to
5450 	     and range info appropriately.  This is especially
5451 	     important for inserted code.  See tree-ssa-copy.c
5452 	     for similar code.  */
5453 	  if (sprime
5454 	      && TREE_CODE (sprime) == SSA_NAME)
5455 	    {
5456 	      basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5457 	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
5458 		  && VN_INFO_PTR_INFO (lhs)
5459 		  && ! VN_INFO_PTR_INFO (sprime))
5460 		{
5461 		  duplicate_ssa_name_ptr_info (sprime,
5462 					       VN_INFO_PTR_INFO (lhs));
5463 		  if (b != sprime_b)
5464 		    mark_ptr_info_alignment_unknown
5465 			(SSA_NAME_PTR_INFO (sprime));
5466 		}
5467 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5468 		       && VN_INFO_RANGE_INFO (lhs)
5469 		       && ! VN_INFO_RANGE_INFO (sprime)
5470 		       && b == sprime_b)
5471 		duplicate_ssa_name_range_info (sprime,
5472 					       VN_INFO_RANGE_TYPE (lhs),
5473 					       VN_INFO_RANGE_INFO (lhs));
5474 	    }
5475 
5476 	  /* Inhibit the use of an inserted PHI on a loop header when
5477 	     the address of the memory reference is a simple induction
5478 	     variable.  In other cases the vectorizer won't do anything
5479 	     anyway (either it's loop invariant or a complicated
5480 	     expression).  */
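	  /* For example, replacing a load of a[i_1] in the loop body
	     with a PRE-inserted PHI on the loop header would carry the
	     previous iteration's value across iterations and thus add
	     a dependence the vectorizer cannot handle.  */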
5481 	  if (sprime
5482 	      && TREE_CODE (sprime) == SSA_NAME
5483 	      && do_pre
5484 	      && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5485 	      && loop_outer (b->loop_father)
5486 	      && has_zero_uses (sprime)
5487 	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5488 	      && gimple_assign_load_p (stmt))
5489 	    {
5490 	      gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5491 	      basic_block def_bb = gimple_bb (def_stmt);
5492 	      if (gimple_code (def_stmt) == GIMPLE_PHI
5493 		  && def_bb->loop_father->header == def_bb)
5494 		{
5495 		  loop_p loop = def_bb->loop_father;
5496 		  ssa_op_iter iter;
5497 		  tree op;
5498 		  bool found = false;
5499 		  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5500 		    {
5501 		      affine_iv iv;
5502 		      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5503 		      if (def_bb
5504 			  && flow_bb_inside_loop_p (loop, def_bb)
5505 			  && simple_iv (loop, loop, op, &iv, true))
5506 			{
5507 			  found = true;
5508 			  break;
5509 			}
5510 		    }
5511 		  if (found)
5512 		    {
5513 		      if (dump_file && (dump_flags & TDF_DETAILS))
5514 			{
5515 			  fprintf (dump_file, "Not replacing ");
5516 			  print_gimple_expr (dump_file, stmt, 0);
5517 			  fprintf (dump_file, " with ");
5518 			  print_generic_expr (dump_file, sprime);
5519 			  fprintf (dump_file, " which would add a loop"
5520 				   " carried dependence to loop %d\n",
5521 				   loop->num);
5522 			}
5523 		      /* Don't keep sprime available.  */
5524 		      sprime = NULL_TREE;
5525 		    }
5526 		}
5527 	    }
5528 
5529 	  if (sprime)
5530 	    {
5531 	      /* If we can propagate the value computed for LHS into
5532 		 all uses, don't bother doing anything with this stmt.  */
5533 	      if (may_propagate_copy (lhs, sprime))
5534 		{
5535 		  /* Mark it for removal.  */
5536 		  to_remove.safe_push (stmt);
5537 
5538 		  /* ???  Don't count copy/constant propagations.  */
5539 		  if (gimple_assign_single_p (stmt)
5540 		      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5541 			  || gimple_assign_rhs1 (stmt) == sprime))
5542 		    continue;
5543 
5544 		  if (dump_file && (dump_flags & TDF_DETAILS))
5545 		    {
5546 		      fprintf (dump_file, "Replaced ");
5547 		      print_gimple_expr (dump_file, stmt, 0);
5548 		      fprintf (dump_file, " with ");
5549 		      print_generic_expr (dump_file, sprime);
5550 		      fprintf (dump_file, " in all uses of ");
5551 		      print_gimple_stmt (dump_file, stmt, 0);
5552 		    }
5553 
5554 		  eliminations++;
5555 		  continue;
5556 		}
5557 
5558 	      /* If this is an assignment from our leader (which
5559 	         happens when the value-number is a constant)
5560 		 then there is nothing to do.  */
5561 	      if (gimple_assign_single_p (stmt)
5562 		  && sprime == gimple_assign_rhs1 (stmt))
5563 		continue;
5564 
5565 	      /* Else replace its RHS.  */
5566 	      bool can_make_abnormal_goto
5567 		  = is_gimple_call (stmt)
5568 		  && stmt_can_make_abnormal_goto (stmt);
5569 
5570 	      if (dump_file && (dump_flags & TDF_DETAILS))
5571 		{
5572 		  fprintf (dump_file, "Replaced ");
5573 		  print_gimple_expr (dump_file, stmt, 0);
5574 		  fprintf (dump_file, " with ");
5575 		  print_generic_expr (dump_file, sprime);
5576 		  fprintf (dump_file, " in ");
5577 		  print_gimple_stmt (dump_file, stmt, 0);
5578 		}
5579 
5580 	      eliminations++;
5581 	      gimple *orig_stmt = stmt;
5582 	      if (!useless_type_conversion_p (TREE_TYPE (lhs),
5583 					      TREE_TYPE (sprime)))
5584 		sprime = fold_convert (TREE_TYPE (lhs), sprime);
5585 	      tree vdef = gimple_vdef (stmt);
5586 	      tree vuse = gimple_vuse (stmt);
5587 	      propagate_tree_value_into_stmt (&gsi, sprime);
5588 	      stmt = gsi_stmt (gsi);
5589 	      update_stmt (stmt);
5590 	      if (vdef != gimple_vdef (stmt))
5591 		VN_INFO (vdef)->valnum = vuse;
5592 
5593 	      /* If we removed EH side-effects from the statement, clean
5594 		 its EH information.  */
5595 	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5596 		{
5597 		  bitmap_set_bit (need_eh_cleanup,
5598 				  gimple_bb (stmt)->index);
5599 		  if (dump_file && (dump_flags & TDF_DETAILS))
5600 		    fprintf (dump_file, "  Removed EH side-effects.\n");
5601 		}
5602 
5603 	      /* Likewise for AB side-effects.  */
5604 	      if (can_make_abnormal_goto
5605 		  && !stmt_can_make_abnormal_goto (stmt))
5606 		{
5607 		  bitmap_set_bit (need_ab_cleanup,
5608 				  gimple_bb (stmt)->index);
5609 		  if (dump_file && (dump_flags & TDF_DETAILS))
5610 		    fprintf (dump_file, "  Removed AB side-effects.\n");
5611 		}
5612 
5613 	      continue;
5614 	    }
5615 	}
5616 
5617       /* If the statement is a scalar store, see if the expression
5618          has the same value number as its rhs.  If so, the store is
5619          dead.  */
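      /* For example, for *p_1 = x_2 where the memory at *p_1 is already
         known to hold x_2's value, the store changes nothing and can be
         removed, subject to the alias-set check below.  */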
5620       if (gimple_assign_single_p (stmt)
5621 	  && !gimple_has_volatile_ops (stmt)
5622 	  && !is_gimple_reg (gimple_assign_lhs (stmt))
5623 	  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5624 	      || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5625 	{
5626 	  tree val;
5627 	  tree rhs = gimple_assign_rhs1 (stmt);
5628 	  vn_reference_t vnresult;
5629 	  val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5630 				     &vnresult, false);
5631 	  if (TREE_CODE (rhs) == SSA_NAME)
5632 	    rhs = VN_INFO (rhs)->valnum;
5633 	  if (val
5634 	      && operand_equal_p (val, rhs, 0))
5635 	    {
5636 	      /* We can only remove the later store if the former aliases
5637 		 at least all accesses the later one does or if the store
5638 		 was to readonly memory storing the same value.  */
5639 	      alias_set_type set = get_alias_set (lhs);
5640 	      if (! vnresult
5641 		  || vnresult->set == set
5642 		  || alias_set_subset_of (set, vnresult->set))
5643 		{
5644 		  if (dump_file && (dump_flags & TDF_DETAILS))
5645 		    {
5646 		      fprintf (dump_file, "Deleted redundant store ");
5647 		      print_gimple_stmt (dump_file, stmt, 0);
5648 		    }
5649 
5650 		  /* Queue stmt for removal.  */
5651 		  to_remove.safe_push (stmt);
5652 		  continue;
5653 		}
5654 	    }
5655 	}
5656 
5657       /* If this is a control statement for which value numbering left
5658 	 some outgoing edges unexecuted, force the condition in a way
5659 	 consistent with that.  */
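      /* For example, if only the false edge of if (x_1 > 0) was marked
	 executable, the condition is rewritten to if (0) and CFG cleanup
	 removes the now unreachable arm.  */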
5660       if (gcond *cond = dyn_cast <gcond *> (stmt))
5661 	{
5662 	  if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5663 	      ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5664 	    {
5665 	      if (dump_file && (dump_flags & TDF_DETAILS))
5666 		{
5667 		  fprintf (dump_file, "Removing unexecutable edge from ");
5668 		  print_gimple_stmt (dump_file, stmt, 0);
5669 		}
5670 	      if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5671 		  == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5672 		gimple_cond_make_true (cond);
5673 	      else
5674 		gimple_cond_make_false (cond);
5675 	      update_stmt (cond);
5676 	      el_todo |= TODO_cleanup_cfg;
5677 	      continue;
5678 	    }
5679 	}
5680 
5681       bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5682       bool was_noreturn = (is_gimple_call (stmt)
5683 			   && gimple_call_noreturn_p (stmt));
5684       tree vdef = gimple_vdef (stmt);
5685       tree vuse = gimple_vuse (stmt);
5686 
5687       /* If we didn't replace the whole stmt (or propagate the result
5688          into all uses), replace all uses on this stmt with their
5689 	 leaders.  */
5690       bool modified = false;
5691       use_operand_p use_p;
5692       ssa_op_iter iter;
5693       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5694 	{
5695 	  tree use = USE_FROM_PTR (use_p);
5696 	  /* ???  The call code above leaves stmt operands un-updated.  */
5697 	  if (TREE_CODE (use) != SSA_NAME)
5698 	    continue;
5699 	  tree sprime = eliminate_avail (use);
5700 	  if (sprime && sprime != use
5701 	      && may_propagate_copy (use, sprime)
5702 	      /* We substitute into debug stmts to avoid excessive
5703 	         debug temporaries created by removed stmts, but we need
5704 		 to avoid doing so for inserted sprimes as we never want
5705 		 to create debug temporaries for them.  */
5706 	      && (!inserted_exprs
5707 		  || TREE_CODE (sprime) != SSA_NAME
5708 		  || !is_gimple_debug (stmt)
5709 		  || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5710 	    {
5711 	      propagate_value (use_p, sprime);
5712 	      modified = true;
5713 	    }
5714 	}
5715 
5716       /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5717          into, which is a requirement for the IPA devirt machinery.  */
5718       gimple *old_stmt = stmt;
5719       if (modified)
5720 	{
5721 	  /* If propagation turned a formerly non-invariant ADDR_EXPR
5722 	     on its own stmt into an invariant one, recompute its flags.  */
5723 	  if (gimple_assign_single_p (stmt)
5724 	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5725 	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5726 	  gimple_stmt_iterator prev = gsi;
5727 	  gsi_prev (&prev);
5728 	  if (fold_stmt (&gsi))
5729 	    {
5730 	      /* fold_stmt may have created new stmts in between
5731 		 the previous stmt and the folded stmt.  Mark
5732 		 all defs created there as varying to not confuse
5733 		 the SCCVN machinery as we're using that even during
5734 		 elimination.  */
5735 	      if (gsi_end_p (prev))
5736 		prev = gsi_start_bb (b);
5737 	      else
5738 		gsi_next (&prev);
5739 	      if (gsi_stmt (prev) != gsi_stmt (gsi))
5740 		do
5741 		  {
5742 		    tree def;
5743 		    ssa_op_iter dit;
5744 		    FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5745 					       dit, SSA_OP_ALL_DEFS)
5746 		      /* As existing DEFs may move between stmts
5747 			 we have to guard VN_INFO_GET.  */
5748 		      if (! has_VN_INFO (def))
5749 			VN_INFO_GET (def)->valnum = def;
5750 		    if (gsi_stmt (prev) == gsi_stmt (gsi))
5751 		      break;
5752 		    gsi_next (&prev);
5753 		  }
5754 		while (1);
5755 	    }
5756 	  stmt = gsi_stmt (gsi);
5757 	  /* In case we folded the stmt away schedule the NOP for removal.  */
5758 	  if (gimple_nop_p (stmt))
5759 	    to_remove.safe_push (stmt);
5760 	}
5761 
5762       /* Visit indirect calls and turn them into direct calls if
5763 	 possible using the devirtualization machinery.  Do this before
5764 	 checking for required EH/abnormal/noreturn cleanup as devirt
5765 	 may expose more of those.  */
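      /* For example, an OBJ_TYPE_REF call for which dynamic type
	 analysis finds exactly one possible target becomes a direct
	 call to that method, and one with no possible target becomes
	 a call to __builtin_unreachable.  */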
5766       if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5767 	{
5768 	  tree fn = gimple_call_fn (call_stmt);
5769 	  if (fn
5770 	      && flag_devirtualize
5771 	      && virtual_method_call_p (fn))
5772 	    {
5773 	      tree otr_type = obj_type_ref_class (fn);
5774 	      unsigned HOST_WIDE_INT otr_tok
5775 		= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5776 	      tree instance;
5777 	      ipa_polymorphic_call_context context (current_function_decl,
5778 						    fn, stmt, &instance);
5779 	      context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5780 					otr_type, stmt);
5781 	      bool final;
5782 	      vec <cgraph_node *> targets
5783 		= possible_polymorphic_call_targets (obj_type_ref_class (fn),
5784 						     otr_tok, context, &final);
5785 	      if (dump_file)
5786 		dump_possible_polymorphic_call_targets (dump_file,
5787 							obj_type_ref_class (fn),
5788 							otr_tok, context);
5789 	      if (final && targets.length () <= 1 && dbg_cnt (devirt))
5790 		{
5791 		  tree fn;
5792 		  if (targets.length () == 1)
5793 		    fn = targets[0]->decl;
5794 		  else
5795 		    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5796 		  if (dump_enabled_p ())
5797 		    {
5798 		      location_t loc = gimple_location (stmt);
5799 		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5800 				       "converting indirect call to "
5801 				       "function %s\n",
5802 				       lang_hooks.decl_printable_name (fn, 2));
5803 		    }
5804 		  gimple_call_set_fndecl (call_stmt, fn);
5805 		  /* If changing the call to __builtin_unreachable
5806 		     or similar noreturn function, adjust gimple_call_fntype
5807 		     too.  */
5808 		  if (gimple_call_noreturn_p (call_stmt)
5809 		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5810 		      && TYPE_ARG_TYPES (TREE_TYPE (fn))
5811 		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5812 			  == void_type_node))
5813 		    gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5814 		  maybe_remove_unused_call_args (cfun, call_stmt);
5815 		  modified = true;
5816 		}
5817 	    }
5818 	}
5819 
5820       if (modified)
5821 	{
5822 	  /* When changing a call into a noreturn call, cfg cleanup
5823 	     is needed to fix up the noreturn call.  */
5824 	  if (!was_noreturn
5825 	      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5826 	    to_fixup.safe_push (stmt);
5827 	  /* When changing a condition or switch into one we know what
5828 	     edge will be executed, schedule a cfg cleanup.  */
5829 	  if ((gimple_code (stmt) == GIMPLE_COND
5830 	       && (gimple_cond_true_p (as_a <gcond *> (stmt))
5831 		   || gimple_cond_false_p (as_a <gcond *> (stmt))))
5832 	      || (gimple_code (stmt) == GIMPLE_SWITCH
5833 		  && TREE_CODE (gimple_switch_index
5834 				  (as_a <gswitch *> (stmt))) == INTEGER_CST))
5835 	    el_todo |= TODO_cleanup_cfg;
5836 	  /* If we removed EH side-effects from the statement, clean
5837 	     its EH information.  */
5838 	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5839 	    {
5840 	      bitmap_set_bit (need_eh_cleanup,
5841 			      gimple_bb (stmt)->index);
5842 	      if (dump_file && (dump_flags & TDF_DETAILS))
5843 		fprintf (dump_file, "  Removed EH side-effects.\n");
5844 	    }
5845 	  /* Likewise for AB side-effects.  */
5846 	  if (can_make_abnormal_goto
5847 	      && !stmt_can_make_abnormal_goto (stmt))
5848 	    {
5849 	      bitmap_set_bit (need_ab_cleanup,
5850 			      gimple_bb (stmt)->index);
5851 	      if (dump_file && (dump_flags & TDF_DETAILS))
5852 		fprintf (dump_file, "  Removed AB side-effects.\n");
5853 	    }
5854 	  update_stmt (stmt);
5855 	  if (vdef != gimple_vdef (stmt))
5856 	    VN_INFO (vdef)->valnum = vuse;
5857 	}
5858 
5859       /* Make new values available - for fully redundant LHS we
5860          continue with the next stmt above and skip this.  */
5861       def_operand_p defp;
5862       FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5863 	eliminate_push_avail (DEF_FROM_PTR (defp));
5864     }
5865 
5866   /* Replace destination PHI arguments.  */
5867   FOR_EACH_EDGE (e, ei, b->succs)
5868     if (e->flags & EDGE_EXECUTABLE)
5869       for (gphi_iterator gsi = gsi_start_phis (e->dest);
5870 	   !gsi_end_p (gsi);
5871 	   gsi_next (&gsi))
5872 	{
5873 	  gphi *phi = gsi.phi ();
5874 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5875 	  tree arg = USE_FROM_PTR (use_p);
5876 	  if (TREE_CODE (arg) != SSA_NAME
5877 	      || virtual_operand_p (arg))
5878 	    continue;
5879 	  tree sprime = eliminate_avail (arg);
5880 	  if (sprime && may_propagate_copy (arg, sprime))
5881 	    propagate_value (use_p, sprime);
5882 	}
5883   return NULL;
5884 }
5885 
5886 /* Make leaders that went out of scope no longer available.  */
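/* Entries belonging to the current block are delimited on AVAIL_STACK
   by the NULL_TREE sentinel pushed in before_dom_children; popping up
   to it restores the leaders visible in the parent block.  */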
5887 
5888 void
5889 eliminate_dom_walker::after_dom_children (basic_block)
5890 {
5891   tree entry;
5892   while ((entry = avail_stack.pop ()) != NULL_TREE)
5893     {
5894       tree valnum = VN_INFO (entry)->valnum;
5895       tree old = avail[SSA_NAME_VERSION (valnum)];
5896       if (old == entry)
5897 	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5898       else
5899 	avail[SSA_NAME_VERSION (valnum)] = entry;
5900     }
5901 }
5902 
5903 /* Eliminate fully redundant computations.  */
5904 
5905 unsigned int
5906 vn_eliminate (bitmap inserted_exprs)
5907 {
5908   eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5909   el.avail.reserve (num_ssa_names);
5910 
5911   el.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5912 
5913   /* We cannot remove stmts during the BB walk, and especially cannot
5914      release SSA names there, as this confuses the VN machinery.  The stmts ending
5915      up in to_remove are either stores or simple copies.
5916      Remove stmts in reverse order to make debug stmt creation possible.  */
5917   while (!el.to_remove.is_empty ())
5918     {
5919       gimple *stmt = el.to_remove.pop ();
5920 
5921       if (dump_file && (dump_flags & TDF_DETAILS))
5922 	{
5923 	  fprintf (dump_file, "Removing dead stmt ");
5924 	  print_gimple_stmt (dump_file, stmt, 0);
5925 	}
5926 
5927       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5928       if (gimple_code (stmt) == GIMPLE_PHI)
5929 	remove_phi_node (&gsi, true);
5930       else
5931 	{
5932 	  basic_block bb = gimple_bb (stmt);
5933 	  unlink_stmt_vdef (stmt);
5934 	  if (gsi_remove (&gsi, true))
5935 	    bitmap_set_bit (el.need_eh_cleanup, bb->index);
5936 	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5937 	    bitmap_set_bit (el.need_ab_cleanup, bb->index);
5938 	  release_defs (stmt);
5939 	}
5940 
5941       /* Removing a stmt may expose a forwarder block.  */
5942       el.el_todo |= TODO_cleanup_cfg;
5943     }
5944 
5945   /* Fixup stmts that became noreturn calls.  This may require splitting
5946      blocks and thus isn't possible during the dominator walk.  Do this
5947      in reverse order so we don't inadvertently remove a stmt we want to
5948      fixup by visiting a dominating now noreturn call first.  */
5949   while (!el.to_fixup.is_empty ())
5950     {
5951       gimple *stmt = el.to_fixup.pop ();
5952 
5953       if (dump_file && (dump_flags & TDF_DETAILS))
5954 	{
5955 	  fprintf (dump_file, "Fixing up noreturn call ");
5956 	  print_gimple_stmt (dump_file, stmt, 0);
5957 	}
5958 
5959       if (fixup_noreturn_call (stmt))
5960 	el.el_todo |= TODO_cleanup_cfg;
5961     }
5962 
5963   bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
5964   bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
5965 
5966   if (do_eh_cleanup)
5967     gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
5968 
5969   if (do_ab_cleanup)
5970     gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
5971 
5972   if (do_eh_cleanup || do_ab_cleanup)
5973     el.el_todo |= TODO_cleanup_cfg;
5974 
5975   statistics_counter_event (cfun, "Eliminated", el.eliminations);
5976   statistics_counter_event (cfun, "Insertions", el.insertions);
5977 
5978   return el.el_todo;
5979 }
5980 
5981 
5982 namespace {
5983 
5984 const pass_data pass_data_fre =
5985 {
5986   GIMPLE_PASS, /* type */
5987   "fre", /* name */
5988   OPTGROUP_NONE, /* optinfo_flags */
5989   TV_TREE_FRE, /* tv_id */
5990   ( PROP_cfg | PROP_ssa ), /* properties_required */
5991   0, /* properties_provided */
5992   0, /* properties_destroyed */
5993   0, /* todo_flags_start */
5994   0, /* todo_flags_finish */
5995 };
5996 
5997 class pass_fre : public gimple_opt_pass
5998 {
5999 public:
6000   pass_fre (gcc::context *ctxt)
6001     : gimple_opt_pass (pass_data_fre, ctxt)
6002   {}
6003 
6004   /* opt_pass methods: */
6005   opt_pass * clone () { return new pass_fre (m_ctxt); }
6006   virtual bool gate (function *) { return flag_tree_fre != 0; }
6007   virtual unsigned int execute (function *);
6008 
6009 }; // class pass_fre
6010 
6011 unsigned int
6012 pass_fre::execute (function *)
6013 {
6014   unsigned int todo = 0;
6015 
6016   run_scc_vn (VN_WALKREWRITE);
6017 
6018   /* Remove all the redundant expressions.  */
6019   todo |= vn_eliminate (NULL);
6020 
6021   scc_vn_restore_ssa_info ();
6022   free_scc_vn ();
6023 
6024   return todo;
6025 }
6026 
6027 } // anon namespace
6028 
6029 gimple_opt_pass *
6030 make_pass_fre (gcc::context *ctxt)
6031 {
6032   return new pass_fre (ctxt);
6033 }
6034