/* Alias analysis for trees.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function; its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */

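/* For example (an illustrative sketch, not code used by this file):
   a pass that holds a statement STMT and a reference tree REF would
   typically query the oracle as

     if (stmt_may_clobber_ref_p (stmt, ref))
       ...  // conservatively assume STMT invalidated (parts of) REF

     if (ref_maybe_used_by_stmt_p (stmt, ref))
       ...  // conservatively assume STMT read (parts of) REF

   A false answer is a guarantee; a true answer only means the oracle
   could not disambiguate the pair.  */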

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true, if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same
     pointer.  In this case we still want to say both pointers alias,
     so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that the restrict
	 pointed-to object may in fact be obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ???  We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}

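/* For example (illustrative sketch): given

     int a;
     int *p = ...;   // points-to set of p is known and excludes a

   ptrs_compare_unequal (&a, p) returns true, which lets a caller fold
   the test "&a == p" to false.  If p lacks points-to information, or
   its solution contains restrict tags or interposable variables, the
   function conservatively returns false.  */
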
/* Returns whether a reference based on BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ???  We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ???  We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   at or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

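/* For example (illustrative sketch): to ask whether a bcopy-like call
   may read from the memory covered by an existing ao_ref *REF, one
   builds a second ao_ref from the source pointer and size arguments
   and queries the oracle, exactly as the builtin handling further
   down in this file does:

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));
     if (refs_may_alias_p_1 (&dref, ref, false))
       ...  // the call may use (parts of) *REF
*/
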
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

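/* For example (illustrative): "const int" vs. "int" share a main
   variant and canonical type and compare as 1; "int" vs. "float" have
   distinct canonical types and unrelated alias sets and compare as 0;
   two distinct ARRAY_TYPEs, or any pair with equal alias sets but
   different canonical types, compare as -1, i.e. "cannot decide".  */
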
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 in the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	return false;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	return false;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* Bases must be either the same or uncomparable.  */
  gcc_checking_assert (ref1 == ref2
		       || (DECL_P (ref1) && DECL_P (ref2)
			   && compare_base_decls (ref1, ref2) != 0));

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    return false;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	     return false;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	return false;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	 return false;

      if (field1 != field2)
	{
	  /* A field and its representative need to be considered the
	     same.  */
	  if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
	      || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
	    return false;
	  /* Different fields of the same record type cannot overlap.
	     ??? Bitfields can overlap at RTL level so punt on them.  */
	  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
	    return false;
	  return true;
	}
    }

  return false;
}

/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap.  */
	  if (fieldx != fieldy)
	    {
	      /* A field and its representative need to be considered the
		 same.  */
	      if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
		  || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
		return false;
	      /* Different fields of the same record type cannot overlap.
		 ??? Bitfields can overlap at RTL level so punt on them.  */
	      if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
		return false;
	      return true;
	    }
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}

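/* For example (illustrative): for accesses like x.s.a and y.s.b, where
   a and b are distinct non-bitfield FIELD_DECLs of the same
   RECORD_TYPE, the walk above finds the common DECL_FIELD_CONTEXT and
   concludes the accesses cannot overlap.  Fields whose context is a
   union are never pushed, so unions conservatively yield false.  */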

/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

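/* For example (illustrative, assuming a target with 32-bit int): for

     struct { int i; int j; } s;

   the accesses s.i and s.j share the base decl s but occupy the bit
   ranges [0, 32) and [32, 64) respectively, so ranges_overlap_p says
   they do not conflict and decl_refs_may_alias_p returns false.  */
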
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  offset_int moff = mem_ref_offset (base1);
  moff <<= LOG2_BITS_PER_UNIT;
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ???  IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ???  We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ???  We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ???  This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      offset_int moff = mem_ref_offset (dbase2);
      moff <<= LOG2_BITS_PER_UNIT;
      if (wi::neg_p (moff))
	doffset1 -= (-moff).to_short_addr ();
      else
	doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees. */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      offset_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff <<= LOG2_BITS_PER_UNIT;
      if (wi::neg_p (moff))
	offset2 += (-moff).to_short_addr ();
      else
	offset1 += moff.to_short_addr ();
      moff = mem_ref_offset (base2);
      moff <<= LOG2_BITS_PER_UNIT;
      if (wi::neg_p (moff))
	offset1 += (-moff).to_short_addr ();
      else
	offset2 += moff.to_short_addr ();
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1
      /* But avoid treating arrays as "objects", instead assume they
         can overlap by an exact multiple of their element size.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true, if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases, conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, true);
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */
1578 
1579 bool
1580 refs_output_dependent_p (tree store1, tree store2)
1581 {
1582   ao_ref r1, r2;
1583   ao_ref_init (&r1, store1);
1584   ao_ref_init (&r2, store2);
1585   return refs_may_alias_p_1 (&r1, &r2, false);
1586 }
1587 
1588 /* If the call CALL may use the memory reference REF return true,
1589    otherwise return false.  */
1590 
1591 static bool
1592 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
1593 {
1594   tree base, callee;
1595   unsigned i;
1596   int flags = gimple_call_flags (call);
1597 
1598   /* Const or novops functions without a static chain do not implicitly use memory.  */
1599   if (!gimple_call_chain (call)
1600       && (flags & (ECF_CONST|ECF_NOVOPS)))
1601     goto process_args;
1602 
1603   base = ao_ref_base (ref);
1604   if (!base)
1605     return true;
1606 
1607   /* A call that is not free of side-effects might involve volatile
1608      accesses and thus conflicts with all other volatile accesses.  */
1609   if (ref->volatile_p)
1610     return true;
1611 
1612   /* If the reference is based on a decl that is not aliased the call
1613      cannot possibly use it.  */
1614   if (DECL_P (base)
1615       && !may_be_aliased (base)
1616       /* But local statics can be used through recursion.  */
1617       && !is_global_var (base))
1618     goto process_args;
1619 
1620   callee = gimple_call_fndecl (call);
1621 
1622   /* Handle those builtin functions explicitly that do not act as
1623      escape points.  See tree-ssa-structalias.c:find_func_aliases
1624      for the list of builtins we might need to handle here.  */
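  /* For instance (illustrative): for memcpy (d, s, n) the case below
     builds an ao_ref describing the n bytes read at s,

	 ao_ref dref;
	 ao_ref_init_from_ptr_and_size (&dref, s, n);

     and the query reduces to refs_may_alias_p_1 (&dref, ref, false).
     Where the amount read is unknown, as for strcpy, a NULL_TREE size
     yields a reference of unknown extent based at the pointer.  */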
1625   if (callee != NULL_TREE
1626       && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1627     switch (DECL_FUNCTION_CODE (callee))
1628       {
1629 	/* All the following functions read memory pointed to by
1630 	   their second argument.  strcat/strncat additionally
1631 	   read memory pointed to by the first argument.  */
1632 	case BUILT_IN_STRCAT:
1633 	case BUILT_IN_STRNCAT:
1634 	  {
1635 	    ao_ref dref;
1636 	    ao_ref_init_from_ptr_and_size (&dref,
1637 					   gimple_call_arg (call, 0),
1638 					   NULL_TREE);
1639 	    if (refs_may_alias_p_1 (&dref, ref, false))
1640 	      return true;
1641 	  }
1642 	  /* FALLTHRU */
1643 	case BUILT_IN_STRCPY:
1644 	case BUILT_IN_STRNCPY:
1645 	case BUILT_IN_MEMCPY:
1646 	case BUILT_IN_MEMMOVE:
1647 	case BUILT_IN_MEMPCPY:
1648 	case BUILT_IN_STPCPY:
1649 	case BUILT_IN_STPNCPY:
1650 	case BUILT_IN_TM_MEMCPY:
1651 	case BUILT_IN_TM_MEMMOVE:
1652 	  {
1653 	    ao_ref dref;
1654 	    tree size = NULL_TREE;
1655 	    if (gimple_call_num_args (call) == 3)
1656 	      size = gimple_call_arg (call, 2);
1657 	    ao_ref_init_from_ptr_and_size (&dref,
1658 					   gimple_call_arg (call, 1),
1659 					   size);
1660 	    return refs_may_alias_p_1 (&dref, ref, false);
1661 	  }
1662 	case BUILT_IN_STRCAT_CHK:
1663 	case BUILT_IN_STRNCAT_CHK:
1664 	  {
1665 	    ao_ref dref;
1666 	    ao_ref_init_from_ptr_and_size (&dref,
1667 					   gimple_call_arg (call, 0),
1668 					   NULL_TREE);
1669 	    if (refs_may_alias_p_1 (&dref, ref, false))
1670 	      return true;
1671 	  }
1672 	  /* FALLTHRU */
1673 	case BUILT_IN_STRCPY_CHK:
1674 	case BUILT_IN_STRNCPY_CHK:
1675 	case BUILT_IN_MEMCPY_CHK:
1676 	case BUILT_IN_MEMMOVE_CHK:
1677 	case BUILT_IN_MEMPCPY_CHK:
1678 	case BUILT_IN_STPCPY_CHK:
1679 	case BUILT_IN_STPNCPY_CHK:
1680 	  {
1681 	    ao_ref dref;
1682 	    tree size = NULL_TREE;
1683 	    if (gimple_call_num_args (call) == 4)
1684 	      size = gimple_call_arg (call, 2);
1685 	    ao_ref_init_from_ptr_and_size (&dref,
1686 					   gimple_call_arg (call, 1),
1687 					   size);
1688 	    return refs_may_alias_p_1 (&dref, ref, false);
1689 	  }
1690 	case BUILT_IN_BCOPY:
1691 	  {
1692 	    ao_ref dref;
1693 	    tree size = gimple_call_arg (call, 2);
1694 	    ao_ref_init_from_ptr_and_size (&dref,
1695 					   gimple_call_arg (call, 0),
1696 					   size);
1697 	    return refs_may_alias_p_1 (&dref, ref, false);
1698 	  }
1699 
1700 	/* The following functions read memory pointed to by their
1701 	   first argument.  */
1702 	CASE_BUILT_IN_TM_LOAD (1):
1703 	CASE_BUILT_IN_TM_LOAD (2):
1704 	CASE_BUILT_IN_TM_LOAD (4):
1705 	CASE_BUILT_IN_TM_LOAD (8):
1706 	CASE_BUILT_IN_TM_LOAD (FLOAT):
1707 	CASE_BUILT_IN_TM_LOAD (DOUBLE):
1708 	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1709 	CASE_BUILT_IN_TM_LOAD (M64):
1710 	CASE_BUILT_IN_TM_LOAD (M128):
1711 	CASE_BUILT_IN_TM_LOAD (M256):
1712 	case BUILT_IN_TM_LOG:
1713 	case BUILT_IN_TM_LOG_1:
1714 	case BUILT_IN_TM_LOG_2:
1715 	case BUILT_IN_TM_LOG_4:
1716 	case BUILT_IN_TM_LOG_8:
1717 	case BUILT_IN_TM_LOG_FLOAT:
1718 	case BUILT_IN_TM_LOG_DOUBLE:
1719 	case BUILT_IN_TM_LOG_LDOUBLE:
1720 	case BUILT_IN_TM_LOG_M64:
1721 	case BUILT_IN_TM_LOG_M128:
1722 	case BUILT_IN_TM_LOG_M256:
1723 	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1724 
1725 	/* These read memory pointed to by the first argument.  */
1726 	case BUILT_IN_STRDUP:
1727 	case BUILT_IN_STRNDUP:
1728 	case BUILT_IN_REALLOC:
1729 	  {
1730 	    ao_ref dref;
1731 	    tree size = NULL_TREE;
1732 	    if (gimple_call_num_args (call) == 2)
1733 	      size = gimple_call_arg (call, 1);
1734 	    ao_ref_init_from_ptr_and_size (&dref,
1735 					   gimple_call_arg (call, 0),
1736 					   size);
1737 	    return refs_may_alias_p_1 (&dref, ref, false);
1738 	  }
1739 	/* These read memory pointed to by the first argument.  */
1740 	case BUILT_IN_INDEX:
1741 	case BUILT_IN_STRCHR:
1742 	case BUILT_IN_STRRCHR:
1743 	  {
1744 	    ao_ref dref;
1745 	    ao_ref_init_from_ptr_and_size (&dref,
1746 					   gimple_call_arg (call, 0),
1747 					   NULL_TREE);
1748 	    return refs_may_alias_p_1 (&dref, ref, false);
1749 	  }
1750 	/* These read memory pointed to by the first argument with size
1751 	   in the third argument.  */
1752 	case BUILT_IN_MEMCHR:
1753 	  {
1754 	    ao_ref dref;
1755 	    ao_ref_init_from_ptr_and_size (&dref,
1756 					   gimple_call_arg (call, 0),
1757 					   gimple_call_arg (call, 2));
1758 	    return refs_may_alias_p_1 (&dref, ref, false);
1759 	  }
1760 	/* These read memory pointed to by the first and second arguments.  */
1761 	case BUILT_IN_STRSTR:
1762 	case BUILT_IN_STRPBRK:
1763 	  {
1764 	    ao_ref dref;
1765 	    ao_ref_init_from_ptr_and_size (&dref,
1766 					   gimple_call_arg (call, 0),
1767 					   NULL_TREE);
1768 	    if (refs_may_alias_p_1 (&dref, ref, false))
1769 	      return true;
1770 	    ao_ref_init_from_ptr_and_size (&dref,
1771 					   gimple_call_arg (call, 1),
1772 					   NULL_TREE);
1773 	    return refs_may_alias_p_1 (&dref, ref, false);
1774 	  }
1775 
1776 	/* The following builtins do not read from memory.  */
1777 	case BUILT_IN_FREE:
1778 	case BUILT_IN_MALLOC:
1779 	case BUILT_IN_POSIX_MEMALIGN:
1780 	case BUILT_IN_ALIGNED_ALLOC:
1781 	case BUILT_IN_CALLOC:
1782 	case BUILT_IN_ALLOCA:
1783 	case BUILT_IN_ALLOCA_WITH_ALIGN:
1784 	case BUILT_IN_STACK_SAVE:
1785 	case BUILT_IN_STACK_RESTORE:
1786 	case BUILT_IN_MEMSET:
1787 	case BUILT_IN_TM_MEMSET:
1788 	case BUILT_IN_MEMSET_CHK:
1789 	case BUILT_IN_FREXP:
1790 	case BUILT_IN_FREXPF:
1791 	case BUILT_IN_FREXPL:
1792 	case BUILT_IN_GAMMA_R:
1793 	case BUILT_IN_GAMMAF_R:
1794 	case BUILT_IN_GAMMAL_R:
1795 	case BUILT_IN_LGAMMA_R:
1796 	case BUILT_IN_LGAMMAF_R:
1797 	case BUILT_IN_LGAMMAL_R:
1798 	case BUILT_IN_MODF:
1799 	case BUILT_IN_MODFF:
1800 	case BUILT_IN_MODFL:
1801 	case BUILT_IN_REMQUO:
1802 	case BUILT_IN_REMQUOF:
1803 	case BUILT_IN_REMQUOL:
1804 	case BUILT_IN_SINCOS:
1805 	case BUILT_IN_SINCOSF:
1806 	case BUILT_IN_SINCOSL:
1807 	case BUILT_IN_ASSUME_ALIGNED:
1808 	case BUILT_IN_VA_END:
1809 	  return false;
1810 	/* __sync_* builtins and some OpenMP builtins act as threading
1811 	   barriers.  */
1812 #undef DEF_SYNC_BUILTIN
1813 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1814 #include "sync-builtins.def"
1815 #undef DEF_SYNC_BUILTIN
1816 	case BUILT_IN_GOMP_ATOMIC_START:
1817 	case BUILT_IN_GOMP_ATOMIC_END:
1818 	case BUILT_IN_GOMP_BARRIER:
1819 	case BUILT_IN_GOMP_BARRIER_CANCEL:
1820 	case BUILT_IN_GOMP_TASKWAIT:
1821 	case BUILT_IN_GOMP_TASKGROUP_END:
1822 	case BUILT_IN_GOMP_CRITICAL_START:
1823 	case BUILT_IN_GOMP_CRITICAL_END:
1824 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
1825 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
1826 	case BUILT_IN_GOMP_LOOP_END:
1827 	case BUILT_IN_GOMP_LOOP_END_CANCEL:
1828 	case BUILT_IN_GOMP_ORDERED_START:
1829 	case BUILT_IN_GOMP_ORDERED_END:
1830 	case BUILT_IN_GOMP_SECTIONS_END:
1831 	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1832 	case BUILT_IN_GOMP_SINGLE_COPY_START:
1833 	case BUILT_IN_GOMP_SINGLE_COPY_END:
1834 	  return true;
1835 
1836 	default:
1837 	  /* Fallthru to general call handling.  */;
1838       }
1839 
1840   /* Check if base is a global static variable that is not read
1841      by the function.  */
1842   if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
1843     {
1844       struct cgraph_node *node = cgraph_node::get (callee);
1845       bitmap not_read;
1846 
1847       /* FIXME: Callee can be an OMP builtin that does not have a call graph
1848 	 node yet.  We should enforce that there are nodes for all decls in the
1849 	 IL and remove this check instead.  */
1850       if (node
1851 	  && (not_read = ipa_reference_get_not_read_global (node))
1852 	  && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1853 	goto process_args;
1854     }
1855 
1856   /* Check if the base variable is call-used.  */
1857   if (DECL_P (base))
1858     {
1859       if (pt_solution_includes (gimple_call_use_set (call), base))
1860 	return true;
1861     }
1862   else if ((TREE_CODE (base) == MEM_REF
1863 	    || TREE_CODE (base) == TARGET_MEM_REF)
1864 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1865     {
1866       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1867       if (!pi)
1868 	return true;
1869 
1870       if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1871 	return true;
1872     }
1873   else
1874     return true;
1875 
1876   /* Inspect call arguments for passed-by-value aliases.  */
1877 process_args:
1878   for (i = 0; i < gimple_call_num_args (call); ++i)
1879     {
1880       tree op = gimple_call_arg (call, i);
1881       int flags = gimple_call_arg_flags (call, i);
1882 
1883       if (flags & EAF_UNUSED)
1884 	continue;
1885 
1886       if (TREE_CODE (op) == WITH_SIZE_EXPR)
1887 	op = TREE_OPERAND (op, 0);
1888 
1889       if (TREE_CODE (op) != SSA_NAME
1890 	  && !is_gimple_min_invariant (op))
1891 	{
1892 	  ao_ref r;
1893 	  ao_ref_init (&r, op);
1894 	  if (refs_may_alias_p_1 (&r, ref, true))
1895 	    return true;
1896 	}
1897     }
1898 
1899   return false;
1900 }
1901 
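/* Wrapper around ref_maybe_used_by_call_p_1 that keeps the alias
   statistics up to date.  */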
1902 static bool
1903 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
1904 {
1905   bool res;
1906   res = ref_maybe_used_by_call_p_1 (call, ref);
1907   if (res)
1908     ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1909   else
1910     ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1911   return res;
1912 }
1913 
1914 
1915 /* If the statement STMT may use the memory reference REF return
1916    true, otherwise return false.  */
1917 
1918 bool
1919 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
1920 {
1921   if (is_gimple_assign (stmt))
1922     {
1923       tree rhs;
1924 
1925       /* All memory-referencing assign statements are single (one-operand rhs).  */
1926       if (!gimple_assign_single_p (stmt))
1927 	return false;
1928 
1929       rhs = gimple_assign_rhs1 (stmt);
1930       if (is_gimple_reg (rhs)
1931 	  || is_gimple_min_invariant (rhs)
1932 	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1933 	return false;
1934 
1935       return refs_may_alias_p (rhs, ref);
1936     }
1937   else if (is_gimple_call (stmt))
1938     return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
1939   else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1940     {
1941       tree retval = gimple_return_retval (return_stmt);
1942       if (retval
1943 	  && TREE_CODE (retval) != SSA_NAME
1944 	  && !is_gimple_min_invariant (retval)
1945 	  && refs_may_alias_p (retval, ref))
1946 	return true;
1947       /* If ref escapes the function then the return acts as a use.  */
1948       tree base = ao_ref_base (ref);
1949       if (!base)
1950 	;
1951       else if (DECL_P (base))
1952 	return is_global_var (base);
1953       else if (TREE_CODE (base) == MEM_REF
1954 	       || TREE_CODE (base) == TARGET_MEM_REF)
1955 	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1956       return false;
1957     }
1958 
1959   return true;
1960 }
1961 
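/* Likewise, with the reference given as a tree.  */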
1962 bool
1963 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
1964 {
1965   ao_ref r;
1966   ao_ref_init (&r, ref);
1967   return ref_maybe_used_by_stmt_p (stmt, &r);
1968 }
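
/* Example (illustrative): a dead-store style client can use the
   predicate to verify that no statement between two stores reads the
   stored memory:

       gimple *stmt = gsi_stmt (gsi);
       if (ref_maybe_used_by_stmt_p (stmt, lhs))
	 break;  // the old value may still be needed

   where lhs is the lvalue tree of the earlier store.  A conservative
   true answer only blocks the transform; it never miscompiles.  */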
1969 
1970 /* If the call in statement CALL may clobber the memory reference REF
1971    return true, otherwise return false.  */
1972 
1973 bool
1974 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1975 {
1976   tree base;
1977   tree callee;
1978 
1979   /* If the call is pure or const it cannot clobber anything.  */
1980   if (gimple_call_flags (call)
1981       & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1982     return false;
1983   if (gimple_call_internal_p (call))
1984     switch (gimple_call_internal_fn (call))
1985       {
1986 	/* Treat these internal calls like ECF_PURE for aliasing,
1987 	   they don't write to any memory the program should care about.
1988 	   They have important other side-effects, and read memory,
1989 	   so can't be ECF_NOVOPS.  */
1990       case IFN_UBSAN_NULL:
1991       case IFN_UBSAN_BOUNDS:
1992       case IFN_UBSAN_VPTR:
1993       case IFN_UBSAN_OBJECT_SIZE:
1994       case IFN_ASAN_CHECK:
1995 	return false;
1996       default:
1997 	break;
1998       }
1999 
2000   base = ao_ref_base (ref);
2001   if (!base)
2002     return true;
2003 
2004   if (TREE_CODE (base) == SSA_NAME
2005       || CONSTANT_CLASS_P (base))
2006     return false;
2007 
2008   /* A call that is not free of side-effects might involve volatile
2009      accesses and thus conflicts with all other volatile accesses.  */
2010   if (ref->volatile_p)
2011     return true;
2012 
2013   /* If the reference is based on a decl that is not aliased the call
2014      cannot possibly clobber it.  */
2015   if (DECL_P (base)
2016       && !may_be_aliased (base)
2017       /* But local non-readonly statics can be modified through recursion
2018          or the call may implement a threading barrier which we must
2019 	 treat as may-def.  */
2020       && (TREE_READONLY (base)
2021 	  || !is_global_var (base)))
2022     return false;
2023 
2024   callee = gimple_call_fndecl (call);
2025 
2026   /* Handle those builtin functions explicitly that do not act as
2027      escape points.  See tree-ssa-structalias.c:find_func_aliases
2028      for the list of builtins we might need to handle here.  */
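  /* For instance (illustrative): memcpy (d, s, n) is handled below by
     building an ao_ref for the n bytes written at d,

	 ao_ref dref;
	 ao_ref_init_from_ptr_and_size (&dref, d, n);

     so the clobber question reduces to refs_may_alias_p_1 (&dref, ref,
     false) instead of treating the call as clobbering all of memory.  */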
2029   if (callee != NULL_TREE
2030       && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2031     switch (DECL_FUNCTION_CODE (callee))
2032       {
2033 	/* All the following functions clobber memory pointed to by
2034 	   their first argument.  */
2035 	case BUILT_IN_STRCPY:
2036 	case BUILT_IN_STRNCPY:
2037 	case BUILT_IN_MEMCPY:
2038 	case BUILT_IN_MEMMOVE:
2039 	case BUILT_IN_MEMPCPY:
2040 	case BUILT_IN_STPCPY:
2041 	case BUILT_IN_STPNCPY:
2042 	case BUILT_IN_STRCAT:
2043 	case BUILT_IN_STRNCAT:
2044 	case BUILT_IN_MEMSET:
2045 	case BUILT_IN_TM_MEMSET:
2046 	CASE_BUILT_IN_TM_STORE (1):
2047 	CASE_BUILT_IN_TM_STORE (2):
2048 	CASE_BUILT_IN_TM_STORE (4):
2049 	CASE_BUILT_IN_TM_STORE (8):
2050 	CASE_BUILT_IN_TM_STORE (FLOAT):
2051 	CASE_BUILT_IN_TM_STORE (DOUBLE):
2052 	CASE_BUILT_IN_TM_STORE (LDOUBLE):
2053 	CASE_BUILT_IN_TM_STORE (M64):
2054 	CASE_BUILT_IN_TM_STORE (M128):
2055 	CASE_BUILT_IN_TM_STORE (M256):
2056 	case BUILT_IN_TM_MEMCPY:
2057 	case BUILT_IN_TM_MEMMOVE:
2058 	  {
2059 	    ao_ref dref;
2060 	    tree size = NULL_TREE;
2061 	    /* Don't pass in a size for strncat: it may write up to
2062 	       strlen (dest) + n + 1 bytes at dest, i.e. n + 1 bytes
2063 	       starting at dest + strlen (dest), and strlen (dest)
2064 	       isn't known here.  */
2065 	    if (gimple_call_num_args (call) == 3
2066 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2067 	      size = gimple_call_arg (call, 2);
2068 	    ao_ref_init_from_ptr_and_size (&dref,
2069 					   gimple_call_arg (call, 0),
2070 					   size);
2071 	    return refs_may_alias_p_1 (&dref, ref, false);
2072 	  }
2073 	case BUILT_IN_STRCPY_CHK:
2074 	case BUILT_IN_STRNCPY_CHK:
2075 	case BUILT_IN_MEMCPY_CHK:
2076 	case BUILT_IN_MEMMOVE_CHK:
2077 	case BUILT_IN_MEMPCPY_CHK:
2078 	case BUILT_IN_STPCPY_CHK:
2079 	case BUILT_IN_STPNCPY_CHK:
2080 	case BUILT_IN_STRCAT_CHK:
2081 	case BUILT_IN_STRNCAT_CHK:
2082 	case BUILT_IN_MEMSET_CHK:
2083 	  {
2084 	    ao_ref dref;
2085 	    tree size = NULL_TREE;
2086 	    /* Don't pass in a size for __strncat_chk: it may write up
2087 	       to strlen (dest) + n + 1 bytes at dest, i.e. n + 1 bytes
2088 	       starting at dest + strlen (dest), and strlen (dest)
2089 	       isn't known here.  */
2090 	    if (gimple_call_num_args (call) == 4
2091 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2092 	      size = gimple_call_arg (call, 2);
2093 	    ao_ref_init_from_ptr_and_size (&dref,
2094 					   gimple_call_arg (call, 0),
2095 					   size);
2096 	    return refs_may_alias_p_1 (&dref, ref, false);
2097 	  }
2098 	case BUILT_IN_BCOPY:
2099 	  {
2100 	    ao_ref dref;
2101 	    tree size = gimple_call_arg (call, 2);
2102 	    ao_ref_init_from_ptr_and_size (&dref,
2103 					   gimple_call_arg (call, 1),
2104 					   size);
2105 	    return refs_may_alias_p_1 (&dref, ref, false);
2106 	  }
2107 	/* Allocating memory does not have any side-effects apart from
2108 	   being the definition point for the pointer.  */
2109 	case BUILT_IN_MALLOC:
2110 	case BUILT_IN_ALIGNED_ALLOC:
2111 	case BUILT_IN_CALLOC:
2112 	case BUILT_IN_STRDUP:
2113 	case BUILT_IN_STRNDUP:
2114 	  /* Unix98 specifies that errno is set on allocation failure.  */
2115 	  if (flag_errno_math
2116 	      && targetm.ref_may_alias_errno (ref))
2117 	    return true;
2118 	  return false;
2119 	case BUILT_IN_STACK_SAVE:
2120 	case BUILT_IN_ALLOCA:
2121 	case BUILT_IN_ALLOCA_WITH_ALIGN:
2122 	case BUILT_IN_ASSUME_ALIGNED:
2123 	  return false;
2124 	/* But posix_memalign stores a pointer into the memory pointed to
2125 	   by its first argument.  */
2126 	case BUILT_IN_POSIX_MEMALIGN:
2127 	  {
2128 	    tree ptrptr = gimple_call_arg (call, 0);
2129 	    ao_ref dref;
2130 	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2131 					   TYPE_SIZE_UNIT (ptr_type_node));
2132 	    return (refs_may_alias_p_1 (&dref, ref, false)
2133 		    || (flag_errno_math
2134 			&& targetm.ref_may_alias_errno (ref)));
2135 	  }
2136 	/* Freeing memory kills the pointed-to memory.  More importantly
2137 	   the call has to serve as a barrier for moving loads and stores
2138 	   across it.  */
2139 	case BUILT_IN_FREE:
2140 	case BUILT_IN_VA_END:
2141 	  {
2142 	    tree ptr = gimple_call_arg (call, 0);
2143 	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2144 	  }
2145 	/* Realloc serves both as allocation point and deallocation point.  */
2146 	case BUILT_IN_REALLOC:
2147 	  {
2148 	    tree ptr = gimple_call_arg (call, 0);
2149 	    /* Unix98 specifies that errno is set on allocation failure.  */
2150 	    return ((flag_errno_math
2151 		     && targetm.ref_may_alias_errno (ref))
2152 		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2153 	  }
2154 	case BUILT_IN_GAMMA_R:
2155 	case BUILT_IN_GAMMAF_R:
2156 	case BUILT_IN_GAMMAL_R:
2157 	case BUILT_IN_LGAMMA_R:
2158 	case BUILT_IN_LGAMMAF_R:
2159 	case BUILT_IN_LGAMMAL_R:
2160 	  {
2161 	    tree out = gimple_call_arg (call, 1);
2162 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
2163 	      return true;
2164 	    if (flag_errno_math)
2165 	      break;
2166 	    return false;
2167 	  }
2168 	case BUILT_IN_FREXP:
2169 	case BUILT_IN_FREXPF:
2170 	case BUILT_IN_FREXPL:
2171 	case BUILT_IN_MODF:
2172 	case BUILT_IN_MODFF:
2173 	case BUILT_IN_MODFL:
2174 	  {
2175 	    tree out = gimple_call_arg (call, 1);
2176 	    return ptr_deref_may_alias_ref_p_1 (out, ref);
2177 	  }
2178 	case BUILT_IN_REMQUO:
2179 	case BUILT_IN_REMQUOF:
2180 	case BUILT_IN_REMQUOL:
2181 	  {
2182 	    tree out = gimple_call_arg (call, 2);
2183 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
2184 	      return true;
2185 	    if (flag_errno_math)
2186 	      break;
2187 	    return false;
2188 	  }
2189 	case BUILT_IN_SINCOS:
2190 	case BUILT_IN_SINCOSF:
2191 	case BUILT_IN_SINCOSL:
2192 	  {
2193 	    tree sin = gimple_call_arg (call, 1);
2194 	    tree cos = gimple_call_arg (call, 2);
2195 	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2196 		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
2197 	  }
2198 	/* __sync_* builtins and some OpenMP builtins act as threading
2199 	   barriers.  */
2200 #undef DEF_SYNC_BUILTIN
2201 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2202 #include "sync-builtins.def"
2203 #undef DEF_SYNC_BUILTIN
2204 	case BUILT_IN_GOMP_ATOMIC_START:
2205 	case BUILT_IN_GOMP_ATOMIC_END:
2206 	case BUILT_IN_GOMP_BARRIER:
2207 	case BUILT_IN_GOMP_BARRIER_CANCEL:
2208 	case BUILT_IN_GOMP_TASKWAIT:
2209 	case BUILT_IN_GOMP_TASKGROUP_END:
2210 	case BUILT_IN_GOMP_CRITICAL_START:
2211 	case BUILT_IN_GOMP_CRITICAL_END:
2212 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
2213 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
2214 	case BUILT_IN_GOMP_LOOP_END:
2215 	case BUILT_IN_GOMP_LOOP_END_CANCEL:
2216 	case BUILT_IN_GOMP_ORDERED_START:
2217 	case BUILT_IN_GOMP_ORDERED_END:
2218 	case BUILT_IN_GOMP_SECTIONS_END:
2219 	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2220 	case BUILT_IN_GOMP_SINGLE_COPY_START:
2221 	case BUILT_IN_GOMP_SINGLE_COPY_END:
2222 	  return true;
2223 	default:
2224 	  /* Fallthru to general call handling.  */;
2225       }
2226 
2227   /* Check if base is a global static variable that is not written
2228      by the function.  */
2229   if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2230     {
2231       struct cgraph_node *node = cgraph_node::get (callee);
2232       bitmap not_written;
2233 
2234       if (node
2235 	  && (not_written = ipa_reference_get_not_written_global (node))
2236 	  && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2237 	return false;
2238     }
2239 
2240   /* Check if the base variable is call-clobbered.  */
2241   if (DECL_P (base))
2242     return pt_solution_includes (gimple_call_clobber_set (call), base);
2243   else if ((TREE_CODE (base) == MEM_REF
2244 	    || TREE_CODE (base) == TARGET_MEM_REF)
2245 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2246     {
2247       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2248       if (!pi)
2249 	return true;
2250 
2251       return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2252     }
2253 
2254   return true;
2255 }
2256 
2257 /* If the call in statement CALL may clobber the memory reference REF
2258    return true, otherwise return false.  */
2259 
2260 bool
2261 call_may_clobber_ref_p (gcall *call, tree ref)
2262 {
2263   bool res;
2264   ao_ref r;
2265   ao_ref_init (&r, ref);
2266   res = call_may_clobber_ref_p_1 (call, &r);
2267   if (res)
2268     ++alias_stats.call_may_clobber_ref_p_may_alias;
2269   else
2270     ++alias_stats.call_may_clobber_ref_p_no_alias;
2271   return res;
2272 }
2273 
2274 
2275 /* If the statement STMT may clobber the memory reference REF return true,
2276    otherwise return false.  */
2277 
2278 bool
2279 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
2280 {
2281   if (is_gimple_call (stmt))
2282     {
2283       tree lhs = gimple_call_lhs (stmt);
2284       if (lhs
2285 	  && TREE_CODE (lhs) != SSA_NAME)
2286 	{
2287 	  ao_ref r;
2288 	  ao_ref_init (&r, lhs);
2289 	  if (refs_may_alias_p_1 (ref, &r, true))
2290 	    return true;
2291 	}
2292 
2293       return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2294     }
2295   else if (gimple_assign_single_p (stmt))
2296     {
2297       tree lhs = gimple_assign_lhs (stmt);
2298       if (TREE_CODE (lhs) != SSA_NAME)
2299 	{
2300 	  ao_ref r;
2301 	  ao_ref_init (&r, lhs);
2302 	  return refs_may_alias_p_1 (ref, &r, true);
2303 	}
2304     }
2305   else if (gimple_code (stmt) == GIMPLE_ASM)
2306     return true;
2307 
2308   return false;
2309 }
2310 
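/* Likewise, with the reference given as a tree.  */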
2311 bool
2312 stmt_may_clobber_ref_p (gimple *stmt, tree ref)
2313 {
2314   ao_ref r;
2315   ao_ref_init (&r, ref);
2316   return stmt_may_clobber_ref_p_1 (stmt, &r);
2317 }
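
/* Example (illustrative): a typical client asks whether a statement
   executed between two uses of a memory reference can invalidate its
   value, e.g. when trying to reuse an earlier load:

       if (!stmt_may_clobber_ref_p (stmt, rhs))
	 ... // the value loaded from rhs survives stmt

   where rhs is the tree of the memory access.  */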
2318 
2319 /* Return true if store1 and store2 described by corresponding tuples
2320    <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
2321    address.  */
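/* For example (illustrative GIMPLE, assuming points-to analysis proved
   that p_1 can only point to the 4-byte variable x):

       x = 1;       <base1 = x,        offset1 = 0, size1 = 32>
       *p_1 = 2;    <base2 = MEM[p_1], offset2 = 0, size2 = 32>

   Both stores cover exactly the 4 bytes of x, so they store to the
   same address with the same size.  */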
2322 
2323 static bool
2324 same_addr_size_stores_p (tree base1, HOST_WIDE_INT offset1, HOST_WIDE_INT size1,
2325 			 HOST_WIDE_INT max_size1,
2326 			 tree base2, HOST_WIDE_INT offset2, HOST_WIDE_INT size2,
2327 			 HOST_WIDE_INT max_size2)
2328 {
2329   /* Offsets need to be 0.  */
2330   if (offset1 != 0
2331       || offset2 != 0)
2332     return false;
2333 
2334   bool base1_obj_p = SSA_VAR_P (base1);
2335   bool base2_obj_p = SSA_VAR_P (base2);
2336 
2337   /* We need exactly one object.  */
2338   if (base1_obj_p == base2_obj_p)
2339     return false;
2340   tree obj = base1_obj_p ? base1 : base2;
2341 
2342   /* And exactly one MEM_REF.  */
2343   bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2344   bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2345   if (base1_memref_p == base2_memref_p)
2346     return false;
2347   tree memref = base1_memref_p ? base1 : base2;
2348 
2349   /* Sizes need to be valid.  */
2350   if (max_size1 == -1 || max_size2 == -1
2351       || size1 == -1 || size2 == -1)
2352     return false;
2353 
2354   /* Max_size needs to match size.  */
2355   if (max_size1 != size1
2356       || max_size2 != size2)
2357     return false;
2358 
2359   /* Sizes need to match.  */
2360   if (size1 != size2)
2361     return false;
2362 
2363 
2364   /* Check that memref is a store to pointer with singleton points-to info.  */
2365   if (!integer_zerop (TREE_OPERAND (memref, 1)))
2366     return false;
2367   tree ptr = TREE_OPERAND (memref, 0);
2368   if (TREE_CODE (ptr) != SSA_NAME)
2369     return false;
2370   struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2371   unsigned int pt_uid;
2372   if (pi == NULL
2373       || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2374     return false;
2375 
2376   /* Be conservative with non-call exceptions when the address might
2377      be NULL.  */
2378   if (flag_non_call_exceptions && pi->pt.null)
2379     return false;
2380 
2381   /* Check that ptr points relative to obj.  */
2382   unsigned int obj_uid = DECL_PT_UID (obj);
2383   if (obj_uid != pt_uid)
2384     return false;
2385 
2386   /* Check that the object size is the same as the store size.  That
2387      ensures that ptr points to the start of obj.  */
2388   if (!tree_fits_shwi_p (DECL_SIZE (obj)))
2389     return false;
2390   HOST_WIDE_INT obj_size = tree_to_shwi (DECL_SIZE (obj));
2391   return obj_size == size1;
2392 }
2393 
2394 /* If STMT kills the memory reference REF return true, otherwise
2395    return false.  */
2396 
2397 bool
2398 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2399 {
2400   if (!ao_ref_base (ref))
2401     return false;
2402 
2403   if (gimple_has_lhs (stmt)
2404       && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2405       /* The assignment is not necessarily carried out if it can throw
2406 	 and we can catch it in the current function where we could inspect
2407 	 the previous value.
2408 	 ???  We only need to care about the RHS throwing.  For aggregate
2409 	 assignments or similar calls and non-call exceptions the LHS
2410 	 might throw as well.  */
2411       && !stmt_can_throw_internal (stmt))
2412     {
2413       tree lhs = gimple_get_lhs (stmt);
2414       /* If LHS is literally a base of the access we are done.  */
2415       if (ref->ref)
2416 	{
2417 	  tree base = ref->ref;
2418 	  tree innermost_dropped_array_ref = NULL_TREE;
2419 	  if (handled_component_p (base))
2420 	    {
2421 	      tree saved_lhs0 = NULL_TREE;
2422 	      if (handled_component_p (lhs))
2423 		{
2424 		  saved_lhs0 = TREE_OPERAND (lhs, 0);
2425 		  TREE_OPERAND (lhs, 0) = integer_zero_node;
2426 		}
2427 	      do
2428 		{
2429 		  /* Just compare the outermost handled component; if
2430 		     they are equal we have found a possible common
2431 		     base.  */
2432 		  tree saved_base0 = TREE_OPERAND (base, 0);
2433 		  TREE_OPERAND (base, 0) = integer_zero_node;
2434 		  bool res = operand_equal_p (lhs, base, 0);
2435 		  TREE_OPERAND (base, 0) = saved_base0;
2436 		  if (res)
2437 		    break;
2438 		  /* Remember if we drop an array-ref so we can later
2439 		     double-check that it is not at struct end.  */
2440 		  if (TREE_CODE (base) == ARRAY_REF
2441 		      || TREE_CODE (base) == ARRAY_RANGE_REF)
2442 		    innermost_dropped_array_ref = base;
2443 		  /* Otherwise drop handled components of the access.  */
2444 		  base = saved_base0;
2445 		}
2446 	      while (handled_component_p (base));
2447 	      if (saved_lhs0)
2448 		TREE_OPERAND (lhs, 0) = saved_lhs0;
2449 	    }
2450 	  /* Finally check if the lhs has the same address and size as the
2451 	     base candidate of the access.  Watch out if we have dropped
2452 	     an array-ref that was at struct end; this means ref->ref may
2453 	     be outside of the TYPE_SIZE of its base.  */
2454 	  if ((! innermost_dropped_array_ref
2455 	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
2456 	      && (lhs == base
2457 		  || (((TYPE_SIZE (TREE_TYPE (lhs))
2458 			== TYPE_SIZE (TREE_TYPE (base)))
2459 		       || (TYPE_SIZE (TREE_TYPE (lhs))
2460 			   && TYPE_SIZE (TREE_TYPE (base))
2461 			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2462 					       TYPE_SIZE (TREE_TYPE (base)),
2463 					       0)))
2464 		      && operand_equal_p (lhs, base,
2465 					  OEP_ADDRESS_OF
2466 					  | OEP_MATCH_SIDE_EFFECTS))))
2467 	    return true;
2468 	}
2469 
2470       /* Now look for non-literal equal bases, restricted to handling
2471          constant offsets and sizes.  */
2472       /* For a must-alias check we need to be able to constrain
2473 	 the access properly.  */
2474       if (ref->max_size == -1)
2475 	return false;
2476       HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
2477       bool reverse;
2478       tree base
2479 	= get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
2480       /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2481 	 so base == ref->base does not always hold.  */
2482       if (base != ref->base)
2483 	{
2484 	  /* Try using points-to info.  */
2485 	  if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2486 				       ref->offset, ref->size, ref->max_size))
2487 	    return true;
2488 
2489 	  /* If both base and ref->base are MEM_REFs, only compare the
2490 	     first operand, and if the second operand isn't equal constant,
2491 	     try to add the offsets into offset and ref_offset.  */
2492 	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2493 	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2494 	    {
2495 	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2496 				       TREE_OPERAND (ref->base, 1)))
2497 		{
2498 		  offset_int off1 = mem_ref_offset (base);
2499 		  off1 <<= LOG2_BITS_PER_UNIT;
2500 		  off1 += offset;
2501 		  offset_int off2 = mem_ref_offset (ref->base);
2502 		  off2 <<= LOG2_BITS_PER_UNIT;
2503 		  off2 += ref_offset;
2504 		  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
2505 		    {
2506 		      offset = off1.to_shwi ();
2507 		      ref_offset = off2.to_shwi ();
2508 		    }
2509 		  else
2510 		    size = -1;
2511 		}
2512 	    }
2513 	  else
2514 	    size = -1;
2515 	}
2516       /* For a must-alias check we need to be able to constrain
2517 	 the access properly.  */
2518       if (size != -1 && size == max_size)
2519 	{
2520 	  if (offset <= ref_offset
2521 	      && offset + size >= ref_offset + ref->max_size)
2522 	    return true;
2523 	}
2524     }
2525 
2526   if (is_gimple_call (stmt))
2527     {
2528       tree callee = gimple_call_fndecl (stmt);
2529       if (callee != NULL_TREE
2530 	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2531 	switch (DECL_FUNCTION_CODE (callee))
2532 	  {
2533 	  case BUILT_IN_FREE:
2534 	    {
2535 	      tree ptr = gimple_call_arg (stmt, 0);
2536 	      tree base = ao_ref_base (ref);
2537 	      if (base && TREE_CODE (base) == MEM_REF
2538 		  && TREE_OPERAND (base, 0) == ptr)
2539 		return true;
2540 	      break;
2541 	    }
2542 
2543 	  case BUILT_IN_MEMCPY:
2544 	  case BUILT_IN_MEMPCPY:
2545 	  case BUILT_IN_MEMMOVE:
2546 	  case BUILT_IN_MEMSET:
2547 	  case BUILT_IN_MEMCPY_CHK:
2548 	  case BUILT_IN_MEMPCPY_CHK:
2549 	  case BUILT_IN_MEMMOVE_CHK:
2550 	  case BUILT_IN_MEMSET_CHK:
2551 	    {
2552 	      /* For a must-alias check we need to be able to constrain
2553 		 the access properly.  */
2554 	      if (ref->max_size == -1)
2555 		return false;
2556 	      tree dest = gimple_call_arg (stmt, 0);
2557 	      tree len = gimple_call_arg (stmt, 2);
2558 	      if (!tree_fits_shwi_p (len))
2559 		return false;
2560 	      tree rbase = ref->base;
2561 	      offset_int roffset = ref->offset;
2562 	      ao_ref dref;
2563 	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
2564 	      tree base = ao_ref_base (&dref);
2565 	      offset_int offset = dref.offset;
2566 	      if (!base || dref.size == -1)
2567 		return false;
2568 	      if (TREE_CODE (base) == MEM_REF)
2569 		{
2570 		  if (TREE_CODE (rbase) != MEM_REF)
2571 		    return false;
2572 		  /* Compare pointers.  */
2573 		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2574 		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2575 		  base = TREE_OPERAND (base, 0);
2576 		  rbase = TREE_OPERAND (rbase, 0);
2577 		}
2578 	      if (base == rbase
2579 		  && offset <= roffset
2580 		  && (roffset + ref->max_size
2581 		      <= offset + (wi::to_offset (len) << LOG2_BITS_PER_UNIT)))
2582 		return true;
2583 	      break;
2584 	    }
2585 
2586 	  case BUILT_IN_VA_END:
2587 	    {
2588 	      tree ptr = gimple_call_arg (stmt, 0);
2589 	      if (TREE_CODE (ptr) == ADDR_EXPR)
2590 		{
2591 		  tree base = ao_ref_base (ref);
2592 		  if (TREE_OPERAND (ptr, 0) == base)
2593 		    return true;
2594 		}
2595 	      break;
2596 	    }
2597 
2598 	  default:;
2599 	  }
2600     }
2601   return false;
2602 }
2603 
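/* Likewise, with the reference given as a tree.  */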
2604 bool
2605 stmt_kills_ref_p (gimple *stmt, tree ref)
2606 {
2607   ao_ref r;
2608   ao_ref_init (&r, ref);
2609   return stmt_kills_ref_p (stmt, &r);
2610 }
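
/* Example (illustrative): dead store elimination can use the kill
   predicate to prove that an earlier store is fully overwritten:

       *p = old;    // candidate dead store
       ...          // no intervening read of *p
       *p = new;    // stmt_kills_ref_p (later_store, earlier_lhs)

   The later access must cover the earlier one completely; partial
   overlaps make the predicate return false.  */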
2611 
2612 
2613 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2614    TARGET or a statement clobbering the memory reference REF, in which
2615    case false is returned.  The walk starts with VUSE, one argument of PHI.  */
2616 
2617 static bool
2618 maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
2619 		  tree vuse, unsigned int *cnt, bitmap *visited,
2620 		  bool abort_on_visited,
2621 		  void *(*translate)(ao_ref *, tree, void *, bool *),
2622 		  void *data)
2623 {
2624   basic_block bb = gimple_bb (phi);
2625 
2626   if (!*visited)
2627     *visited = BITMAP_ALLOC (NULL);
2628 
2629   bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2630 
2631   /* Walk until we hit the target.  */
2632   while (vuse != target)
2633     {
2634       gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2635       /* Recurse for PHI nodes.  */
2636       if (gimple_code (def_stmt) == GIMPLE_PHI)
2637 	{
2638 	  /* An already visited PHI node ends the walk successfully.  */
2639 	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2640 	    return !abort_on_visited;
2641 	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2642 					   visited, abort_on_visited,
2643 					   translate, data);
2644 	  if (!vuse)
2645 	    return false;
2646 	  continue;
2647 	}
2648       else if (gimple_nop_p (def_stmt))
2649 	return false;
2650       else
2651 	{
2652 	  /* A clobbering statement or the end of the IL ends it failing.  */
2653 	  ++*cnt;
2654 	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2655 	    {
2656 	      bool disambiguate_only = true;
2657 	      if (translate
2658 		  && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2659 		;
2660 	      else
2661 		return false;
2662 	    }
2663 	}
2664       /* If we reach a new basic-block see if we already skipped it
2665          in a previous walk that ended successfully.  */
2666       if (gimple_bb (def_stmt) != bb)
2667 	{
2668 	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2669 	    return !abort_on_visited;
2670 	  bb = gimple_bb (def_stmt);
2671 	}
2672       vuse = gimple_vuse (def_stmt);
2673     }
2674   return true;
2675 }
2676 
2677 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2678    until we hit the phi argument definition that dominates the other one.
2679    Return that, or NULL_TREE if there is no such definition.  */
2680 
2681 static tree
2682 get_continuation_for_phi_1 (gimple *phi, tree arg0, tree arg1,
2683 			    ao_ref *ref, unsigned int *cnt,
2684 			    bitmap *visited, bool abort_on_visited,
2685 			    void *(*translate)(ao_ref *, tree, void *, bool *),
2686 			    void *data)
2687 {
2688   gimple *def0 = SSA_NAME_DEF_STMT (arg0);
2689   gimple *def1 = SSA_NAME_DEF_STMT (arg1);
2690   tree common_vuse;
2691 
2692   if (arg0 == arg1)
2693     return arg0;
2694   else if (gimple_nop_p (def0)
2695 	   || (!gimple_nop_p (def1)
2696 	       && dominated_by_p (CDI_DOMINATORS,
2697 				  gimple_bb (def1), gimple_bb (def0))))
2698     {
2699       if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
2700 			    visited, abort_on_visited, translate, data))
2701 	return arg0;
2702     }
2703   else if (gimple_nop_p (def1)
2704 	   || dominated_by_p (CDI_DOMINATORS,
2705 			      gimple_bb (def0), gimple_bb (def1)))
2706     {
2707       if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2708 			    visited, abort_on_visited, translate, data))
2709 	return arg1;
2710     }
2711   /* Special case of a diamond:
2712        MEM_1 = ...
2713        goto (cond) ? L1 : L2
2714        L1: store1 = ...    #MEM_2 = vuse(MEM_1)
2715 	   goto L3
2716        L2: store2 = ...    #MEM_3 = vuse(MEM_1)
2717        L3: MEM_4 = PHI<MEM_2, MEM_3>
2718      We were called with the PHI at L3; MEM_2 and MEM_3 don't
2719      dominate each other, but we can still easily skip this PHI node
2720      if we recognize that the vuse MEM operand is the same for both,
2721      and that we can skip both statements (they don't clobber us).
2722      This is still linear.  Don't use maybe_skip_until, as that
2723      might potentially be slow.  */
2724   else if ((common_vuse = gimple_vuse (def0))
2725 	   && common_vuse == gimple_vuse (def1))
2726     {
2727       bool disambiguate_only = true;
2728       *cnt += 2;
2729       if ((!stmt_may_clobber_ref_p_1 (def0, ref)
2730 	   || (translate
2731 	       && (*translate) (ref, arg0, data, &disambiguate_only) == NULL))
2732 	  && (!stmt_may_clobber_ref_p_1 (def1, ref)
2733 	      || (translate
2734 		  && (*translate) (ref, arg1, data, &disambiguate_only) == NULL)))
2735 	return common_vuse;
2736     }
2737 
2738   return NULL_TREE;
2739 }
2740 
2741 
2742 /* Starting from a PHI node for the virtual operand of the memory reference
2743    REF, find a continuation virtual operand that allows us to continue walking
2744    statements dominating PHI skipping only statements that cannot possibly
2745    clobber REF.  Increments *CNT for each alias disambiguation done.
2746    Returns NULL_TREE if no suitable virtual operand can be found.  */
2747 
2748 tree
2749 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2750 			  unsigned int *cnt, bitmap *visited,
2751 			  bool abort_on_visited,
2752 			  void *(*translate)(ao_ref *, tree, void *, bool *),
2753 			  void *data)
2754 {
2755   unsigned nargs = gimple_phi_num_args (phi);
2756 
2757   /* Through a single-argument PHI we can simply look through.  */
2758   if (nargs == 1)
2759     return PHI_ARG_DEF (phi, 0);
2760 
2761   /* For two or more arguments try to pairwise skip non-aliasing code
2762      until we hit the phi argument definition that dominates the other one.  */
2763   else if (nargs >= 2)
2764     {
2765       tree arg0, arg1;
2766       unsigned i;
2767 
2768       /* Find a candidate for the virtual operand which definition
2769 	 dominates those of all others.  */
2770       arg0 = PHI_ARG_DEF (phi, 0);
2771       if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2772 	for (i = 1; i < nargs; ++i)
2773 	  {
2774 	    arg1 = PHI_ARG_DEF (phi, i);
2775 	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2776 	      {
2777 		arg0 = arg1;
2778 		break;
2779 	      }
2780 	    if (dominated_by_p (CDI_DOMINATORS,
2781 				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2782 				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2783 	      arg0 = arg1;
2784 	  }
2785 
2786       /* Then pairwise reduce against the found candidate.  */
2787       for (i = 0; i < nargs; ++i)
2788 	{
2789 	  arg1 = PHI_ARG_DEF (phi, i);
2790 	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2791 					     cnt, visited, abort_on_visited,
2792 					     translate, data);
2793 	  if (!arg0)
2794 	    return NULL_TREE;
2795 	}
2796 
2797       return arg0;
2798     }
2799 
2800   return NULL_TREE;
2801 }
2802 
2803 /* Based on the memory reference REF and its virtual use VUSE call
2804    WALKER for each virtual use that is equivalent to VUSE, including VUSE
2805    itself.  That is, for each virtual use whose defining statement
2806    does not clobber REF.
2807 
2808    WALKER is called with REF, the current virtual use and DATA.  If
2809    WALKER returns non-NULL the walk stops and its result is returned.
2810    At the end of a non-successful walk NULL is returned.
2811 
2812    TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2813    use whose definition is a statement that may clobber REF, and DATA.
2814    If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2815    If TRANSLATE returns non-NULL the walk stops and its result is returned.
2816    If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2817    to adjust REF and *DATA to make that valid.
2818 
2819    VALUEIZE if non-NULL is called with the next VUSE that is considered
2820    and its return value is substituted for that.  This can be used to
2821    implement optimistic value-numbering for example.  Note that the
2822    VUSE argument is assumed to be valueized already.
2823 
2824    TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
2825 
2826 void *
2827 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2828 			void *(*walker)(ao_ref *, tree, unsigned int, void *),
2829 			void *(*translate)(ao_ref *, tree, void *, bool *),
2830 			tree (*valueize)(tree),
2831 			void *data)
2832 {
2833   bitmap visited = NULL;
2834   void *res;
2835   unsigned int cnt = 0;
2836   bool translated = false;
2837 
2838   timevar_push (TV_ALIAS_STMT_WALK);
2839 
2840   do
2841     {
2842       gimple *def_stmt;
2843 
2844       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
2845       res = (*walker) (ref, vuse, cnt, data);
2846       /* Abort walk.  */
2847       if (res == (void *)-1)
2848 	{
2849 	  res = NULL;
2850 	  break;
2851 	}
2852       /* Lookup succeeded.  */
2853       else if (res != NULL)
2854 	break;
2855 
2856       if (valueize)
2857 	vuse = valueize (vuse);
2858       def_stmt = SSA_NAME_DEF_STMT (vuse);
2859       if (gimple_nop_p (def_stmt))
2860 	break;
2861       else if (gimple_code (def_stmt) == GIMPLE_PHI)
2862 	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2863 					 &visited, translated, translate, data);
2864       else
2865 	{
2866 	  cnt++;
2867 	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2868 	    {
2869 	      if (!translate)
2870 		break;
2871 	      bool disambiguate_only = false;
2872 	      res = (*translate) (ref, vuse, data, &disambiguate_only);
2873 	      /* Failed lookup and translation.  */
2874 	      if (res == (void *)-1)
2875 		{
2876 		  res = NULL;
2877 		  break;
2878 		}
2879 	      /* Lookup succeeded.  */
2880 	      else if (res != NULL)
2881 		break;
2882 	      /* Translation succeeded, continue walking.  */
2883 	      translated = translated || !disambiguate_only;
2884 	    }
2885 	  vuse = gimple_vuse (def_stmt);
2886 	}
2887     }
2888   while (vuse);
2889 
2890   if (visited)
2891     BITMAP_FREE (visited);
2892 
2893   timevar_pop (TV_ALIAS_STMT_WALK);
2894 
2895   return res;
2896 }
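
/* Example (illustrative sketch; collect_vuse is hypothetical): a
   minimal client that inspects every virtual use equivalent to the
   starting one, using neither translation nor valueization:

       static void *
       collect_vuse (ao_ref *ref, tree vuse, unsigned int cnt, void *data)
       {
	 // Returning NULL continues the walk; non-NULL stops it and
	 // becomes the result of walk_non_aliased_vuses.
	 return NULL;
       }

       void *res = walk_non_aliased_vuses (&r, vuse, collect_vuse,
					   NULL, NULL, NULL);

   A real walker, as in value numbering, would look up R at each VUSE
   in a hash table and return the recorded value on a hit.  */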
2897 
2898 
2899 /* Based on the memory reference REF call WALKER for each vdef whose
2900    defining statement may clobber REF, starting with VDEF.  If REF
2901    is NULL, each defining statement is visited.  */
2902 
2903    WALKER is called with REF, the current vdef and DATA.  If WALKER
2904    returns true the walk is stopped, otherwise it continues.
2905 
2906    If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2907    The pointer may be NULL, in which case this information is not tracked.
2908 
2909    At PHI nodes walk_aliased_vdefs forks into one walk for each
2910    PHI argument (but only one walk continues on merge points); the
2911    return value is true if any of the walks was successful.
2912 
2913    The function returns the number of statements walked or -1 if
2914    LIMIT stmts were walked and the walk was aborted at this point.
2915    If LIMIT is zero the walk is not aborted.  */
2916 
2917 static int
2918 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2919 		      bool (*walker)(ao_ref *, tree, void *), void *data,
2920 		      bitmap *visited, unsigned int cnt,
2921 		      bool *function_entry_reached, unsigned limit)
2922 {
2923   do
2924     {
2925       gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2926 
2927       if (*visited
2928 	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2929 	return cnt;
2930 
2931       if (gimple_nop_p (def_stmt))
2932 	{
2933 	  if (function_entry_reached)
2934 	    *function_entry_reached = true;
2935 	  return cnt;
2936 	}
2937       else if (gimple_code (def_stmt) == GIMPLE_PHI)
2938 	{
2939 	  unsigned i;
2940 	  if (!*visited)
2941 	    *visited = BITMAP_ALLOC (NULL);
2942 	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2943 	    {
2944 	      int res = walk_aliased_vdefs_1 (ref,
2945 					      gimple_phi_arg_def (def_stmt, i),
2946 					      walker, data, visited, cnt,
2947 					      function_entry_reached, limit);
2948 	      if (res == -1)
2949 		return -1;
2950 	      cnt = res;
2951 	    }
2952 	  return cnt;
2953 	}
2954 
2955       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
2956       cnt++;
2957       if (cnt == limit)
2958 	return -1;
2959       if ((!ref
2960 	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2961 	  && (*walker) (ref, vdef, data))
2962 	return cnt;
2963 
2964       vdef = gimple_vuse (def_stmt);
2965     }
2966   while (1);
2967 }
2968 
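/* Wrapper around walk_aliased_vdefs_1, documented above, that provides
   a local visited bitmap if the caller passes none.  */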
2969 int
2970 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2971 		    bool (*walker)(ao_ref *, tree, void *), void *data,
2972 		    bitmap *visited,
2973 		    bool *function_entry_reached, unsigned int limit)
2974 {
2975   bitmap local_visited = NULL;
2976   int ret;
2977 
2978   timevar_push (TV_ALIAS_STMT_WALK);
2979 
2980   if (function_entry_reached)
2981     *function_entry_reached = false;
2982 
2983   ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2984 			      visited ? visited : &local_visited, 0,
2985 			      function_entry_reached, limit);
2986   if (local_visited)
2987     BITMAP_FREE (local_visited);
2988 
2989   timevar_pop (TV_ALIAS_STMT_WALK);
2990 
2991   return ret;
2992 }
2993 
2994