/* Alias analysis for trees.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"

#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function; its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
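
/* As a minimal usage sketch (illustrative only; apart from the two
   oracle entry points every name below is a placeholder, not something
   defined in this file):

     if (!stmt_may_clobber_ref_p (stmt, ref_tree))
       optimize_knowing_store_does_not_clobber ();
     if (!refs_may_alias_p (ref_tree, other_ref_tree))
       optimize_knowing_refs_are_independent ();

   A false answer is a guarantee; a true answer only means the oracle
   was unable to disambiguate the references.  */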


/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}



/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
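
/* For instance (illustrative sketch; A and P are placeholders), given

     int a;
     int *p = &a;

   this predicate returns true for the SSA name of P and the decl of A,
   and false for any pointer whose points-to set provably does not
   contain A.  Decls that may not be aliased cannot be pointed to at
   all and immediately yield false.  */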

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two occurrences
     of the same pointer.  In this case we still want to say both
     pointers alias, so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Return true if a reference with base BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ???  We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address-taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ???  We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
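
/* A typical pattern (illustrative sketch; REF_TREE is a placeholder)
   builds the representation on the stack:

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     ... query ao_ref_base (&r), ao_ref_alias_set (&r), ...

   The base, extents and alias sets are computed lazily by the
   accessors below and cached in the structure.  */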

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
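
/* For example (mirroring the builtin handling further down in this
   file), the source of a memcpy-like call can be modeled as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 1),
				    gimple_call_arg (call, 2));

   after which DREF can be handed to refs_may_alias_p_1 and friends.  */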

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
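
/* Callers have to treat the -1 answer conservatively.  The access-path
   disambiguation below, for instance, follows the pattern

     same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
     if (same_p == -1)
       return true;
     else if (same_p == 1)
       ... disambiguate using offsets relative to the common base ...

   Only a definitive 0 or 1 licenses further reasoning.  */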

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* Bases must be either the same or uncomparable.  */
  gcc_checking_assert (ref1 == ref2
		       || (DECL_P (ref1) && DECL_P (ref2)
			   && compare_base_decls (ref1, ref2) != 0));

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}
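
/* As an example (illustrative only), for a declaration like

     struct { struct { int x; int y; } a; struct { int z; } b; } s;

   the accesses s.a.x and s.b.z pop COMPONENT_REFs of equal rank whose
   FIELD_DECLs A and B differ within the same RECORD_TYPE and are thus
   disambiguated; two accesses through different members of a union at
   the same rank are not.  */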

/* qsort compare function to sort FIELD_DECLs by the TYPE_UID of their
   DECL_FIELD_CONTEXT.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap, unless they are both bitfields.  */
	  if (fieldx != fieldy)
	    return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}
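
/* As an illustration, with

     struct S { int f; int g; };

   the accesses p->f and q->g cannot overlap no matter what P and Q
   point to, because F and G occupy disjoint parts of every object of
   type S.  The merge walk over the two TYPE_UID-sorted field vectors
   above simply looks for one record type common to both access paths.  */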


/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If the references are based on different variables, they cannot
     alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}
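
/* Note that the offsets and maximum sizes above are bit quantities.
   For instance, for char a[8], an access to a[0] covers bits [0, 8)
   and an access to a[4] covers bits [32, 40); ranges_overlap_p sees
   disjoint ranges, so the two references are disambiguated even though
   both are based on the same decl A.  */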

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  offset_int moff = mem_ref_offset (base1);
  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ???  IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base, we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ???  We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl,
     just use the usual conflict check rather than a subset test.
     ???  We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ???  This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      offset_int moff = mem_ref_offset (dbase2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	doffset1 -= (-moff).to_short_addr ();
      else
	doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}
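
/* As an example of the size-based check above (sketch only), given

     char c;
     int *p;

   a four-byte access *P cannot lie entirely within the one-byte decl
   C: DECL_SIZE (c) is smaller than the TYPE_SIZE of the access type,
   so the pair is disambiguated even where neither the points-to
   information nor the alias sets could rule it out.  */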

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      offset_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset2 += (-moff).to_short_addr ();
      else
	offset1 += moff.to_short_addr ();
      moff = mem_ref_offset (base2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset1 += (-moff).to_short_addr ();
      else
	offset2 += moff.to_short_addr ();
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}
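
/* As an illustration of the equal-base shortcut above, MEM[p + 0] and
   MEM[p + 4], each accessed with four-byte size, share the pointer P;
   after biasing the embedded offsets the bit ranges [0, 32) and
   [32, 64) do not overlap, so the references cannot alias no matter
   what P points to.  */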

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases, conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, true);
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   read memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
1702 	   barriers.  */
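	/* Since another thread may read the memory in question across
	   such a barrier, these conservatively count as a use.  */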
1703 #undef DEF_SYNC_BUILTIN
1704 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1705 #include "sync-builtins.def"
1706 #undef DEF_SYNC_BUILTIN
1707 	case BUILT_IN_GOMP_ATOMIC_START:
1708 	case BUILT_IN_GOMP_ATOMIC_END:
1709 	case BUILT_IN_GOMP_BARRIER:
1710 	case BUILT_IN_GOMP_BARRIER_CANCEL:
1711 	case BUILT_IN_GOMP_TASKWAIT:
1712 	case BUILT_IN_GOMP_TASKGROUP_END:
1713 	case BUILT_IN_GOMP_CRITICAL_START:
1714 	case BUILT_IN_GOMP_CRITICAL_END:
1715 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
1716 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
1717 	case BUILT_IN_GOMP_LOOP_END:
1718 	case BUILT_IN_GOMP_LOOP_END_CANCEL:
1719 	case BUILT_IN_GOMP_ORDERED_START:
1720 	case BUILT_IN_GOMP_ORDERED_END:
1721 	case BUILT_IN_GOMP_SECTIONS_END:
1722 	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1723 	case BUILT_IN_GOMP_SINGLE_COPY_START:
1724 	case BUILT_IN_GOMP_SINGLE_COPY_END:
1725 	  return true;
1726 
1727 	default:
1728 	  /* Fallthru to general call handling.  */;
1729       }
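  /* As an illustration of the explicit handling above: for
       __builtin_memcpy (a, b, 16);
     only a read of 16 bytes at *b is queried against REF, so a REF
     that provably cannot alias *b is disambiguated here without
     consulting the call-use solution below.  */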
1730 
1731   /* Check if base is a global static variable that is not read
1732      by the function.  */
1733   if (callee != NULL_TREE
1734       && TREE_CODE (base) == VAR_DECL
1735       && TREE_STATIC (base))
1736     {
1737       struct cgraph_node *node = cgraph_node::get (callee);
1738       bitmap not_read;
1739 
1740       /* FIXME: Callee can be an OMP builtin that does not have a call graph
1741 	 node yet.  We should enforce that there are nodes for all decls in the
1742 	 IL and remove this check instead.  */
1743       if (node
1744 	  && (not_read = ipa_reference_get_not_read_global (node))
1745 	  && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1746 	goto process_args;
1747     }
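  /* E.g. a file-scope static that IPA reference analysis has proven
     CALLEE never reads cannot be used by the call.  */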
1748 
1749   /* Check if the base variable is call-used.  */
1750   if (DECL_P (base))
1751     {
1752       if (pt_solution_includes (gimple_call_use_set (call), base))
1753 	return true;
1754     }
1755   else if ((TREE_CODE (base) == MEM_REF
1756 	    || TREE_CODE (base) == TARGET_MEM_REF)
1757 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1758     {
1759       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1760       if (!pi)
1761 	return true;
1762 
1763       if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1764 	return true;
1765     }
1766   else
1767     return true;
1768 
1769   /* Inspect call arguments for passed-by-value aliases.  */
1770 process_args:
1771   for (i = 0; i < gimple_call_num_args (call); ++i)
1772     {
1773       tree op = gimple_call_arg (call, i);
1774       int flags = gimple_call_arg_flags (call, i);
1775 
1776       if (flags & EAF_UNUSED)
1777 	continue;
1778 
1779       if (TREE_CODE (op) == WITH_SIZE_EXPR)
1780 	op = TREE_OPERAND (op, 0);
1781 
1782       if (TREE_CODE (op) != SSA_NAME
1783 	  && !is_gimple_min_invariant (op))
1784 	{
1785 	  ao_ref r;
1786 	  ao_ref_init (&r, op);
1787 	  if (refs_may_alias_p_1 (&r, ref, true))
1788 	    return true;
1789 	}
1790     }
1791 
1792   return false;
1793 }
1794 
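/* Wrapper around ref_maybe_used_by_call_p_1 that records alias-oracle
   statistics for the query.  */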
1795 static bool
1796 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
1797 {
1798   bool res;
1799   res = ref_maybe_used_by_call_p_1 (call, ref);
1800   if (res)
1801     ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1802   else
1803     ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1804   return res;
1805 }
1806 
1807 
1808 /* If the statement STMT may use the memory reference REF return
1809    true, otherwise return false.  */
1810 
1811 bool
1812 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
1813 {
1814   if (is_gimple_assign (stmt))
1815     {
1816       tree rhs;
1817 
1818       /* All assign statements that reference memory are single.  */
1819       if (!gimple_assign_single_p (stmt))
1820 	return false;
1821 
1822       rhs = gimple_assign_rhs1 (stmt);
1823       if (is_gimple_reg (rhs)
1824 	  || is_gimple_min_invariant (rhs)
1825 	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1826 	return false;
1827 
1828       return refs_may_alias_p (rhs, ref);
1829     }
1830   else if (is_gimple_call (stmt))
1831     return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
1832   else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1833     {
1834       tree retval = gimple_return_retval (return_stmt);
1835       if (retval
1836 	  && TREE_CODE (retval) != SSA_NAME
1837 	  && !is_gimple_min_invariant (retval)
1838 	  && refs_may_alias_p (retval, ref))
1839 	return true;
1840       /* If ref escapes the function then the return acts as a use.  */
1841       tree base = ao_ref_base (ref);
1842       if (!base)
1843 	;
1844       else if (DECL_P (base))
1845 	return is_global_var (base);
1846       else if (TREE_CODE (base) == MEM_REF
1847 	       || TREE_CODE (base) == TARGET_MEM_REF)
1848 	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1849       return false;
1850     }
1851 
1852   return true;
1853 }
1854 
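/* Likewise, but for a plain reference tree REF.  */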
1855 bool
1856 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
1857 {
1858   ao_ref r;
1859   ao_ref_init (&r, ref);
1860   return ref_maybe_used_by_stmt_p (stmt, &r);
1861 }
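
/* A hypothetical client of the oracle entry points above (an
   illustrative sketch only; stmt and ref are made-up names):

     if (!ref_maybe_used_by_stmt_p (stmt, ref)
	 && !stmt_may_clobber_ref_p (stmt, ref))
       ... stmt neither reads nor writes the memory designated by ref,
	   so a load of ref may be moved across stmt ...  */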
1862 
1863 /* If the call in statement CALL may clobber the memory reference REF
1864    return true, otherwise return false.  */
1865 
1866 bool
1867 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1868 {
1869   tree base;
1870   tree callee;
1871 
1872   /* If the call is pure or const it cannot clobber anything.  */
1873   if (gimple_call_flags (call)
1874       & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1875     return false;
1876   if (gimple_call_internal_p (call))
1877     switch (gimple_call_internal_fn (call))
1878       {
1879 	/* Treat these internal calls like ECF_PURE for aliasing: they
1880 	   don't write to any memory the program should care about.
1881 	   They do have important other side-effects and read memory,
1882 	   so they can't be ECF_NOVOPS.  */
1883       case IFN_UBSAN_NULL:
1884       case IFN_UBSAN_BOUNDS:
1885       case IFN_UBSAN_VPTR:
1886       case IFN_UBSAN_OBJECT_SIZE:
1887       case IFN_ASAN_CHECK:
1888 	return false;
1889       default:
1890 	break;
1891       }
1892 
1893   base = ao_ref_base (ref);
1894   if (!base)
1895     return true;
1896 
1897   if (TREE_CODE (base) == SSA_NAME
1898       || CONSTANT_CLASS_P (base))
1899     return false;
1900 
1901   /* A call that is not free of side-effects might involve volatile
1902      accesses and thus conflicts with all other volatile accesses.  */
1903   if (ref->volatile_p)
1904     return true;
1905 
1906   /* If the reference is based on a decl that is not aliased the call
1907      cannot possibly clobber it.  */
1908   if (DECL_P (base)
1909       && !may_be_aliased (base)
1910       /* But local non-readonly statics can be modified through recursion
1911          or the call may implement a threading barrier, which we must
1912 	 treat as a may-def.  */
1913       && (TREE_READONLY (base)
1914 	  || !is_global_var (base)))
1915     return false;
1916 
1917   callee = gimple_call_fndecl (call);
1918 
1919   /* Handle those builtin functions explicitly that do not act as
1920      escape points.  See tree-ssa-structalias.c:find_func_aliases
1921      for the list of builtins we might need to handle here.  */
1922   if (callee != NULL_TREE
1923       && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1924     switch (DECL_FUNCTION_CODE (callee))
1925       {
1926 	/* All the following functions clobber memory pointed to by
1927 	   their first argument.  */
1928 	case BUILT_IN_STRCPY:
1929 	case BUILT_IN_STRNCPY:
1930 	case BUILT_IN_MEMCPY:
1931 	case BUILT_IN_MEMMOVE:
1932 	case BUILT_IN_MEMPCPY:
1933 	case BUILT_IN_STPCPY:
1934 	case BUILT_IN_STPNCPY:
1935 	case BUILT_IN_STRCAT:
1936 	case BUILT_IN_STRNCAT:
1937 	case BUILT_IN_MEMSET:
1938 	case BUILT_IN_TM_MEMSET:
1939 	CASE_BUILT_IN_TM_STORE (1):
1940 	CASE_BUILT_IN_TM_STORE (2):
1941 	CASE_BUILT_IN_TM_STORE (4):
1942 	CASE_BUILT_IN_TM_STORE (8):
1943 	CASE_BUILT_IN_TM_STORE (FLOAT):
1944 	CASE_BUILT_IN_TM_STORE (DOUBLE):
1945 	CASE_BUILT_IN_TM_STORE (LDOUBLE):
1946 	CASE_BUILT_IN_TM_STORE (M64):
1947 	CASE_BUILT_IN_TM_STORE (M128):
1948 	CASE_BUILT_IN_TM_STORE (M256):
1949 	case BUILT_IN_TM_MEMCPY:
1950 	case BUILT_IN_TM_MEMMOVE:
1951 	  {
1952 	    ao_ref dref;
1953 	    tree size = NULL_TREE;
1954 	    /* Don't pass in a size for strncat: it writes at most n + 1
1955 	       bytes starting at dest + strlen (dest), i.e. up to
1956 	       strlen (dest) + n + 1 bytes in total, and strlen (dest)
1957 	       isn't known.  */
1958 	    if (gimple_call_num_args (call) == 3
1959 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1960 	      size = gimple_call_arg (call, 2);
1961 	    ao_ref_init_from_ptr_and_size (&dref,
1962 					   gimple_call_arg (call, 0),
1963 					   size);
1964 	    return refs_may_alias_p_1 (&dref, ref, false);
1965 	  }
1966 	case BUILT_IN_STRCPY_CHK:
1967 	case BUILT_IN_STRNCPY_CHK:
1968 	case BUILT_IN_MEMCPY_CHK:
1969 	case BUILT_IN_MEMMOVE_CHK:
1970 	case BUILT_IN_MEMPCPY_CHK:
1971 	case BUILT_IN_STPCPY_CHK:
1972 	case BUILT_IN_STPNCPY_CHK:
1973 	case BUILT_IN_STRCAT_CHK:
1974 	case BUILT_IN_STRNCAT_CHK:
1975 	case BUILT_IN_MEMSET_CHK:
1976 	  {
1977 	    ao_ref dref;
1978 	    tree size = NULL_TREE;
1979 	    /* Don't pass in a size for __strncat_chk: it writes at most
1980 	       n + 1 bytes starting at dest + strlen (dest), i.e. up to
1981 	       strlen (dest) + n + 1 bytes in total, and strlen (dest)
1982 	       isn't known.  */
1983 	    if (gimple_call_num_args (call) == 4
1984 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
1985 	      size = gimple_call_arg (call, 2);
1986 	    ao_ref_init_from_ptr_and_size (&dref,
1987 					   gimple_call_arg (call, 0),
1988 					   size);
1989 	    return refs_may_alias_p_1 (&dref, ref, false);
1990 	  }
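	/* bcopy (src, dest, len) takes its destination second, hence
	   the clobbered memory is pointed to by argument 1 here.  */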
1991 	case BUILT_IN_BCOPY:
1992 	  {
1993 	    ao_ref dref;
1994 	    tree size = gimple_call_arg (call, 2);
1995 	    ao_ref_init_from_ptr_and_size (&dref,
1996 					   gimple_call_arg (call, 1),
1997 					   size);
1998 	    return refs_may_alias_p_1 (&dref, ref, false);
1999 	  }
2000 	/* Allocating memory does not have any side-effects apart from
2001 	   being the definition point for the pointer.  */
2002 	case BUILT_IN_MALLOC:
2003 	case BUILT_IN_ALIGNED_ALLOC:
2004 	case BUILT_IN_CALLOC:
2005 	case BUILT_IN_STRDUP:
2006 	case BUILT_IN_STRNDUP:
2007 	  /* Unix98 specifies that errno is set on allocation failure.  */
2008 	  if (flag_errno_math
2009 	      && targetm.ref_may_alias_errno (ref))
2010 	    return true;
2011 	  return false;
2012 	case BUILT_IN_STACK_SAVE:
2013 	case BUILT_IN_ALLOCA:
2014 	case BUILT_IN_ALLOCA_WITH_ALIGN:
2015 	case BUILT_IN_ASSUME_ALIGNED:
2016 	  return false;
2017 	/* But posix_memalign stores a pointer into the memory pointed to
2018 	   by its first argument.  */
2019 	case BUILT_IN_POSIX_MEMALIGN:
2020 	  {
2021 	    tree ptrptr = gimple_call_arg (call, 0);
2022 	    ao_ref dref;
2023 	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2024 					   TYPE_SIZE_UNIT (ptr_type_node));
2025 	    return (refs_may_alias_p_1 (&dref, ref, false)
2026 		    || (flag_errno_math
2027 			&& targetm.ref_may_alias_errno (ref)));
2028 	  }
2029 	/* Freeing memory kills the pointed-to memory.  More importantly
2030 	   the call has to serve as a barrier for moving loads and stores
2031 	   across it.  */
2032 	case BUILT_IN_FREE:
2033 	case BUILT_IN_VA_END:
2034 	  {
2035 	    tree ptr = gimple_call_arg (call, 0);
2036 	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2037 	  }
2038 	/* Realloc serves both as allocation point and deallocation point.  */
2039 	case BUILT_IN_REALLOC:
2040 	  {
2041 	    tree ptr = gimple_call_arg (call, 0);
2042 	    /* Unix98 specifies that errno is set on allocation failure.  */
2043 	    return ((flag_errno_math
2044 		     && targetm.ref_may_alias_errno (ref))
2045 		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2046 	  }
2047 	case BUILT_IN_GAMMA_R:
2048 	case BUILT_IN_GAMMAF_R:
2049 	case BUILT_IN_GAMMAL_R:
2050 	case BUILT_IN_LGAMMA_R:
2051 	case BUILT_IN_LGAMMAF_R:
2052 	case BUILT_IN_LGAMMAL_R:
2053 	  {
2054 	    tree out = gimple_call_arg (call, 1);
2055 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
2056 	      return true;
2057 	    if (flag_errno_math)
2058 	      break;
2059 	    return false;
2060 	  }
2061 	case BUILT_IN_FREXP:
2062 	case BUILT_IN_FREXPF:
2063 	case BUILT_IN_FREXPL:
2064 	case BUILT_IN_MODF:
2065 	case BUILT_IN_MODFF:
2066 	case BUILT_IN_MODFL:
2067 	  {
2068 	    tree out = gimple_call_arg (call, 1);
2069 	    return ptr_deref_may_alias_ref_p_1 (out, ref);
2070 	  }
2071 	case BUILT_IN_REMQUO:
2072 	case BUILT_IN_REMQUOF:
2073 	case BUILT_IN_REMQUOL:
2074 	  {
2075 	    tree out = gimple_call_arg (call, 2);
2076 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
2077 	      return true;
2078 	    if (flag_errno_math)
2079 	      break;
2080 	    return false;
2081 	  }
2082 	case BUILT_IN_SINCOS:
2083 	case BUILT_IN_SINCOSF:
2084 	case BUILT_IN_SINCOSL:
2085 	  {
2086 	    tree sin = gimple_call_arg (call, 1);
2087 	    tree cos = gimple_call_arg (call, 2);
2088 	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2089 		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
2090 	  }
2091 	/* __sync_* builtins and some OpenMP builtins act as threading
2092 	   barriers.  */
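	/* Since another thread may write the memory in question across
	   such a barrier, these conservatively count as a clobber.  */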
2093 #undef DEF_SYNC_BUILTIN
2094 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2095 #include "sync-builtins.def"
2096 #undef DEF_SYNC_BUILTIN
2097 	case BUILT_IN_GOMP_ATOMIC_START:
2098 	case BUILT_IN_GOMP_ATOMIC_END:
2099 	case BUILT_IN_GOMP_BARRIER:
2100 	case BUILT_IN_GOMP_BARRIER_CANCEL:
2101 	case BUILT_IN_GOMP_TASKWAIT:
2102 	case BUILT_IN_GOMP_TASKGROUP_END:
2103 	case BUILT_IN_GOMP_CRITICAL_START:
2104 	case BUILT_IN_GOMP_CRITICAL_END:
2105 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
2106 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
2107 	case BUILT_IN_GOMP_LOOP_END:
2108 	case BUILT_IN_GOMP_LOOP_END_CANCEL:
2109 	case BUILT_IN_GOMP_ORDERED_START:
2110 	case BUILT_IN_GOMP_ORDERED_END:
2111 	case BUILT_IN_GOMP_SECTIONS_END:
2112 	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2113 	case BUILT_IN_GOMP_SINGLE_COPY_START:
2114 	case BUILT_IN_GOMP_SINGLE_COPY_END:
2115 	  return true;
2116 	default:
2117 	  /* Fallthru to general call handling.  */;
2118       }
2119 
2120   /* Check if base is a global static variable that is not written
2121      by the function.  */
2122   if (callee != NULL_TREE
2123       && TREE_CODE (base) == VAR_DECL
2124       && TREE_STATIC (base))
2125     {
2126       struct cgraph_node *node = cgraph_node::get (callee);
2127       bitmap not_written;
2128 
2129       if (node
2130 	  && (not_written = ipa_reference_get_not_written_global (node))
2131 	  && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2132 	return false;
2133     }
2134 
2135   /* Check if the base variable is call-clobbered.  */
2136   if (DECL_P (base))
2137     return pt_solution_includes (gimple_call_clobber_set (call), base);
2138   else if ((TREE_CODE (base) == MEM_REF
2139 	    || TREE_CODE (base) == TARGET_MEM_REF)
2140 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2141     {
2142       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2143       if (!pi)
2144 	return true;
2145 
2146       return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2147     }
2148 
2149   return true;
2150 }
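
/* As an illustration of the above: with -fmath-errno a call to malloc
   may clobber errno but no other visible memory, so
     p = __builtin_malloc (n);
   does not clobber a REF based on a local aggregate (see the
   BUILT_IN_MALLOC case).  */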
2151 
2152 /* If the call in statement CALL may clobber the memory reference REF
2153    return true, otherwise return false.  */
2154 
2155 bool
2156 call_may_clobber_ref_p (gcall *call, tree ref)
2157 {
2158   bool res;
2159   ao_ref r;
2160   ao_ref_init (&r, ref);
2161   res = call_may_clobber_ref_p_1 (call, &r);
2162   if (res)
2163     ++alias_stats.call_may_clobber_ref_p_may_alias;
2164   else
2165     ++alias_stats.call_may_clobber_ref_p_no_alias;
2166   return res;
2167 }
2168 
2169 
2170 /* If the statement STMT may clobber the memory reference REF return true,
2171    otherwise return false.  */
2172 
2173 bool
2174 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
2175 {
2176   if (is_gimple_call (stmt))
2177     {
2178       tree lhs = gimple_call_lhs (stmt);
2179       if (lhs
2180 	  && TREE_CODE (lhs) != SSA_NAME)
2181 	{
2182 	  ao_ref r;
2183 	  ao_ref_init (&r, lhs);
2184 	  if (refs_may_alias_p_1 (ref, &r, true))
2185 	    return true;
2186 	}
2187 
2188       return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2189     }
2190   else if (gimple_assign_single_p (stmt))
2191     {
2192       tree lhs = gimple_assign_lhs (stmt);
2193       if (TREE_CODE (lhs) != SSA_NAME)
2194 	{
2195 	  ao_ref r;
2196 	  ao_ref_init (&r, lhs);
2197 	  return refs_may_alias_p_1 (ref, &r, true);
2198 	}
2199     }
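  /* Conservatively assume an asm statement may clobber any memory.  */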
2200   else if (gimple_code (stmt) == GIMPLE_ASM)
2201     return true;
2202 
2203   return false;
2204 }
2205 
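/* Likewise, but for a plain reference tree REF.  */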
2206 bool
2207 stmt_may_clobber_ref_p (gimple *stmt, tree ref)
2208 {
2209   ao_ref r;
2210   ao_ref_init (&r, ref);
2211   return stmt_may_clobber_ref_p_1 (stmt, &r);
2212 }
2213 
2214 /* If STMT kills the memory reference REF return true, otherwise
2215    return false.  */
2216 
2217 bool
2218 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2219 {
2220   if (!ao_ref_base (ref))
2221     return false;
2222 
2223   if (gimple_has_lhs (stmt)
2224       && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2225       /* The assignment is not necessarily carried out if it can throw
2226 	 and the exception can be caught in the current function, where
2227 	 the previous value could still be inspected.
2228 	 ???  We only need to care about the RHS throwing.  For aggregate
2229 	 assignments or similar calls and non-call exceptions the LHS
2230 	 might throw as well.  */
2231       && !stmt_can_throw_internal (stmt))
2232     {
2233       tree lhs = gimple_get_lhs (stmt);
2234       /* If LHS is literally a base of the access we are done.  */
2235       if (ref->ref)
2236 	{
2237 	  tree base = ref->ref;
2238 	  tree innermost_dropped_array_ref = NULL_TREE;
2239 	  if (handled_component_p (base))
2240 	    {
2241 	      tree saved_lhs0 = NULL_TREE;
2242 	      if (handled_component_p (lhs))
2243 		{
2244 		  saved_lhs0 = TREE_OPERAND (lhs, 0);
2245 		  TREE_OPERAND (lhs, 0) = integer_zero_node;
2246 		}
2247 	      do
2248 		{
2249 		  /* Just compare the outermost handled component; if
2250 		     they are equal we have found a possible common
2251 		     base.  */
2252 		  tree saved_base0 = TREE_OPERAND (base, 0);
2253 		  TREE_OPERAND (base, 0) = integer_zero_node;
2254 		  bool res = operand_equal_p (lhs, base, 0);
2255 		  TREE_OPERAND (base, 0) = saved_base0;
2256 		  if (res)
2257 		    break;
2258 		  /* Remember a dropped array-ref; we must double-check
2259 		     later that it is not at struct end.  */
2260 		  if (TREE_CODE (base) == ARRAY_REF
2261 		      || TREE_CODE (base) == ARRAY_RANGE_REF)
2262 		    innermost_dropped_array_ref = base;
2263 		  /* Otherwise drop handled components of the access.  */
2264 		  base = saved_base0;
2265 		}
2266 	      while (handled_component_p (base));
2267 	      if (saved_lhs0)
2268 		TREE_OPERAND (lhs, 0) = saved_lhs0;
2269 	    }
2270 	  /* Finally check if the lhs has the same address and size as the
2271 	     base candidate of the access.  Watch out if we have dropped
2272 	     an array-ref that was at struct end: in that case ref->ref may
2273 	     be outside of the TYPE_SIZE of its base.  */
2274 	  if ((! innermost_dropped_array_ref
2275 	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
2276 	      && (lhs == base
2277 		  || (((TYPE_SIZE (TREE_TYPE (lhs))
2278 			== TYPE_SIZE (TREE_TYPE (base)))
2279 		       || (TYPE_SIZE (TREE_TYPE (lhs))
2280 			   && TYPE_SIZE (TREE_TYPE (base))
2281 			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2282 					       TYPE_SIZE (TREE_TYPE (base)),
2283 					       0)))
2284 		      && operand_equal_p (lhs, base,
2285 					  OEP_ADDRESS_OF
2286 					  | OEP_MATCH_SIDE_EFFECTS))))
2287 	    return true;
2288 	}
2289 
2290       /* Now look for non-literal equal bases, restricting ourselves
2291          to accesses with constant offset and size.  */
2292       /* For a must-alias check we need to be able to constrain
2293 	 the access properly.  */
2294       if (ref->max_size == -1)
2295 	return false;
2296       HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
2297       bool reverse;
2298       tree base
2299 	= get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
2300       /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2301 	 so base == ref->base does not always hold.  */
2302       if (base != ref->base)
2303 	{
2304 	  /* If both base and ref->base are MEM_REFs, only compare the
2305 	     first operands, and if the second operands aren't equal constants,
2306 	     try to add the offsets into offset and ref_offset.  */
2307 	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2308 	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2309 	    {
2310 	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2311 				       TREE_OPERAND (ref->base, 1)))
2312 		{
2313 		  offset_int off1 = mem_ref_offset (base);
2314 		  off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
2315 		  off1 += offset;
2316 		  offset_int off2 = mem_ref_offset (ref->base);
2317 		  off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
2318 		  off2 += ref_offset;
2319 		  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
2320 		    {
2321 		      offset = off1.to_shwi ();
2322 		      ref_offset = off2.to_shwi ();
2323 		    }
2324 		  else
2325 		    size = -1;
2326 		}
2327 	    }
2328 	  else
2329 	    size = -1;
2330 	}
2331       /* For a must-alias check we need to be able to constrain
2332 	 the access properly.  */
2333       if (size != -1 && size == max_size)
2334 	{
2335 	  if (offset <= ref_offset
2336 	      && offset + size >= ref_offset + ref->max_size)
2337 	    return true;
2338 	}
2339     }
2340 
2341   if (is_gimple_call (stmt))
2342     {
2343       tree callee = gimple_call_fndecl (stmt);
2344       if (callee != NULL_TREE
2345 	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2346 	switch (DECL_FUNCTION_CODE (callee))
2347 	  {
2348 	  case BUILT_IN_FREE:
2349 	    {
2350 	      tree ptr = gimple_call_arg (stmt, 0);
2351 	      tree base = ao_ref_base (ref);
2352 	      if (base && TREE_CODE (base) == MEM_REF
2353 		  && TREE_OPERAND (base, 0) == ptr)
2354 		return true;
2355 	      break;
2356 	    }
2357 
2358 	  case BUILT_IN_MEMCPY:
2359 	  case BUILT_IN_MEMPCPY:
2360 	  case BUILT_IN_MEMMOVE:
2361 	  case BUILT_IN_MEMSET:
2362 	  case BUILT_IN_MEMCPY_CHK:
2363 	  case BUILT_IN_MEMPCPY_CHK:
2364 	  case BUILT_IN_MEMMOVE_CHK:
2365 	  case BUILT_IN_MEMSET_CHK:
2366 	    {
2367 	      /* For a must-alias check we need to be able to constrain
2368 		 the access properly.  */
2369 	      if (ref->max_size == -1)
2370 		return false;
2371 	      tree dest = gimple_call_arg (stmt, 0);
2372 	      tree len = gimple_call_arg (stmt, 2);
2373 	      if (!tree_fits_shwi_p (len))
2374 		return false;
2375 	      tree rbase = ref->base;
2376 	      offset_int roffset = ref->offset;
2377 	      ao_ref dref;
2378 	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
2379 	      tree base = ao_ref_base (&dref);
2380 	      offset_int offset = dref.offset;
2381 	      if (!base || dref.size == -1)
2382 		return false;
2383 	      if (TREE_CODE (base) == MEM_REF)
2384 		{
2385 		  if (TREE_CODE (rbase) != MEM_REF)
2386 		    return false;
2387 		  /* Compare pointers.  */
2388 		  offset += wi::lshift (mem_ref_offset (base),
2389 					LOG2_BITS_PER_UNIT);
2390 		  roffset += wi::lshift (mem_ref_offset (rbase),
2391 					 LOG2_BITS_PER_UNIT);
2392 		  base = TREE_OPERAND (base, 0);
2393 		  rbase = TREE_OPERAND (rbase, 0);
2394 		}
2395 	      if (base == rbase
2396 		  && wi::les_p (offset, roffset)
2397 		  && wi::les_p (roffset + ref->max_size,
2398 				offset + wi::lshift (wi::to_offset (len),
2399 						     LOG2_BITS_PER_UNIT)))
2400 		return true;
2401 	      break;
2402 	    }
2403 
2404 	  case BUILT_IN_VA_END:
2405 	    {
2406 	      tree ptr = gimple_call_arg (stmt, 0);
2407 	      if (TREE_CODE (ptr) == ADDR_EXPR)
2408 		{
2409 		  tree base = ao_ref_base (ref);
2410 		  if (TREE_OPERAND (ptr, 0) == base)
2411 		    return true;
2412 		}
2413 	      break;
2414 	    }
2415 
2416 	  default:;
2417 	  }
2418     }
2419   return false;
2420 }
2421 
2422 bool
2423 stmt_kills_ref_p (gimple *stmt, tree ref)
2424 {
2425   ao_ref r;
2426   ao_ref_init (&r, ref);
2427   return stmt_kills_ref_p (stmt, &r);
2428 }
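
/* For example, for a REF that accesses bytes 4 to 7 of a decl a,
     __builtin_memset (&a, 0, 16);
   is recognized as killing REF by the memset handling above, provided
   REF is constrained well enough (an illustrative sketch only).  */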
2429 
2430 
2431 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2432    TARGET, or a statement clobbering the memory reference REF, in which
2433    case false is returned.  The walk starts with VUSE, one argument of PHI.  */
2434 
2435 static bool
2436 maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
2437 		  tree vuse, unsigned int *cnt, bitmap *visited,
2438 		  bool abort_on_visited,
2439 		  void *(*translate)(ao_ref *, tree, void *, bool *),
2440 		  void *data)
2441 {
2442   basic_block bb = gimple_bb (phi);
2443 
2444   if (!*visited)
2445     *visited = BITMAP_ALLOC (NULL);
2446 
2447   bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2448 
2449   /* Walk until we hit the target.  */
2450   while (vuse != target)
2451     {
2452       gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2453       /* Recurse for PHI nodes.  */
2454       if (gimple_code (def_stmt) == GIMPLE_PHI)
2455 	{
2456 	  /* An already visited PHI node ends the walk successfully.  */
2457 	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2458 	    return !abort_on_visited;
2459 	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2460 					   visited, abort_on_visited,
2461 					   translate, data);
2462 	  if (!vuse)
2463 	    return false;
2464 	  continue;
2465 	}
2466       else if (gimple_nop_p (def_stmt))
2467 	return false;
2468       else
2469 	{
2470 	  /* A clobbering statement or the end of the IL ends the walk unsuccessfully.  */
2471 	  ++*cnt;
2472 	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2473 	    {
2474 	      bool disambiguate_only = true;
2475 	      if (translate
2476 		  && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2477 		;
2478 	      else
2479 		return false;
2480 	    }
2481 	}
2482       /* If we reach a new basic-block see if we already skipped it
2483          in a previous walk that ended successfully.  */
2484       if (gimple_bb (def_stmt) != bb)
2485 	{
2486 	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2487 	    return !abort_on_visited;
2488 	  bb = gimple_bb (def_stmt);
2489 	}
2490       vuse = gimple_vuse (def_stmt);
2491     }
2492   return true;
2493 }
2494 
2495 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2496    until we hit the phi argument definition that dominates the other one.
2497    Return that, or NULL_TREE if there is no such definition.  */
2498 
2499 static tree
2500 get_continuation_for_phi_1 (gimple *phi, tree arg0, tree arg1,
2501 			    ao_ref *ref, unsigned int *cnt,
2502 			    bitmap *visited, bool abort_on_visited,
2503 			    void *(*translate)(ao_ref *, tree, void *, bool *),
2504 			    void *data)
2505 {
2506   gimple *def0 = SSA_NAME_DEF_STMT (arg0);
2507   gimple *def1 = SSA_NAME_DEF_STMT (arg1);
2508   tree common_vuse;
2509 
2510   if (arg0 == arg1)
2511     return arg0;
2512   else if (gimple_nop_p (def0)
2513 	   || (!gimple_nop_p (def1)
2514 	       && dominated_by_p (CDI_DOMINATORS,
2515 				  gimple_bb (def1), gimple_bb (def0))))
2516     {
2517       if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
2518 			    visited, abort_on_visited, translate, data))
2519 	return arg0;
2520     }
2521   else if (gimple_nop_p (def1)
2522 	   || dominated_by_p (CDI_DOMINATORS,
2523 			      gimple_bb (def0), gimple_bb (def1)))
2524     {
2525       if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2526 			    visited, abort_on_visited, translate, data))
2527 	return arg1;
2528     }
2529   /* Special case of a diamond:
2530        MEM_1 = ...
2531        goto (cond) ? L1 : L2
2532        L1: store1 = ...    #MEM_2 = vuse(MEM_1)
2533 	   goto L3
2534        L2: store2 = ...    #MEM_3 = vuse(MEM_1)
2535        L3: MEM_4 = PHI<MEM_2, MEM_3>
2536      We were called with the PHI at L3; MEM_2 and MEM_3 don't
2537      dominate each other, but we can still easily skip this PHI node
2538      if we recognize that the vuse MEM operand is the same for both,
2539      and that we can skip both statements (they don't clobber us).
2540      This is still linear.  Don't use maybe_skip_until, which might
2541      potentially be slow.  */
2542   else if ((common_vuse = gimple_vuse (def0))
2543 	   && common_vuse == gimple_vuse (def1))
2544     {
2545       bool disambiguate_only = true;
2546       *cnt += 2;
2547       if ((!stmt_may_clobber_ref_p_1 (def0, ref)
2548 	   || (translate
2549 	       && (*translate) (ref, arg0, data, &disambiguate_only) == NULL))
2550 	  && (!stmt_may_clobber_ref_p_1 (def1, ref)
2551 	      || (translate
2552 		  && (*translate) (ref, arg1, data, &disambiguate_only) == NULL)))
2553 	return common_vuse;
2554     }
2555 
2556   return NULL_TREE;
2557 }
2558 
2559 
2560 /* Starting from a PHI node for the virtual operand of the memory reference
2561    REF, find a continuation virtual operand that allows the walk to continue
2562    over statements dominating PHI, skipping only statements that cannot
2563    possibly clobber REF.  Increments *CNT for each alias disambiguation done.
2564    Returns NULL_TREE if no suitable virtual operand can be found.  */
2565 
2566 tree
2567 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2568 			  unsigned int *cnt, bitmap *visited,
2569 			  bool abort_on_visited,
2570 			  void *(*translate)(ao_ref *, tree, void *, bool *),
2571 			  void *data)
2572 {
2573   unsigned nargs = gimple_phi_num_args (phi);
2574 
2575   /* A single-argument PHI we can simply look through.  */
2576   if (nargs == 1)
2577     return PHI_ARG_DEF (phi, 0);
2578 
2579   /* For two or more arguments try to pairwise skip non-aliasing code
2580      until we hit the phi argument definition that dominates the other one.  */
2581   else if (nargs >= 2)
2582     {
2583       tree arg0, arg1;
2584       unsigned i;
2585 
2586       /* Find a candidate for the virtual operand whose definition
2587 	 dominates those of all others.  */
2588       arg0 = PHI_ARG_DEF (phi, 0);
2589       if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2590 	for (i = 1; i < nargs; ++i)
2591 	  {
2592 	    arg1 = PHI_ARG_DEF (phi, i);
2593 	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2594 	      {
2595 		arg0 = arg1;
2596 		break;
2597 	      }
2598 	    if (dominated_by_p (CDI_DOMINATORS,
2599 				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2600 				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2601 	      arg0 = arg1;
2602 	  }
2603 
2604       /* Then pairwise reduce against the found candidate.  */
2605       for (i = 0; i < nargs; ++i)
2606 	{
2607 	  arg1 = PHI_ARG_DEF (phi, i);
2608 	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2609 					     cnt, visited, abort_on_visited,
2610 					     translate, data);
2611 	  if (!arg0)
2612 	    return NULL_TREE;
2613 	}
2614 
2615       return arg0;
2616     }
2617 
2618   return NULL_TREE;
2619 }
2620 
2621 /* Based on the memory reference REF and its virtual use VUSE call
2622    WALKER for each virtual use that is equivalent to VUSE, including VUSE
2623    itself; that is, for each virtual use whose defining statement
2624    does not clobber REF.
2625 
2626    WALKER is called with REF, the current virtual use and DATA.  If
2627    WALKER returns non-NULL the walk stops and its result is returned.
2628    At the end of a non-successful walk NULL is returned.
2629 
2630    TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
2631    use whose defining statement may clobber REF, and DATA.
2632    If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2633    If TRANSLATE returns non-NULL the walk stops and its result is returned.
2634    If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2635    to adjust REF and *DATA to make that valid.
2636 
2637    VALUEIZE, if non-NULL, is called with the next VUSE that is considered,
2638    and its return value is substituted for it.
2639    implement optimistic value-numbering for example.  Note that the
2640    VUSE argument is assumed to be valueized already.
2641 
2642    TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
2643 
2644 void *
2645 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2646 			void *(*walker)(ao_ref *, tree, unsigned int, void *),
2647 			void *(*translate)(ao_ref *, tree, void *, bool *),
2648 			tree (*valueize)(tree),
2649 			void *data)
2650 {
2651   bitmap visited = NULL;
2652   void *res;
2653   unsigned int cnt = 0;
2654   bool translated = false;
2655 
2656   timevar_push (TV_ALIAS_STMT_WALK);
2657 
2658   do
2659     {
2660       gimple *def_stmt;
2661 
2662       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
2663       res = (*walker) (ref, vuse, cnt, data);
2664       /* Abort walk.  */
2665       if (res == (void *)-1)
2666 	{
2667 	  res = NULL;
2668 	  break;
2669 	}
2670       /* Lookup succeeded.  */
2671       else if (res != NULL)
2672 	break;
2673 
2674       if (valueize)
2675 	vuse = valueize (vuse);
2676       def_stmt = SSA_NAME_DEF_STMT (vuse);
2677       if (gimple_nop_p (def_stmt))
2678 	break;
2679       else if (gimple_code (def_stmt) == GIMPLE_PHI)
2680 	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2681 					 &visited, translated, translate, data);
2682       else
2683 	{
2684 	  cnt++;
2685 	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2686 	    {
2687 	      if (!translate)
2688 		break;
2689 	      bool disambiguate_only = false;
2690 	      res = (*translate) (ref, vuse, data, &disambiguate_only);
2691 	      /* Failed lookup and translation.  */
2692 	      if (res == (void *)-1)
2693 		{
2694 		  res = NULL;
2695 		  break;
2696 		}
2697 	      /* Lookup succeeded.  */
2698 	      else if (res != NULL)
2699 		break;
2700 	      /* Translation succeeded, continue walking.  */
2701 	      translated = translated || !disambiguate_only;
2702 	    }
2703 	  vuse = gimple_vuse (def_stmt);
2704 	}
2705     }
2706   while (vuse);
2707 
2708   if (visited)
2709     BITMAP_FREE (visited);
2710 
2711   timevar_pop (TV_ALIAS_STMT_WALK);
2712 
2713   return res;
2714 }
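
/* A minimal hypothetical WALKER for the above (illustrative only;
   my_walker and interesting_p are made-up names, real clients such as
   value-numbering pass their own callbacks and DATA):

     static void *
     my_walker (ao_ref *ref, tree vuse, unsigned int cnt, void *data)
     {
       if (cnt > 100)
	 return (void *)-1;	(abort, the walk yields NULL)
       if (interesting_p (vuse, data))
	 return data;		(stop, the walk yields this result)
       return NULL;		(continue to the next vuse)
     }

     res = walk_non_aliased_vuses (&r, vuse, my_walker, NULL, NULL, data);  */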
2715 
2716 
2717 /* Based on the memory reference REF call WALKER for each vdef whose
2718    defining statement may clobber REF, starting with VDEF.  If REF
2719    is NULL_TREE, each defining statement is visited.
2720 
2721    WALKER is called with REF, the current vdef and DATA.  If WALKER
2722    returns true the walk is stopped, otherwise it continues.
2723 
2724    If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2725    The pointer may be NULL, in which case this information is not tracked.
2726 
2727    At PHI nodes walk_aliased_vdefs forks into one walk for each
2728    PHI argument (but only one walk continues on merge points); each
2729    walk stops as soon as WALKER returns true.
2730 
2731    The function returns the number of statements walked.  */
2732 
2733 static unsigned int
2734 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2735 		      bool (*walker)(ao_ref *, tree, void *), void *data,
2736 		      bitmap *visited, unsigned int cnt,
2737 		      bool *function_entry_reached)
2738 {
2739   do
2740     {
2741       gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2742 
2743       if (*visited
2744 	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2745 	return cnt;
2746 
2747       if (gimple_nop_p (def_stmt))
2748 	{
2749 	  if (function_entry_reached)
2750 	    *function_entry_reached = true;
2751 	  return cnt;
2752 	}
2753       else if (gimple_code (def_stmt) == GIMPLE_PHI)
2754 	{
2755 	  unsigned i;
2756 	  if (!*visited)
2757 	    *visited = BITMAP_ALLOC (NULL);
2758 	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2759 	    cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2760 					 walker, data, visited, 0,
2761 					 function_entry_reached);
2762 	  return cnt;
2763 	}
2764 
2765       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
2766       cnt++;
2767       if ((!ref
2768 	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2769 	  && (*walker) (ref, vdef, data))
2770 	return cnt;
2771 
2772       vdef = gimple_vuse (def_stmt);
2773     }
2774   while (1);
2775 }
2776 
2777 unsigned int
2778 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2779 		    bool (*walker)(ao_ref *, tree, void *), void *data,
2780 		    bitmap *visited,
2781 		    bool *function_entry_reached)
2782 {
2783   bitmap local_visited = NULL;
2784   unsigned int ret;
2785 
2786   timevar_push (TV_ALIAS_STMT_WALK);
2787 
2788   if (function_entry_reached)
2789     *function_entry_reached = false;
2790 
2791   ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2792 			      visited ? visited : &local_visited, 0,
2793 			      function_entry_reached);
2794   if (local_visited)
2795     BITMAP_FREE (local_visited);
2796 
2797   timevar_pop (TV_ALIAS_STMT_WALK);
2798 
2799   return ret;
2800 }
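
/* A hypothetical use of the above, counting the vdefs whose statements
   may clobber REF (illustrative only; count_clobbers is a made-up name):

     static bool
     count_clobbers (ao_ref *ref, tree vdef, void *data)
     {
       ++*(unsigned *) data;
       return false;		(false continues the walk)
     }

     unsigned n = 0;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), count_clobbers, &n,
			 NULL, NULL);  */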
2801 
2802