xref: /dflybsd-src/contrib/gcc-8.0/gcc/tree-ssa-alias.c (revision 38fd149817dfbff97799f62fcb70be98c4e32523)
1*38fd1498Szrj /* Alias analysis for trees.
2*38fd1498Szrj    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3*38fd1498Szrj    Contributed by Diego Novillo <dnovillo@redhat.com>
4*38fd1498Szrj 
5*38fd1498Szrj This file is part of GCC.
6*38fd1498Szrj 
7*38fd1498Szrj GCC is free software; you can redistribute it and/or modify
8*38fd1498Szrj it under the terms of the GNU General Public License as published by
9*38fd1498Szrj the Free Software Foundation; either version 3, or (at your option)
10*38fd1498Szrj any later version.
11*38fd1498Szrj 
12*38fd1498Szrj GCC is distributed in the hope that it will be useful,
13*38fd1498Szrj but WITHOUT ANY WARRANTY; without even the implied warranty of
14*38fd1498Szrj MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15*38fd1498Szrj GNU General Public License for more details.
16*38fd1498Szrj 
17*38fd1498Szrj You should have received a copy of the GNU General Public License
18*38fd1498Szrj along with GCC; see the file COPYING3.  If not see
19*38fd1498Szrj <http://www.gnu.org/licenses/>.  */
20*38fd1498Szrj 
21*38fd1498Szrj #include "config.h"
22*38fd1498Szrj #include "system.h"
23*38fd1498Szrj #include "coretypes.h"
24*38fd1498Szrj #include "backend.h"
25*38fd1498Szrj #include "target.h"
26*38fd1498Szrj #include "rtl.h"
27*38fd1498Szrj #include "tree.h"
28*38fd1498Szrj #include "gimple.h"
29*38fd1498Szrj #include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
30*38fd1498Szrj #include "ssa.h"
31*38fd1498Szrj #include "cgraph.h"
32*38fd1498Szrj #include "tree-pretty-print.h"
33*38fd1498Szrj #include "alias.h"
34*38fd1498Szrj #include "fold-const.h"
35*38fd1498Szrj #include "langhooks.h"
36*38fd1498Szrj #include "dumpfile.h"
37*38fd1498Szrj #include "tree-eh.h"
38*38fd1498Szrj #include "tree-dfa.h"
39*38fd1498Szrj #include "ipa-reference.h"
40*38fd1498Szrj #include "varasm.h"
41*38fd1498Szrj 
42*38fd1498Szrj /* Broad overview of how alias analysis on gimple works:
43*38fd1498Szrj 
44*38fd1498Szrj    Statements clobbering or using memory are linked through the
45*38fd1498Szrj    virtual operand factored use-def chain.  The virtual operand
46*38fd1498Szrj    is unique per function, its symbol is accessible via gimple_vop (cfun).
47*38fd1498Szrj    Virtual operands are used for efficiently walking memory statements
48*38fd1498Szrj    in the gimple IL and are useful for things like value-numbering as
49*38fd1498Szrj    a generation count for memory references.
50*38fd1498Szrj 
51*38fd1498Szrj    SSA_NAME pointers may have associated points-to information
52*38fd1498Szrj    accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
53*38fd1498Szrj    points-to information is (re-)computed by the TODO_rebuild_alias
54*38fd1498Szrj    pass manager todo.  Points-to information is also used for more
55*38fd1498Szrj    precise tracking of call-clobbered and call-used variables and
56*38fd1498Szrj    related disambiguations.
57*38fd1498Szrj 
58*38fd1498Szrj    This file contains functions for disambiguating memory references,
59*38fd1498Szrj    the so-called alias oracle, and tools for walking the gimple IL.
60*38fd1498Szrj 
61*38fd1498Szrj    The main alias-oracle entry-points are
62*38fd1498Szrj 
63*38fd1498Szrj    bool stmt_may_clobber_ref_p (gimple *, tree)
64*38fd1498Szrj 
65*38fd1498Szrj      This function queries if a statement may invalidate (parts of)
66*38fd1498Szrj      the memory designated by the reference tree argument.
67*38fd1498Szrj 
68*38fd1498Szrj    bool ref_maybe_used_by_stmt_p (gimple *, tree)
69*38fd1498Szrj 
70*38fd1498Szrj      This function queries if a statement may need (parts of) the
71*38fd1498Szrj      memory designated by the reference tree argument.
72*38fd1498Szrj 
73*38fd1498Szrj    There are variants of these functions that only handle the call
74*38fd1498Szrj    part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75*38fd1498Szrj    Note that these do not disambiguate against a possible call lhs.
76*38fd1498Szrj 
77*38fd1498Szrj    bool refs_may_alias_p (tree, tree)
78*38fd1498Szrj 
79*38fd1498Szrj      This function tries to disambiguate two reference trees.
80*38fd1498Szrj 
81*38fd1498Szrj    bool ptr_deref_may_alias_global_p (tree)
82*38fd1498Szrj 
83*38fd1498Szrj      This function queries if dereferencing a pointer variable may
84*38fd1498Szrj      alias global memory.
85*38fd1498Szrj 
86*38fd1498Szrj    More low-level disambiguators are available and documented in
87*38fd1498Szrj    this file.  Low-level disambiguators dealing with points-to
88*38fd1498Szrj    information are in tree-ssa-structalias.c.  */
89*38fd1498Szrj 
90*38fd1498Szrj 
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.

   Each disambiguator keeps a pair of counters: the number of queries
   it answered "may alias" and the number it successfully disambiguated
   ("no alias").  The sum of a pair is the total number of queries that
   reached that disambiguator.  Dumped by dump_alias_stats below.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;
102*38fd1498Szrj 
103*38fd1498Szrj void
104*38fd1498Szrj dump_alias_stats (FILE *s)
105*38fd1498Szrj {
106*38fd1498Szrj   fprintf (s, "\nAlias oracle query stats:\n");
107*38fd1498Szrj   fprintf (s, "  refs_may_alias_p: "
108*38fd1498Szrj 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
109*38fd1498Szrj 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
110*38fd1498Szrj 	   alias_stats.refs_may_alias_p_no_alias,
111*38fd1498Szrj 	   alias_stats.refs_may_alias_p_no_alias
112*38fd1498Szrj 	   + alias_stats.refs_may_alias_p_may_alias);
113*38fd1498Szrj   fprintf (s, "  ref_maybe_used_by_call_p: "
114*38fd1498Szrj 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
115*38fd1498Szrj 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
116*38fd1498Szrj 	   alias_stats.ref_maybe_used_by_call_p_no_alias,
117*38fd1498Szrj 	   alias_stats.refs_may_alias_p_no_alias
118*38fd1498Szrj 	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
119*38fd1498Szrj   fprintf (s, "  call_may_clobber_ref_p: "
120*38fd1498Szrj 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
121*38fd1498Szrj 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
122*38fd1498Szrj 	   alias_stats.call_may_clobber_ref_p_no_alias,
123*38fd1498Szrj 	   alias_stats.call_may_clobber_ref_p_no_alias
124*38fd1498Szrj 	   + alias_stats.call_may_clobber_ref_p_may_alias);
125*38fd1498Szrj   dump_alias_stats_in_alias_c (s);
126*38fd1498Szrj }
127*38fd1498Szrj 
128*38fd1498Szrj 
129*38fd1498Szrj /* Return true, if dereferencing PTR may alias with a global variable.  */
130*38fd1498Szrj 
131*38fd1498Szrj bool
132*38fd1498Szrj ptr_deref_may_alias_global_p (tree ptr)
133*38fd1498Szrj {
134*38fd1498Szrj   struct ptr_info_def *pi;
135*38fd1498Szrj 
136*38fd1498Szrj   /* If we end up with a pointer constant here that may point
137*38fd1498Szrj      to global memory.  */
138*38fd1498Szrj   if (TREE_CODE (ptr) != SSA_NAME)
139*38fd1498Szrj     return true;
140*38fd1498Szrj 
141*38fd1498Szrj   pi = SSA_NAME_PTR_INFO (ptr);
142*38fd1498Szrj 
143*38fd1498Szrj   /* If we do not have points-to information for this variable,
144*38fd1498Szrj      we have to punt.  */
145*38fd1498Szrj   if (!pi)
146*38fd1498Szrj     return true;
147*38fd1498Szrj 
148*38fd1498Szrj   /* ???  This does not use TBAA to prune globals ptr may not access.  */
149*38fd1498Szrj   return pt_solution_includes_global (&pi->pt);
150*38fd1498Szrj }
151*38fd1498Szrj 
152*38fd1498Szrj /* Return true if dereferencing PTR may alias DECL.
153*38fd1498Szrj    The caller is responsible for applying TBAA to see if PTR
154*38fd1498Szrj    may access DECL at all.  */
155*38fd1498Szrj 
156*38fd1498Szrj static bool
157*38fd1498Szrj ptr_deref_may_alias_decl_p (tree ptr, tree decl)
158*38fd1498Szrj {
159*38fd1498Szrj   struct ptr_info_def *pi;
160*38fd1498Szrj 
161*38fd1498Szrj   /* Conversions are irrelevant for points-to information and
162*38fd1498Szrj      data-dependence analysis can feed us those.  */
163*38fd1498Szrj   STRIP_NOPS (ptr);
164*38fd1498Szrj 
165*38fd1498Szrj   /* Anything we do not explicilty handle aliases.  */
166*38fd1498Szrj   if ((TREE_CODE (ptr) != SSA_NAME
167*38fd1498Szrj        && TREE_CODE (ptr) != ADDR_EXPR
168*38fd1498Szrj        && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
169*38fd1498Szrj       || !POINTER_TYPE_P (TREE_TYPE (ptr))
170*38fd1498Szrj       || (!VAR_P (decl)
171*38fd1498Szrj 	  && TREE_CODE (decl) != PARM_DECL
172*38fd1498Szrj 	  && TREE_CODE (decl) != RESULT_DECL))
173*38fd1498Szrj     return true;
174*38fd1498Szrj 
175*38fd1498Szrj   /* Disregard pointer offsetting.  */
176*38fd1498Szrj   if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
177*38fd1498Szrj     {
178*38fd1498Szrj       do
179*38fd1498Szrj 	{
180*38fd1498Szrj 	  ptr = TREE_OPERAND (ptr, 0);
181*38fd1498Szrj 	}
182*38fd1498Szrj       while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
183*38fd1498Szrj       return ptr_deref_may_alias_decl_p (ptr, decl);
184*38fd1498Szrj     }
185*38fd1498Szrj 
186*38fd1498Szrj   /* ADDR_EXPR pointers either just offset another pointer or directly
187*38fd1498Szrj      specify the pointed-to set.  */
188*38fd1498Szrj   if (TREE_CODE (ptr) == ADDR_EXPR)
189*38fd1498Szrj     {
190*38fd1498Szrj       tree base = get_base_address (TREE_OPERAND (ptr, 0));
191*38fd1498Szrj       if (base
192*38fd1498Szrj 	  && (TREE_CODE (base) == MEM_REF
193*38fd1498Szrj 	      || TREE_CODE (base) == TARGET_MEM_REF))
194*38fd1498Szrj 	ptr = TREE_OPERAND (base, 0);
195*38fd1498Szrj       else if (base
196*38fd1498Szrj 	       && DECL_P (base))
197*38fd1498Szrj 	return compare_base_decls (base, decl) != 0;
198*38fd1498Szrj       else if (base
199*38fd1498Szrj 	       && CONSTANT_CLASS_P (base))
200*38fd1498Szrj 	return false;
201*38fd1498Szrj       else
202*38fd1498Szrj 	return true;
203*38fd1498Szrj     }
204*38fd1498Szrj 
205*38fd1498Szrj   /* Non-aliased variables can not be pointed to.  */
206*38fd1498Szrj   if (!may_be_aliased (decl))
207*38fd1498Szrj     return false;
208*38fd1498Szrj 
209*38fd1498Szrj   /* If we do not have useful points-to information for this pointer
210*38fd1498Szrj      we cannot disambiguate anything else.  */
211*38fd1498Szrj   pi = SSA_NAME_PTR_INFO (ptr);
212*38fd1498Szrj   if (!pi)
213*38fd1498Szrj     return true;
214*38fd1498Szrj 
215*38fd1498Szrj   return pt_solution_includes (&pi->pt, decl);
216*38fd1498Szrj }
217*38fd1498Szrj 
218*38fd1498Szrj /* Return true if dereferenced PTR1 and PTR2 may alias.
219*38fd1498Szrj    The caller is responsible for applying TBAA to see if accesses
220*38fd1498Szrj    through PTR1 and PTR2 may conflict at all.  */
221*38fd1498Szrj 
222*38fd1498Szrj bool
223*38fd1498Szrj ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
224*38fd1498Szrj {
225*38fd1498Szrj   struct ptr_info_def *pi1, *pi2;
226*38fd1498Szrj 
227*38fd1498Szrj   /* Conversions are irrelevant for points-to information and
228*38fd1498Szrj      data-dependence analysis can feed us those.  */
229*38fd1498Szrj   STRIP_NOPS (ptr1);
230*38fd1498Szrj   STRIP_NOPS (ptr2);
231*38fd1498Szrj 
232*38fd1498Szrj   /* Disregard pointer offsetting.  */
233*38fd1498Szrj   if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
234*38fd1498Szrj     {
235*38fd1498Szrj       do
236*38fd1498Szrj 	{
237*38fd1498Szrj 	  ptr1 = TREE_OPERAND (ptr1, 0);
238*38fd1498Szrj 	}
239*38fd1498Szrj       while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
240*38fd1498Szrj       return ptr_derefs_may_alias_p (ptr1, ptr2);
241*38fd1498Szrj     }
242*38fd1498Szrj   if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
243*38fd1498Szrj     {
244*38fd1498Szrj       do
245*38fd1498Szrj 	{
246*38fd1498Szrj 	  ptr2 = TREE_OPERAND (ptr2, 0);
247*38fd1498Szrj 	}
248*38fd1498Szrj       while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
249*38fd1498Szrj       return ptr_derefs_may_alias_p (ptr1, ptr2);
250*38fd1498Szrj     }
251*38fd1498Szrj 
252*38fd1498Szrj   /* ADDR_EXPR pointers either just offset another pointer or directly
253*38fd1498Szrj      specify the pointed-to set.  */
254*38fd1498Szrj   if (TREE_CODE (ptr1) == ADDR_EXPR)
255*38fd1498Szrj     {
256*38fd1498Szrj       tree base = get_base_address (TREE_OPERAND (ptr1, 0));
257*38fd1498Szrj       if (base
258*38fd1498Szrj 	  && (TREE_CODE (base) == MEM_REF
259*38fd1498Szrj 	      || TREE_CODE (base) == TARGET_MEM_REF))
260*38fd1498Szrj 	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
261*38fd1498Szrj       else if (base
262*38fd1498Szrj 	       && DECL_P (base))
263*38fd1498Szrj 	return ptr_deref_may_alias_decl_p (ptr2, base);
264*38fd1498Szrj       else
265*38fd1498Szrj 	return true;
266*38fd1498Szrj     }
267*38fd1498Szrj   if (TREE_CODE (ptr2) == ADDR_EXPR)
268*38fd1498Szrj     {
269*38fd1498Szrj       tree base = get_base_address (TREE_OPERAND (ptr2, 0));
270*38fd1498Szrj       if (base
271*38fd1498Szrj 	  && (TREE_CODE (base) == MEM_REF
272*38fd1498Szrj 	      || TREE_CODE (base) == TARGET_MEM_REF))
273*38fd1498Szrj 	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
274*38fd1498Szrj       else if (base
275*38fd1498Szrj 	       && DECL_P (base))
276*38fd1498Szrj 	return ptr_deref_may_alias_decl_p (ptr1, base);
277*38fd1498Szrj       else
278*38fd1498Szrj 	return true;
279*38fd1498Szrj     }
280*38fd1498Szrj 
281*38fd1498Szrj   /* From here we require SSA name pointers.  Anything else aliases.  */
282*38fd1498Szrj   if (TREE_CODE (ptr1) != SSA_NAME
283*38fd1498Szrj       || TREE_CODE (ptr2) != SSA_NAME
284*38fd1498Szrj       || !POINTER_TYPE_P (TREE_TYPE (ptr1))
285*38fd1498Szrj       || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
286*38fd1498Szrj     return true;
287*38fd1498Szrj 
288*38fd1498Szrj   /* We may end up with two empty points-to solutions for two same pointers.
289*38fd1498Szrj      In this case we still want to say both pointers alias, so shortcut
290*38fd1498Szrj      that here.  */
291*38fd1498Szrj   if (ptr1 == ptr2)
292*38fd1498Szrj     return true;
293*38fd1498Szrj 
294*38fd1498Szrj   /* If we do not have useful points-to information for either pointer
295*38fd1498Szrj      we cannot disambiguate anything else.  */
296*38fd1498Szrj   pi1 = SSA_NAME_PTR_INFO (ptr1);
297*38fd1498Szrj   pi2 = SSA_NAME_PTR_INFO (ptr2);
298*38fd1498Szrj   if (!pi1 || !pi2)
299*38fd1498Szrj     return true;
300*38fd1498Szrj 
301*38fd1498Szrj   /* ???  This does not use TBAA to prune decls from the intersection
302*38fd1498Szrj      that not both pointers may access.  */
303*38fd1498Szrj   return pt_solutions_intersect (&pi1->pt, &pi2->pt);
304*38fd1498Szrj }
305*38fd1498Szrj 
306*38fd1498Szrj /* Return true if dereferencing PTR may alias *REF.
307*38fd1498Szrj    The caller is responsible for applying TBAA to see if PTR
308*38fd1498Szrj    may access *REF at all.  */
309*38fd1498Szrj 
310*38fd1498Szrj static bool
311*38fd1498Szrj ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
312*38fd1498Szrj {
313*38fd1498Szrj   tree base = ao_ref_base (ref);
314*38fd1498Szrj 
315*38fd1498Szrj   if (TREE_CODE (base) == MEM_REF
316*38fd1498Szrj       || TREE_CODE (base) == TARGET_MEM_REF)
317*38fd1498Szrj     return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
318*38fd1498Szrj   else if (DECL_P (base))
319*38fd1498Szrj     return ptr_deref_may_alias_decl_p (ptr, base);
320*38fd1498Szrj 
321*38fd1498Szrj   return true;
322*38fd1498Szrj }
323*38fd1498Szrj 
/* Returns true if PTR1 and PTR2 compare unequal because of points-to,
   i.e. if the points-to machinery can prove the two pointers never
   have the same value.  Returns false ("don't know") otherwise.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      /* No recognizable base: give up rather than guess.  */
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	/* &*p reduces to the inner pointer p.  */
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object so the SSA name, if any, is ptr2's
     side; only the (obj1, SSA ptr2) combination is handled below.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
         See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      /* obj1 and ptr2 compare unequal iff ptr2 provably does not
	 point to obj1.  */
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ???  We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
398*38fd1498Szrj 
399*38fd1498Szrj /* Returns whether reference REF to BASE may refer to global memory.  */
400*38fd1498Szrj 
401*38fd1498Szrj static bool
402*38fd1498Szrj ref_may_alias_global_p_1 (tree base)
403*38fd1498Szrj {
404*38fd1498Szrj   if (DECL_P (base))
405*38fd1498Szrj     return is_global_var (base);
406*38fd1498Szrj   else if (TREE_CODE (base) == MEM_REF
407*38fd1498Szrj 	   || TREE_CODE (base) == TARGET_MEM_REF)
408*38fd1498Szrj     return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
409*38fd1498Szrj   return true;
410*38fd1498Szrj }
411*38fd1498Szrj 
412*38fd1498Szrj bool
413*38fd1498Szrj ref_may_alias_global_p (ao_ref *ref)
414*38fd1498Szrj {
415*38fd1498Szrj   tree base = ao_ref_base (ref);
416*38fd1498Szrj   return ref_may_alias_global_p_1 (base);
417*38fd1498Szrj }
418*38fd1498Szrj 
419*38fd1498Szrj bool
420*38fd1498Szrj ref_may_alias_global_p (tree ref)
421*38fd1498Szrj {
422*38fd1498Szrj   tree base = get_base_address (ref);
423*38fd1498Szrj   return ref_may_alias_global_p_1 (base);
424*38fd1498Szrj }
425*38fd1498Szrj 
426*38fd1498Szrj /* Return true whether STMT may clobber global memory.  */
427*38fd1498Szrj 
428*38fd1498Szrj bool
429*38fd1498Szrj stmt_may_clobber_global_p (gimple *stmt)
430*38fd1498Szrj {
431*38fd1498Szrj   tree lhs;
432*38fd1498Szrj 
433*38fd1498Szrj   if (!gimple_vdef (stmt))
434*38fd1498Szrj     return false;
435*38fd1498Szrj 
436*38fd1498Szrj   /* ???  We can ask the oracle whether an artificial pointer
437*38fd1498Szrj      dereference with a pointer with points-to information covering
438*38fd1498Szrj      all global memory (what about non-address taken memory?) maybe
439*38fd1498Szrj      clobbered by this call.  As there is at the moment no convenient
440*38fd1498Szrj      way of doing that without generating garbage do some manual
441*38fd1498Szrj      checking instead.
442*38fd1498Szrj      ???  We could make a NULL ao_ref argument to the various
443*38fd1498Szrj      predicates special, meaning any global memory.  */
444*38fd1498Szrj 
445*38fd1498Szrj   switch (gimple_code (stmt))
446*38fd1498Szrj     {
447*38fd1498Szrj     case GIMPLE_ASSIGN:
448*38fd1498Szrj       lhs = gimple_assign_lhs (stmt);
449*38fd1498Szrj       return (TREE_CODE (lhs) != SSA_NAME
450*38fd1498Szrj 	      && ref_may_alias_global_p (lhs));
451*38fd1498Szrj     case GIMPLE_CALL:
452*38fd1498Szrj       return true;
453*38fd1498Szrj     default:
454*38fd1498Szrj       return true;
455*38fd1498Szrj     }
456*38fd1498Szrj }
457*38fd1498Szrj 
458*38fd1498Szrj 
459*38fd1498Szrj /* Dump alias information on FILE.  */
460*38fd1498Szrj 
461*38fd1498Szrj void
462*38fd1498Szrj dump_alias_info (FILE *file)
463*38fd1498Szrj {
464*38fd1498Szrj   unsigned i;
465*38fd1498Szrj   tree ptr;
466*38fd1498Szrj   const char *funcname
467*38fd1498Szrj     = lang_hooks.decl_printable_name (current_function_decl, 2);
468*38fd1498Szrj   tree var;
469*38fd1498Szrj 
470*38fd1498Szrj   fprintf (file, "\n\nAlias information for %s\n\n", funcname);
471*38fd1498Szrj 
472*38fd1498Szrj   fprintf (file, "Aliased symbols\n\n");
473*38fd1498Szrj 
474*38fd1498Szrj   FOR_EACH_LOCAL_DECL (cfun, i, var)
475*38fd1498Szrj     {
476*38fd1498Szrj       if (may_be_aliased (var))
477*38fd1498Szrj 	dump_variable (file, var);
478*38fd1498Szrj     }
479*38fd1498Szrj 
480*38fd1498Szrj   fprintf (file, "\nCall clobber information\n");
481*38fd1498Szrj 
482*38fd1498Szrj   fprintf (file, "\nESCAPED");
483*38fd1498Szrj   dump_points_to_solution (file, &cfun->gimple_df->escaped);
484*38fd1498Szrj 
485*38fd1498Szrj   fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
486*38fd1498Szrj 
487*38fd1498Szrj   FOR_EACH_SSA_NAME (i, ptr, cfun)
488*38fd1498Szrj     {
489*38fd1498Szrj       struct ptr_info_def *pi;
490*38fd1498Szrj 
491*38fd1498Szrj       if (!POINTER_TYPE_P (TREE_TYPE (ptr))
492*38fd1498Szrj 	  || SSA_NAME_IN_FREE_LIST (ptr))
493*38fd1498Szrj 	continue;
494*38fd1498Szrj 
495*38fd1498Szrj       pi = SSA_NAME_PTR_INFO (ptr);
496*38fd1498Szrj       if (pi)
497*38fd1498Szrj 	dump_points_to_info_for (file, ptr);
498*38fd1498Szrj     }
499*38fd1498Szrj 
500*38fd1498Szrj   fprintf (file, "\n");
501*38fd1498Szrj }
502*38fd1498Szrj 
503*38fd1498Szrj 
/* Dump alias information for the current function on stderr.
   Debugger convenience wrapper around dump_alias_info.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
511*38fd1498Szrj 
512*38fd1498Szrj 
513*38fd1498Szrj /* Dump the points-to set *PT into FILE.  */
514*38fd1498Szrj 
515*38fd1498Szrj void
516*38fd1498Szrj dump_points_to_solution (FILE *file, struct pt_solution *pt)
517*38fd1498Szrj {
518*38fd1498Szrj   if (pt->anything)
519*38fd1498Szrj     fprintf (file, ", points-to anything");
520*38fd1498Szrj 
521*38fd1498Szrj   if (pt->nonlocal)
522*38fd1498Szrj     fprintf (file, ", points-to non-local");
523*38fd1498Szrj 
524*38fd1498Szrj   if (pt->escaped)
525*38fd1498Szrj     fprintf (file, ", points-to escaped");
526*38fd1498Szrj 
527*38fd1498Szrj   if (pt->ipa_escaped)
528*38fd1498Szrj     fprintf (file, ", points-to unit escaped");
529*38fd1498Szrj 
530*38fd1498Szrj   if (pt->null)
531*38fd1498Szrj     fprintf (file, ", points-to NULL");
532*38fd1498Szrj 
533*38fd1498Szrj   if (pt->vars)
534*38fd1498Szrj     {
535*38fd1498Szrj       fprintf (file, ", points-to vars: ");
536*38fd1498Szrj       dump_decl_set (file, pt->vars);
537*38fd1498Szrj       if (pt->vars_contains_nonlocal
538*38fd1498Szrj 	  || pt->vars_contains_escaped
539*38fd1498Szrj 	  || pt->vars_contains_escaped_heap
540*38fd1498Szrj 	  || pt->vars_contains_restrict)
541*38fd1498Szrj 	{
542*38fd1498Szrj 	  const char *comma = "";
543*38fd1498Szrj 	  fprintf (file, " (");
544*38fd1498Szrj 	  if (pt->vars_contains_nonlocal)
545*38fd1498Szrj 	    {
546*38fd1498Szrj 	      fprintf (file, "nonlocal");
547*38fd1498Szrj 	      comma = ", ";
548*38fd1498Szrj 	    }
549*38fd1498Szrj 	  if (pt->vars_contains_escaped)
550*38fd1498Szrj 	    {
551*38fd1498Szrj 	      fprintf (file, "%sescaped", comma);
552*38fd1498Szrj 	      comma = ", ";
553*38fd1498Szrj 	    }
554*38fd1498Szrj 	  if (pt->vars_contains_escaped_heap)
555*38fd1498Szrj 	    {
556*38fd1498Szrj 	      fprintf (file, "%sescaped heap", comma);
557*38fd1498Szrj 	      comma = ", ";
558*38fd1498Szrj 	    }
559*38fd1498Szrj 	  if (pt->vars_contains_restrict)
560*38fd1498Szrj 	    {
561*38fd1498Szrj 	      fprintf (file, "%srestrict", comma);
562*38fd1498Szrj 	      comma = ", ";
563*38fd1498Szrj 	    }
564*38fd1498Szrj 	  if (pt->vars_contains_interposable)
565*38fd1498Szrj 	    fprintf (file, "%sinterposable", comma);
566*38fd1498Szrj 	  fprintf (file, ")");
567*38fd1498Szrj 	}
568*38fd1498Szrj     }
569*38fd1498Szrj }
570*38fd1498Szrj 
571*38fd1498Szrj 
/* Unified dump function for pt_solution.  Dumps the points-to set REF
   to stderr; debugger convenience overload.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}
579*38fd1498Szrj 
580*38fd1498Szrj DEBUG_FUNCTION void
581*38fd1498Szrj debug (pt_solution *ptr)
582*38fd1498Szrj {
583*38fd1498Szrj   if (ptr)
584*38fd1498Szrj     debug (*ptr);
585*38fd1498Szrj   else
586*38fd1498Szrj     fprintf (stderr, "<nil>\n");
587*38fd1498Szrj }
588*38fd1498Szrj 
589*38fd1498Szrj 
590*38fd1498Szrj /* Dump points-to information for SSA_NAME PTR into FILE.  */
591*38fd1498Szrj 
592*38fd1498Szrj void
593*38fd1498Szrj dump_points_to_info_for (FILE *file, tree ptr)
594*38fd1498Szrj {
595*38fd1498Szrj   struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
596*38fd1498Szrj 
597*38fd1498Szrj   print_generic_expr (file, ptr, dump_flags);
598*38fd1498Szrj 
599*38fd1498Szrj   if (pi)
600*38fd1498Szrj     dump_points_to_solution (file, &pi->pt);
601*38fd1498Szrj   else
602*38fd1498Szrj     fprintf (file, ", points-to anything");
603*38fd1498Szrj 
604*38fd1498Szrj   fprintf (file, "\n");
605*38fd1498Szrj }
606*38fd1498Szrj 
607*38fd1498Szrj 
/* Dump points-to information for SSA_NAME VAR into stderr.
   Debugger convenience wrapper around dump_points_to_info_for.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
615*38fd1498Szrj 
616*38fd1498Szrj 
617*38fd1498Szrj /* Initializes the alias-oracle reference representation *R from REF.  */
618*38fd1498Szrj 
619*38fd1498Szrj void
620*38fd1498Szrj ao_ref_init (ao_ref *r, tree ref)
621*38fd1498Szrj {
622*38fd1498Szrj   r->ref = ref;
623*38fd1498Szrj   r->base = NULL_TREE;
624*38fd1498Szrj   r->offset = 0;
625*38fd1498Szrj   r->size = -1;
626*38fd1498Szrj   r->max_size = -1;
627*38fd1498Szrj   r->ref_alias_set = -1;
628*38fd1498Szrj   r->base_alias_set = -1;
629*38fd1498Szrj   r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
630*38fd1498Szrj }
631*38fd1498Szrj 
632*38fd1498Szrj /* Returns the base object of the memory reference *REF.  */
633*38fd1498Szrj 
634*38fd1498Szrj tree
635*38fd1498Szrj ao_ref_base (ao_ref *ref)
636*38fd1498Szrj {
637*38fd1498Szrj   bool reverse;
638*38fd1498Szrj 
639*38fd1498Szrj   if (ref->base)
640*38fd1498Szrj     return ref->base;
641*38fd1498Szrj   ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
642*38fd1498Szrj 				       &ref->max_size, &reverse);
643*38fd1498Szrj   return ref->base;
644*38fd1498Szrj }
645*38fd1498Szrj 
646*38fd1498Szrj /* Returns the base object alias set of the memory reference *REF.  */
647*38fd1498Szrj 
648*38fd1498Szrj alias_set_type
649*38fd1498Szrj ao_ref_base_alias_set (ao_ref *ref)
650*38fd1498Szrj {
651*38fd1498Szrj   tree base_ref;
652*38fd1498Szrj   if (ref->base_alias_set != -1)
653*38fd1498Szrj     return ref->base_alias_set;
654*38fd1498Szrj   if (!ref->ref)
655*38fd1498Szrj     return 0;
656*38fd1498Szrj   base_ref = ref->ref;
657*38fd1498Szrj   while (handled_component_p (base_ref))
658*38fd1498Szrj     base_ref = TREE_OPERAND (base_ref, 0);
659*38fd1498Szrj   ref->base_alias_set = get_alias_set (base_ref);
660*38fd1498Szrj   return ref->base_alias_set;
661*38fd1498Szrj }
662*38fd1498Szrj 
663*38fd1498Szrj /* Returns the reference alias set of the memory reference *REF.  */
664*38fd1498Szrj 
665*38fd1498Szrj alias_set_type
666*38fd1498Szrj ao_ref_alias_set (ao_ref *ref)
667*38fd1498Szrj {
668*38fd1498Szrj   if (ref->ref_alias_set != -1)
669*38fd1498Szrj     return ref->ref_alias_set;
670*38fd1498Szrj   ref->ref_alias_set = get_alias_set (ref->ref);
671*38fd1498Szrj   return ref->ref_alias_set;
672*38fd1498Szrj }
673*38fd1498Szrj 
674*38fd1498Szrj /* Init an alias-oracle reference representation from a gimple pointer
675*38fd1498Szrj    PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
676*38fd1498Szrj    size is assumed to be unknown.  The access is assumed to be only
677*38fd1498Szrj    to or after of the pointer target, not before it.  */
678*38fd1498Szrj 
void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      /* Look through a simple copy from an ADDR_EXPR ...  */
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      /* ... or through a POINTER_PLUS_EXPR with a known constant
	 offset, accumulating that offset (converted to bits below)
	 into EXTRA_OFFSET.  */
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      /* For an address of a declaration or component, compute the base
	 object and a constant unit offset directly.  */
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  /* The offset within the object is variable: give up on the
	     size info and fall back to the bare base at offset zero.  */
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      /* Otherwise model the access as a dereference *(char *)PTR
	 at offset zero.  */
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  /* Use SIZE when it is a known polynomial constant whose coefficients
     convert to bits without overflow; otherwise record the extent as
     unknown (-1).  */
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  /* Alias set 0 is the conservative "may conflict with anything"
     value (see the alias-set checks in the disambiguators below).  */
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
728*38fd1498Szrj 
729*38fd1498Szrj /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
730*38fd1498Szrj    purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
731*38fd1498Szrj    decide.  */
732*38fd1498Szrj 
733*38fd1498Szrj static inline int
734*38fd1498Szrj same_type_for_tbaa (tree type1, tree type2)
735*38fd1498Szrj {
736*38fd1498Szrj   type1 = TYPE_MAIN_VARIANT (type1);
737*38fd1498Szrj   type2 = TYPE_MAIN_VARIANT (type2);
738*38fd1498Szrj 
739*38fd1498Szrj   /* If we would have to do structural comparison bail out.  */
740*38fd1498Szrj   if (TYPE_STRUCTURAL_EQUALITY_P (type1)
741*38fd1498Szrj       || TYPE_STRUCTURAL_EQUALITY_P (type2))
742*38fd1498Szrj     return -1;
743*38fd1498Szrj 
744*38fd1498Szrj   /* Compare the canonical types.  */
745*38fd1498Szrj   if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
746*38fd1498Szrj     return 1;
747*38fd1498Szrj 
748*38fd1498Szrj   /* ??? Array types are not properly unified in all cases as we have
749*38fd1498Szrj      spurious changes in the index types for example.  Removing this
750*38fd1498Szrj      causes all sorts of problems with the Fortran frontend.  */
751*38fd1498Szrj   if (TREE_CODE (type1) == ARRAY_TYPE
752*38fd1498Szrj       && TREE_CODE (type2) == ARRAY_TYPE)
753*38fd1498Szrj     return -1;
754*38fd1498Szrj 
755*38fd1498Szrj   /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
756*38fd1498Szrj      object of one of its constrained subtypes, e.g. when a function with an
757*38fd1498Szrj      unconstrained parameter passed by reference is called on an object and
758*38fd1498Szrj      inlined.  But, even in the case of a fixed size, type and subtypes are
759*38fd1498Szrj      not equivalent enough as to share the same TYPE_CANONICAL, since this
760*38fd1498Szrj      would mean that conversions between them are useless, whereas they are
761*38fd1498Szrj      not (e.g. type and subtypes can have different modes).  So, in the end,
762*38fd1498Szrj      they are only guaranteed to have the same alias set.  */
763*38fd1498Szrj   if (get_alias_set (type1) == get_alias_set (type2))
764*38fd1498Szrj     return -1;
765*38fd1498Szrj 
766*38fd1498Szrj   /* The types are known to be not equal.  */
767*38fd1498Szrj   return 0;
768*38fd1498Szrj }
769*38fd1498Szrj 
770*38fd1498Szrj /* Determine if the two component references REF1 and REF2 which are
771*38fd1498Szrj    based on access types TYPE1 and TYPE2 and of which at least one is based
772*38fd1498Szrj    on an indirect reference may alias.  REF2 is the only one that can
773*38fd1498Szrj    be a decl in which case REF2_IS_DECL is true.
774*38fd1498Szrj    REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
775*38fd1498Szrj    are the respective alias sets.  */
776*38fd1498Szrj 
static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component references through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      /* Found TYPE1 inside REF2's access path: rebase both offsets
	 relative to the common type and compare the access ranges.  */
      poly_int64 offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      /* Found TYPE2 inside REF1's access path: rebase and compare
	 the ranges, symmetric to the case above.  */
      poly_int64 offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}
864*38fd1498Szrj 
865*38fd1498Szrj /* Return true if we can determine that component references REF1 and REF2,
866*38fd1498Szrj    that are within a common DECL, cannot overlap.  */
867*38fd1498Szrj 
static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  /* A MEM_REF base is only handled when it is a zero-offset
     dereference of an address; otherwise give up.  */
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	return false;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	return false;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* Bases must be either same or uncomparable.  */
  gcc_checking_assert (ref1 == ref2
		       || (DECL_P (ref1) && DECL_P (ref2)
			   && compare_base_decls (ref1, ref2) != 0));

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Skip components whose containing object is not a record or
	 union (e.g. ARRAY_REFs) — only field accesses are compared.  */
      do
	{
	  if (component_refs1.is_empty ())
	    return false;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	     return false;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	return false;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	 return false;

      /* Equal fields at this rank: descend to the next rank; different
	 fields of the same record cannot overlap (modulo bitfields).  */
      if (field1 != field2)
	{
	  /* A field and its representative need to be considered the
	     same.  */
	  if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
	      || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
	    return false;
	  /* Different fields of the same record type cannot overlap.
	     ??? Bitfields can overlap at RTL level so punt on them.  */
	  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
	    return false;
	  return true;
	}
    }

  return false;
}
964*38fd1498Szrj 
965*38fd1498Szrj /* qsort compare function to sort FIELD_DECLs after their
966*38fd1498Szrj    DECL_FIELD_CONTEXT TYPE_UID.  */
967*38fd1498Szrj 
968*38fd1498Szrj static inline int
969*38fd1498Szrj ncr_compar (const void *field1_, const void *field2_)
970*38fd1498Szrj {
971*38fd1498Szrj   const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
972*38fd1498Szrj   const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
973*38fd1498Szrj   unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
974*38fd1498Szrj   unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
975*38fd1498Szrj   if (uid1 < uid2)
976*38fd1498Szrj     return -1;
977*38fd1498Szrj   else if (uid1 > uid2)
978*38fd1498Szrj     return 1;
979*38fd1498Szrj   return 0;
980*38fd1498Szrj }
981*38fd1498Szrj 
982*38fd1498Szrj /* Return true if we can determine that the fields referenced cannot
983*38fd1498Szrj    overlap for any pair of objects.  */
984*38fd1498Szrj 
985*38fd1498Szrj static bool
986*38fd1498Szrj nonoverlapping_component_refs_p (const_tree x, const_tree y)
987*38fd1498Szrj {
988*38fd1498Szrj   if (!flag_strict_aliasing
989*38fd1498Szrj       || !x || !y
990*38fd1498Szrj       || TREE_CODE (x) != COMPONENT_REF
991*38fd1498Szrj       || TREE_CODE (y) != COMPONENT_REF)
992*38fd1498Szrj     return false;
993*38fd1498Szrj 
994*38fd1498Szrj   auto_vec<const_tree, 16> fieldsx;
995*38fd1498Szrj   while (TREE_CODE (x) == COMPONENT_REF)
996*38fd1498Szrj     {
997*38fd1498Szrj       tree field = TREE_OPERAND (x, 1);
998*38fd1498Szrj       tree type = DECL_FIELD_CONTEXT (field);
999*38fd1498Szrj       if (TREE_CODE (type) == RECORD_TYPE)
1000*38fd1498Szrj 	fieldsx.safe_push (field);
1001*38fd1498Szrj       x = TREE_OPERAND (x, 0);
1002*38fd1498Szrj     }
1003*38fd1498Szrj   if (fieldsx.length () == 0)
1004*38fd1498Szrj     return false;
1005*38fd1498Szrj   auto_vec<const_tree, 16> fieldsy;
1006*38fd1498Szrj   while (TREE_CODE (y) == COMPONENT_REF)
1007*38fd1498Szrj     {
1008*38fd1498Szrj       tree field = TREE_OPERAND (y, 1);
1009*38fd1498Szrj       tree type = DECL_FIELD_CONTEXT (field);
1010*38fd1498Szrj       if (TREE_CODE (type) == RECORD_TYPE)
1011*38fd1498Szrj 	fieldsy.safe_push (TREE_OPERAND (y, 1));
1012*38fd1498Szrj       y = TREE_OPERAND (y, 0);
1013*38fd1498Szrj     }
1014*38fd1498Szrj   if (fieldsy.length () == 0)
1015*38fd1498Szrj     return false;
1016*38fd1498Szrj 
1017*38fd1498Szrj   /* Most common case first.  */
1018*38fd1498Szrj   if (fieldsx.length () == 1
1019*38fd1498Szrj       && fieldsy.length () == 1)
1020*38fd1498Szrj     return ((DECL_FIELD_CONTEXT (fieldsx[0])
1021*38fd1498Szrj 	     == DECL_FIELD_CONTEXT (fieldsy[0]))
1022*38fd1498Szrj 	    && fieldsx[0] != fieldsy[0]
1023*38fd1498Szrj 	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));
1024*38fd1498Szrj 
1025*38fd1498Szrj   if (fieldsx.length () == 2)
1026*38fd1498Szrj     {
1027*38fd1498Szrj       if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1028*38fd1498Szrj 	std::swap (fieldsx[0], fieldsx[1]);
1029*38fd1498Szrj     }
1030*38fd1498Szrj   else
1031*38fd1498Szrj     fieldsx.qsort (ncr_compar);
1032*38fd1498Szrj 
1033*38fd1498Szrj   if (fieldsy.length () == 2)
1034*38fd1498Szrj     {
1035*38fd1498Szrj       if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1036*38fd1498Szrj 	std::swap (fieldsy[0], fieldsy[1]);
1037*38fd1498Szrj     }
1038*38fd1498Szrj   else
1039*38fd1498Szrj     fieldsy.qsort (ncr_compar);
1040*38fd1498Szrj 
1041*38fd1498Szrj   unsigned i = 0, j = 0;
1042*38fd1498Szrj   do
1043*38fd1498Szrj     {
1044*38fd1498Szrj       const_tree fieldx = fieldsx[i];
1045*38fd1498Szrj       const_tree fieldy = fieldsy[j];
1046*38fd1498Szrj       tree typex = DECL_FIELD_CONTEXT (fieldx);
1047*38fd1498Szrj       tree typey = DECL_FIELD_CONTEXT (fieldy);
1048*38fd1498Szrj       if (typex == typey)
1049*38fd1498Szrj 	{
1050*38fd1498Szrj 	  /* We're left with accessing different fields of a structure,
1051*38fd1498Szrj 	     no possible overlap.  */
1052*38fd1498Szrj 	  if (fieldx != fieldy)
1053*38fd1498Szrj 	    {
1054*38fd1498Szrj 	      /* A field and its representative need to be considered the
1055*38fd1498Szrj 		 same.  */
1056*38fd1498Szrj 	      if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
1057*38fd1498Szrj 		  || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
1058*38fd1498Szrj 		return false;
1059*38fd1498Szrj 	      /* Different fields of the same record type cannot overlap.
1060*38fd1498Szrj 		 ??? Bitfields can overlap at RTL level so punt on them.  */
1061*38fd1498Szrj 	      if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
1062*38fd1498Szrj 		return false;
1063*38fd1498Szrj 	      return true;
1064*38fd1498Szrj 	    }
1065*38fd1498Szrj 	}
1066*38fd1498Szrj       if (TYPE_UID (typex) < TYPE_UID (typey))
1067*38fd1498Szrj 	{
1068*38fd1498Szrj 	  i++;
1069*38fd1498Szrj 	  if (i == fieldsx.length ())
1070*38fd1498Szrj 	    break;
1071*38fd1498Szrj 	}
1072*38fd1498Szrj       else
1073*38fd1498Szrj 	{
1074*38fd1498Szrj 	  j++;
1075*38fd1498Szrj 	  if (j == fieldsy.length ())
1076*38fd1498Szrj 	    break;
1077*38fd1498Szrj 	}
1078*38fd1498Szrj     }
1079*38fd1498Szrj   while (1);
1080*38fd1498Szrj 
1081*38fd1498Szrj   return false;
1082*38fd1498Szrj }
1083*38fd1498Szrj 
1084*38fd1498Szrj 
1085*38fd1498Szrj /* Return true if two memory references based on the variables BASE1
1086*38fd1498Szrj    and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1087*38fd1498Szrj    [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
1088*38fd1498Szrj    if non-NULL are the complete memory reference trees.  */
1089*38fd1498Szrj 
1090*38fd1498Szrj static bool
1091*38fd1498Szrj decl_refs_may_alias_p (tree ref1, tree base1,
1092*38fd1498Szrj 		       poly_int64 offset1, poly_int64 max_size1,
1093*38fd1498Szrj 		       tree ref2, tree base2,
1094*38fd1498Szrj 		       poly_int64 offset2, poly_int64 max_size2)
1095*38fd1498Szrj {
1096*38fd1498Szrj   gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1097*38fd1498Szrj 
1098*38fd1498Szrj   /* If both references are based on different variables, they cannot alias.  */
1099*38fd1498Szrj   if (compare_base_decls (base1, base2) == 0)
1100*38fd1498Szrj     return false;
1101*38fd1498Szrj 
1102*38fd1498Szrj   /* If both references are based on the same variable, they cannot alias if
1103*38fd1498Szrj      the accesses do not overlap.  */
1104*38fd1498Szrj   if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1105*38fd1498Szrj     return false;
1106*38fd1498Szrj 
1107*38fd1498Szrj   /* For components with variable position, the above test isn't sufficient,
1108*38fd1498Szrj      so we disambiguate component references manually.  */
1109*38fd1498Szrj   if (ref1 && ref2
1110*38fd1498Szrj       && handled_component_p (ref1) && handled_component_p (ref2)
1111*38fd1498Szrj       && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1112*38fd1498Szrj     return false;
1113*38fd1498Szrj 
1114*38fd1498Szrj   return true;
1115*38fd1498Szrj }
1116*38fd1498Szrj 
1117*38fd1498Szrj /* Return true if an indirect reference based on *PTR1 constrained
1118*38fd1498Szrj    to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1119*38fd1498Szrj    constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
1120*38fd1498Szrj    the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1121*38fd1498Szrj    in which case they are computed on-demand.  REF1 and REF2
1122*38fd1498Szrj    if non-NULL are the complete memory reference trees.  */
1123*38fd1498Szrj 
static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  /* The constant offset embedded in the MEM_REF, converted to bits.  */
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ???  IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* The declared type of the access through the pointer, taken from
     the MEM_REF's second (type-carrying) operand.  */
  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ???  We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ???  We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && poly_int_tree_p (DECL_SIZE (base2))
      && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (ptrtype1)))
      /* ???  This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && known_lt (wi::to_poly_widest (DECL_SIZE (base2)),
		   wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ptrtype1)))))
    return false;

  /* Without the full second reference tree no further disambiguation
     is possible.  */
  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}
1244*38fd1498Szrj 
1245*38fd1498Szrj /* Return true if two indirect references based on *PTR1
1246*38fd1498Szrj    and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1247*38fd1498Szrj    [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
1248*38fd1498Szrj    the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1249*38fd1498Szrj    in which case they are computed on-demand.  REF1 and REF2
1250*38fd1498Szrj    if non-NULL are the complete memory reference trees. */
1251*38fd1498Szrj 
1252*38fd1498Szrj static bool
1253*38fd1498Szrj indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1254*38fd1498Szrj 			   poly_int64 offset1, poly_int64 max_size1,
1255*38fd1498Szrj 			   alias_set_type ref1_alias_set,
1256*38fd1498Szrj 			   alias_set_type base1_alias_set,
1257*38fd1498Szrj 			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
1258*38fd1498Szrj 			   poly_int64 offset2, poly_int64 max_size2,
1259*38fd1498Szrj 			   alias_set_type ref2_alias_set,
1260*38fd1498Szrj 			   alias_set_type base2_alias_set, bool tbaa_p)
1261*38fd1498Szrj {
1262*38fd1498Szrj   tree ptr1;
1263*38fd1498Szrj   tree ptr2;
1264*38fd1498Szrj   tree ptrtype1, ptrtype2;
1265*38fd1498Szrj 
1266*38fd1498Szrj   gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1267*38fd1498Szrj 			|| TREE_CODE (base1) == TARGET_MEM_REF)
1268*38fd1498Szrj 		       && (TREE_CODE (base2) == MEM_REF
1269*38fd1498Szrj 			   || TREE_CODE (base2) == TARGET_MEM_REF));
1270*38fd1498Szrj 
1271*38fd1498Szrj   ptr1 = TREE_OPERAND (base1, 0);
1272*38fd1498Szrj   ptr2 = TREE_OPERAND (base2, 0);
1273*38fd1498Szrj 
1274*38fd1498Szrj   /* If both bases are based on pointers they cannot alias if they may not
1275*38fd1498Szrj      point to the same memory object or if they point to the same object
1276*38fd1498Szrj      and the accesses do not overlap.  */
1277*38fd1498Szrj   if ((!cfun || gimple_in_ssa_p (cfun))
1278*38fd1498Szrj       && operand_equal_p (ptr1, ptr2, 0)
1279*38fd1498Szrj       && (((TREE_CODE (base1) != TARGET_MEM_REF
1280*38fd1498Szrj 	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1281*38fd1498Szrj 	   && (TREE_CODE (base2) != TARGET_MEM_REF
1282*38fd1498Szrj 	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1283*38fd1498Szrj 	  || (TREE_CODE (base1) == TARGET_MEM_REF
1284*38fd1498Szrj 	      && TREE_CODE (base2) == TARGET_MEM_REF
1285*38fd1498Szrj 	      && (TMR_STEP (base1) == TMR_STEP (base2)
1286*38fd1498Szrj 		  || (TMR_STEP (base1) && TMR_STEP (base2)
1287*38fd1498Szrj 		      && operand_equal_p (TMR_STEP (base1),
1288*38fd1498Szrj 					  TMR_STEP (base2), 0)))
1289*38fd1498Szrj 	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
1290*38fd1498Szrj 		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
1291*38fd1498Szrj 		      && operand_equal_p (TMR_INDEX (base1),
1292*38fd1498Szrj 					  TMR_INDEX (base2), 0)))
1293*38fd1498Szrj 	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1294*38fd1498Szrj 		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1295*38fd1498Szrj 		      && operand_equal_p (TMR_INDEX2 (base1),
1296*38fd1498Szrj 					  TMR_INDEX2 (base2), 0))))))
1297*38fd1498Szrj     {
1298*38fd1498Szrj       poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1299*38fd1498Szrj       poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
1300*38fd1498Szrj       return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
1301*38fd1498Szrj 				     offset2 + moff2, max_size2);
1302*38fd1498Szrj     }
1303*38fd1498Szrj   if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1304*38fd1498Szrj     return false;
1305*38fd1498Szrj 
1306*38fd1498Szrj   /* Disambiguations that rely on strict aliasing rules follow.  */
1307*38fd1498Szrj   if (!flag_strict_aliasing || !tbaa_p)
1308*38fd1498Szrj     return true;
1309*38fd1498Szrj 
1310*38fd1498Szrj   ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1311*38fd1498Szrj   ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1312*38fd1498Szrj 
1313*38fd1498Szrj   /* If the alias set for a pointer access is zero all bets are off.  */
1314*38fd1498Szrj   if (base1_alias_set == 0
1315*38fd1498Szrj       || base2_alias_set == 0)
1316*38fd1498Szrj     return true;
1317*38fd1498Szrj 
1318*38fd1498Szrj   /* If both references are through the same type, they do not alias
1319*38fd1498Szrj      if the accesses do not overlap.  This does extra disambiguation
1320*38fd1498Szrj      for mixed/pointer accesses but requires strict aliasing.  */
1321*38fd1498Szrj   if ((TREE_CODE (base1) != TARGET_MEM_REF
1322*38fd1498Szrj        || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1323*38fd1498Szrj       && (TREE_CODE (base2) != TARGET_MEM_REF
1324*38fd1498Szrj 	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1325*38fd1498Szrj       && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1326*38fd1498Szrj       && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
1327*38fd1498Szrj       && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1328*38fd1498Szrj 			     TREE_TYPE (ptrtype2)) == 1
1329*38fd1498Szrj       /* But avoid treating arrays as "objects", instead assume they
1330*38fd1498Szrj          can overlap by an exact multiple of their element size.  */
1331*38fd1498Szrj       && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
1332*38fd1498Szrj     return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
1333*38fd1498Szrj 
1334*38fd1498Szrj   /* Do type-based disambiguation.  */
1335*38fd1498Szrj   if (base1_alias_set != base2_alias_set
1336*38fd1498Szrj       && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1337*38fd1498Szrj     return false;
1338*38fd1498Szrj 
1339*38fd1498Szrj   /* If either reference is view-converted, give up now.  */
1340*38fd1498Szrj   if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1341*38fd1498Szrj       || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1342*38fd1498Szrj     return true;
1343*38fd1498Szrj 
1344*38fd1498Szrj   if (ref1 && ref2
1345*38fd1498Szrj       && nonoverlapping_component_refs_p (ref1, ref2))
1346*38fd1498Szrj     return false;
1347*38fd1498Szrj 
1348*38fd1498Szrj   /* Do access-path based disambiguation.  */
1349*38fd1498Szrj   if (ref1 && ref2
1350*38fd1498Szrj       && (handled_component_p (ref1) || handled_component_p (ref2)))
1351*38fd1498Szrj     return aliasing_component_refs_p (ref1,
1352*38fd1498Szrj 				      ref1_alias_set, base1_alias_set,
1353*38fd1498Szrj 				      offset1, max_size1,
1354*38fd1498Szrj 				      ref2,
1355*38fd1498Szrj 				      ref2_alias_set, base2_alias_set,
1356*38fd1498Szrj 				      offset2, max_size2, false);
1357*38fd1498Szrj 
1358*38fd1498Szrj   return true;
1359*38fd1498Szrj }
1360*38fd1498Szrj 
/* Return true, if the two memory references REF1 and REF2 may alias.
   TBAA_P controls whether disambiguations that rely on strict-aliasing
   (type-based alias analysis) rules may be used.  Central dispatcher:
   handles the trivial and restrict-qualified cases itself and defers
   decl-vs-decl, decl-vs-pointer and pointer-vs-pointer pairs to the
   specialized workers.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  /* The underlying trees, when present, must have one of the shapes
     ao_ref_base knows how to decompose.  */
  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  Such bases are not memory objects,
     so the references cannot alias.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case so the decl is always
     the first reference; the workers below rely on that ordering.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  /* After canonicalization the only remaining combination, decl-vs-decl,
     was handled above, so this point is never reached.  */
  gcc_unreachable ();
}
1510*38fd1498Szrj 
1511*38fd1498Szrj static bool
1512*38fd1498Szrj refs_may_alias_p (tree ref1, ao_ref *ref2)
1513*38fd1498Szrj {
1514*38fd1498Szrj   ao_ref r1;
1515*38fd1498Szrj   ao_ref_init (&r1, ref1);
1516*38fd1498Szrj   return refs_may_alias_p_1 (&r1, ref2, true);
1517*38fd1498Szrj }
1518*38fd1498Szrj 
1519*38fd1498Szrj bool
1520*38fd1498Szrj refs_may_alias_p (tree ref1, tree ref2)
1521*38fd1498Szrj {
1522*38fd1498Szrj   ao_ref r1, r2;
1523*38fd1498Szrj   bool res;
1524*38fd1498Szrj   ao_ref_init (&r1, ref1);
1525*38fd1498Szrj   ao_ref_init (&r2, ref2);
1526*38fd1498Szrj   res = refs_may_alias_p_1 (&r1, &r2, true);
1527*38fd1498Szrj   if (res)
1528*38fd1498Szrj     ++alias_stats.refs_may_alias_p_may_alias;
1529*38fd1498Szrj   else
1530*38fd1498Szrj     ++alias_stats.refs_may_alias_p_no_alias;
1531*38fd1498Szrj   return res;
1532*38fd1498Szrj }
1533*38fd1498Szrj 
1534*38fd1498Szrj /* Returns true if there is a anti-dependence for the STORE that
1535*38fd1498Szrj    executes after the LOAD.  */
1536*38fd1498Szrj 
1537*38fd1498Szrj bool
1538*38fd1498Szrj refs_anti_dependent_p (tree load, tree store)
1539*38fd1498Szrj {
1540*38fd1498Szrj   ao_ref r1, r2;
1541*38fd1498Szrj   ao_ref_init (&r1, load);
1542*38fd1498Szrj   ao_ref_init (&r2, store);
1543*38fd1498Szrj   return refs_may_alias_p_1 (&r1, &r2, false);
1544*38fd1498Szrj }
1545*38fd1498Szrj 
1546*38fd1498Szrj /* Returns true if there is a output dependence for the stores
1547*38fd1498Szrj    STORE1 and STORE2.  */
1548*38fd1498Szrj 
1549*38fd1498Szrj bool
1550*38fd1498Szrj refs_output_dependent_p (tree store1, tree store2)
1551*38fd1498Szrj {
1552*38fd1498Szrj   ao_ref r1, r2;
1553*38fd1498Szrj   ao_ref_init (&r1, store1);
1554*38fd1498Szrj   ao_ref_init (&r2, store2);
1555*38fd1498Szrj   return refs_may_alias_p_1 (&r1, &r2, false);
1556*38fd1498Szrj }
1557*38fd1498Szrj 
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  Worker for ref_maybe_used_by_call_p,
   which adds the statistics bookkeeping.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.
     They may still use memory their arguments point to, hence the jump
     to the argument inspection below instead of returning false.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    /* No usable length bound for the destination read, so pass
	       NULL_TREE for an unknown size.  */
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Only the three-argument forms carry an explicit size.  */
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* The _CHK variants take a trailing object-size argument, so
	       the copy length sits at index 2 of the four-argument form.  */
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    /* bcopy has the source as its first argument, unlike memcpy.  */
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* strndup/realloc carry a size bound as second argument.  */
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	CASE_BUILT_IN_ALLOCA:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  Conservatively treat them as using any memory.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      /* Without points-to information we have to be conservative.  */
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      /* NOTE: this 'flags' intentionally shadows the ECF flags above;
	 here it holds the per-argument EAF flags.  */
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}
1870*38fd1498Szrj 
1871*38fd1498Szrj static bool
1872*38fd1498Szrj ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
1873*38fd1498Szrj {
1874*38fd1498Szrj   bool res;
1875*38fd1498Szrj   res = ref_maybe_used_by_call_p_1 (call, ref);
1876*38fd1498Szrj   if (res)
1877*38fd1498Szrj     ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1878*38fd1498Szrj   else
1879*38fd1498Szrj     ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1880*38fd1498Szrj   return res;
1881*38fd1498Szrj }
1882*38fd1498Szrj 
1883*38fd1498Szrj 
1884*38fd1498Szrj /* If the statement STMT may use the memory reference REF return
1885*38fd1498Szrj    true, otherwise return false.  */
1886*38fd1498Szrj 
1887*38fd1498Szrj bool
1888*38fd1498Szrj ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
1889*38fd1498Szrj {
1890*38fd1498Szrj   if (is_gimple_assign (stmt))
1891*38fd1498Szrj     {
1892*38fd1498Szrj       tree rhs;
1893*38fd1498Szrj 
1894*38fd1498Szrj       /* All memory assign statements are single.  */
1895*38fd1498Szrj       if (!gimple_assign_single_p (stmt))
1896*38fd1498Szrj 	return false;
1897*38fd1498Szrj 
1898*38fd1498Szrj       rhs = gimple_assign_rhs1 (stmt);
1899*38fd1498Szrj       if (is_gimple_reg (rhs)
1900*38fd1498Szrj 	  || is_gimple_min_invariant (rhs)
1901*38fd1498Szrj 	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1902*38fd1498Szrj 	return false;
1903*38fd1498Szrj 
1904*38fd1498Szrj       return refs_may_alias_p (rhs, ref);
1905*38fd1498Szrj     }
1906*38fd1498Szrj   else if (is_gimple_call (stmt))
1907*38fd1498Szrj     return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
1908*38fd1498Szrj   else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1909*38fd1498Szrj     {
1910*38fd1498Szrj       tree retval = gimple_return_retval (return_stmt);
1911*38fd1498Szrj       if (retval
1912*38fd1498Szrj 	  && TREE_CODE (retval) != SSA_NAME
1913*38fd1498Szrj 	  && !is_gimple_min_invariant (retval)
1914*38fd1498Szrj 	  && refs_may_alias_p (retval, ref))
1915*38fd1498Szrj 	return true;
1916*38fd1498Szrj       /* If ref escapes the function then the return acts as a use.  */
1917*38fd1498Szrj       tree base = ao_ref_base (ref);
1918*38fd1498Szrj       if (!base)
1919*38fd1498Szrj 	;
1920*38fd1498Szrj       else if (DECL_P (base))
1921*38fd1498Szrj 	return is_global_var (base);
1922*38fd1498Szrj       else if (TREE_CODE (base) == MEM_REF
1923*38fd1498Szrj 	       || TREE_CODE (base) == TARGET_MEM_REF)
1924*38fd1498Szrj 	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1925*38fd1498Szrj       return false;
1926*38fd1498Szrj     }
1927*38fd1498Szrj 
1928*38fd1498Szrj   return true;
1929*38fd1498Szrj }
1930*38fd1498Szrj 
1931*38fd1498Szrj bool
1932*38fd1498Szrj ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
1933*38fd1498Szrj {
1934*38fd1498Szrj   ao_ref r;
1935*38fd1498Szrj   ao_ref_init (&r, ref);
1936*38fd1498Szrj   return ref_maybe_used_by_stmt_p (stmt, &r);
1937*38fd1498Szrj }
1938*38fd1498Szrj 
1939*38fd1498Szrj /* If the call in statement CALL may clobber the memory reference REF
1940*38fd1498Szrj    return true, otherwise return false.  */
1941*38fd1498Szrj 
1942*38fd1498Szrj bool
1943*38fd1498Szrj call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1944*38fd1498Szrj {
1945*38fd1498Szrj   tree base;
1946*38fd1498Szrj   tree callee;
1947*38fd1498Szrj 
1948*38fd1498Szrj   /* If the call is pure or const it cannot clobber anything.  */
1949*38fd1498Szrj   if (gimple_call_flags (call)
1950*38fd1498Szrj       & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1951*38fd1498Szrj     return false;
1952*38fd1498Szrj   if (gimple_call_internal_p (call))
1953*38fd1498Szrj     switch (gimple_call_internal_fn (call))
1954*38fd1498Szrj       {
1955*38fd1498Szrj 	/* Treat these internal calls like ECF_PURE for aliasing,
1956*38fd1498Szrj 	   they don't write to any memory the program should care about.
1957*38fd1498Szrj 	   They have important other side-effects, and read memory,
1958*38fd1498Szrj 	   so can't be ECF_NOVOPS.  */
1959*38fd1498Szrj       case IFN_UBSAN_NULL:
1960*38fd1498Szrj       case IFN_UBSAN_BOUNDS:
1961*38fd1498Szrj       case IFN_UBSAN_VPTR:
1962*38fd1498Szrj       case IFN_UBSAN_OBJECT_SIZE:
1963*38fd1498Szrj       case IFN_UBSAN_PTR:
1964*38fd1498Szrj       case IFN_ASAN_CHECK:
1965*38fd1498Szrj 	return false;
1966*38fd1498Szrj       default:
1967*38fd1498Szrj 	break;
1968*38fd1498Szrj       }
1969*38fd1498Szrj 
1970*38fd1498Szrj   base = ao_ref_base (ref);
1971*38fd1498Szrj   if (!base)
1972*38fd1498Szrj     return true;
1973*38fd1498Szrj 
1974*38fd1498Szrj   if (TREE_CODE (base) == SSA_NAME
1975*38fd1498Szrj       || CONSTANT_CLASS_P (base))
1976*38fd1498Szrj     return false;
1977*38fd1498Szrj 
1978*38fd1498Szrj   /* A call that is not without side-effects might involve volatile
1979*38fd1498Szrj      accesses and thus conflicts with all other volatile accesses.  */
1980*38fd1498Szrj   if (ref->volatile_p)
1981*38fd1498Szrj     return true;
1982*38fd1498Szrj 
1983*38fd1498Szrj   /* If the reference is based on a decl that is not aliased the call
1984*38fd1498Szrj      cannot possibly clobber it.  */
1985*38fd1498Szrj   if (DECL_P (base)
1986*38fd1498Szrj       && !may_be_aliased (base)
1987*38fd1498Szrj       /* But local non-readonly statics can be modified through recursion
1988*38fd1498Szrj          or the call may implement a threading barrier which we must
1989*38fd1498Szrj 	 treat as may-def.  */
1990*38fd1498Szrj       && (TREE_READONLY (base)
1991*38fd1498Szrj 	  || !is_global_var (base)))
1992*38fd1498Szrj     return false;
1993*38fd1498Szrj 
1994*38fd1498Szrj   callee = gimple_call_fndecl (call);
1995*38fd1498Szrj 
1996*38fd1498Szrj   /* Handle those builtin functions explicitly that do not act as
1997*38fd1498Szrj      escape points.  See tree-ssa-structalias.c:find_func_aliases
1998*38fd1498Szrj      for the list of builtins we might need to handle here.  */
1999*38fd1498Szrj   if (callee != NULL_TREE
2000*38fd1498Szrj       && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2001*38fd1498Szrj     switch (DECL_FUNCTION_CODE (callee))
2002*38fd1498Szrj       {
2003*38fd1498Szrj 	/* All the following functions clobber memory pointed to by
2004*38fd1498Szrj 	   their first argument.  */
2005*38fd1498Szrj 	case BUILT_IN_STRCPY:
2006*38fd1498Szrj 	case BUILT_IN_STRNCPY:
2007*38fd1498Szrj 	case BUILT_IN_MEMCPY:
2008*38fd1498Szrj 	case BUILT_IN_MEMMOVE:
2009*38fd1498Szrj 	case BUILT_IN_MEMPCPY:
2010*38fd1498Szrj 	case BUILT_IN_STPCPY:
2011*38fd1498Szrj 	case BUILT_IN_STPNCPY:
2012*38fd1498Szrj 	case BUILT_IN_STRCAT:
2013*38fd1498Szrj 	case BUILT_IN_STRNCAT:
2014*38fd1498Szrj 	case BUILT_IN_MEMSET:
2015*38fd1498Szrj 	case BUILT_IN_TM_MEMSET:
2016*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (1):
2017*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (2):
2018*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (4):
2019*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (8):
2020*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (FLOAT):
2021*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (DOUBLE):
2022*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (LDOUBLE):
2023*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (M64):
2024*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (M128):
2025*38fd1498Szrj 	CASE_BUILT_IN_TM_STORE (M256):
2026*38fd1498Szrj 	case BUILT_IN_TM_MEMCPY:
2027*38fd1498Szrj 	case BUILT_IN_TM_MEMMOVE:
2028*38fd1498Szrj 	  {
2029*38fd1498Szrj 	    ao_ref dref;
2030*38fd1498Szrj 	    tree size = NULL_TREE;
2031*38fd1498Szrj 	    /* Don't pass in size for strncat, as the maximum size
2032*38fd1498Szrj 	       is strlen (dest) + n + 1 instead of n, resp.
2033*38fd1498Szrj 	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
2034*38fd1498Szrj 	       known.  */
2035*38fd1498Szrj 	    if (gimple_call_num_args (call) == 3
2036*38fd1498Szrj 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2037*38fd1498Szrj 	      size = gimple_call_arg (call, 2);
2038*38fd1498Szrj 	    ao_ref_init_from_ptr_and_size (&dref,
2039*38fd1498Szrj 					   gimple_call_arg (call, 0),
2040*38fd1498Szrj 					   size);
2041*38fd1498Szrj 	    return refs_may_alias_p_1 (&dref, ref, false);
2042*38fd1498Szrj 	  }
2043*38fd1498Szrj 	case BUILT_IN_STRCPY_CHK:
2044*38fd1498Szrj 	case BUILT_IN_STRNCPY_CHK:
2045*38fd1498Szrj 	case BUILT_IN_MEMCPY_CHK:
2046*38fd1498Szrj 	case BUILT_IN_MEMMOVE_CHK:
2047*38fd1498Szrj 	case BUILT_IN_MEMPCPY_CHK:
2048*38fd1498Szrj 	case BUILT_IN_STPCPY_CHK:
2049*38fd1498Szrj 	case BUILT_IN_STPNCPY_CHK:
2050*38fd1498Szrj 	case BUILT_IN_STRCAT_CHK:
2051*38fd1498Szrj 	case BUILT_IN_STRNCAT_CHK:
2052*38fd1498Szrj 	case BUILT_IN_MEMSET_CHK:
2053*38fd1498Szrj 	  {
2054*38fd1498Szrj 	    ao_ref dref;
2055*38fd1498Szrj 	    tree size = NULL_TREE;
2056*38fd1498Szrj 	    /* Don't pass in size for __strncat_chk, as the maximum size
2057*38fd1498Szrj 	       is strlen (dest) + n + 1 instead of n, resp.
2058*38fd1498Szrj 	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
2059*38fd1498Szrj 	       known.  */
2060*38fd1498Szrj 	    if (gimple_call_num_args (call) == 4
2061*38fd1498Szrj 		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2062*38fd1498Szrj 	      size = gimple_call_arg (call, 2);
2063*38fd1498Szrj 	    ao_ref_init_from_ptr_and_size (&dref,
2064*38fd1498Szrj 					   gimple_call_arg (call, 0),
2065*38fd1498Szrj 					   size);
2066*38fd1498Szrj 	    return refs_may_alias_p_1 (&dref, ref, false);
2067*38fd1498Szrj 	  }
2068*38fd1498Szrj 	case BUILT_IN_BCOPY:
2069*38fd1498Szrj 	  {
2070*38fd1498Szrj 	    ao_ref dref;
2071*38fd1498Szrj 	    tree size = gimple_call_arg (call, 2);
2072*38fd1498Szrj 	    ao_ref_init_from_ptr_and_size (&dref,
2073*38fd1498Szrj 					   gimple_call_arg (call, 1),
2074*38fd1498Szrj 					   size);
2075*38fd1498Szrj 	    return refs_may_alias_p_1 (&dref, ref, false);
2076*38fd1498Szrj 	  }
2077*38fd1498Szrj 	/* Allocating memory does not have any side-effects apart from
2078*38fd1498Szrj 	   being the definition point for the pointer.  */
2079*38fd1498Szrj 	case BUILT_IN_MALLOC:
2080*38fd1498Szrj 	case BUILT_IN_ALIGNED_ALLOC:
2081*38fd1498Szrj 	case BUILT_IN_CALLOC:
2082*38fd1498Szrj 	case BUILT_IN_STRDUP:
2083*38fd1498Szrj 	case BUILT_IN_STRNDUP:
2084*38fd1498Szrj 	  /* Unix98 specifies that errno is set on allocation failure.  */
2085*38fd1498Szrj 	  if (flag_errno_math
2086*38fd1498Szrj 	      && targetm.ref_may_alias_errno (ref))
2087*38fd1498Szrj 	    return true;
2088*38fd1498Szrj 	  return false;
2089*38fd1498Szrj 	case BUILT_IN_STACK_SAVE:
2090*38fd1498Szrj 	CASE_BUILT_IN_ALLOCA:
2091*38fd1498Szrj 	case BUILT_IN_ASSUME_ALIGNED:
2092*38fd1498Szrj 	  return false;
2093*38fd1498Szrj 	/* But posix_memalign stores a pointer into the memory pointed to
2094*38fd1498Szrj 	   by its first argument.  */
2095*38fd1498Szrj 	case BUILT_IN_POSIX_MEMALIGN:
2096*38fd1498Szrj 	  {
2097*38fd1498Szrj 	    tree ptrptr = gimple_call_arg (call, 0);
2098*38fd1498Szrj 	    ao_ref dref;
2099*38fd1498Szrj 	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2100*38fd1498Szrj 					   TYPE_SIZE_UNIT (ptr_type_node));
2101*38fd1498Szrj 	    return (refs_may_alias_p_1 (&dref, ref, false)
2102*38fd1498Szrj 		    || (flag_errno_math
2103*38fd1498Szrj 			&& targetm.ref_may_alias_errno (ref)));
2104*38fd1498Szrj 	  }
2105*38fd1498Szrj 	/* Freeing memory kills the pointed-to memory.  More importantly
2106*38fd1498Szrj 	   the call has to serve as a barrier for moving loads and stores
2107*38fd1498Szrj 	   across it.  */
2108*38fd1498Szrj 	case BUILT_IN_FREE:
2109*38fd1498Szrj 	case BUILT_IN_VA_END:
2110*38fd1498Szrj 	  {
2111*38fd1498Szrj 	    tree ptr = gimple_call_arg (call, 0);
2112*38fd1498Szrj 	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2113*38fd1498Szrj 	  }
2114*38fd1498Szrj 	/* Realloc serves both as allocation point and deallocation point.  */
2115*38fd1498Szrj 	case BUILT_IN_REALLOC:
2116*38fd1498Szrj 	  {
2117*38fd1498Szrj 	    tree ptr = gimple_call_arg (call, 0);
2118*38fd1498Szrj 	    /* Unix98 specifies that errno is set on allocation failure.  */
2119*38fd1498Szrj 	    return ((flag_errno_math
2120*38fd1498Szrj 		     && targetm.ref_may_alias_errno (ref))
2121*38fd1498Szrj 		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2122*38fd1498Szrj 	  }
2123*38fd1498Szrj 	case BUILT_IN_GAMMA_R:
2124*38fd1498Szrj 	case BUILT_IN_GAMMAF_R:
2125*38fd1498Szrj 	case BUILT_IN_GAMMAL_R:
2126*38fd1498Szrj 	case BUILT_IN_LGAMMA_R:
2127*38fd1498Szrj 	case BUILT_IN_LGAMMAF_R:
2128*38fd1498Szrj 	case BUILT_IN_LGAMMAL_R:
2129*38fd1498Szrj 	  {
2130*38fd1498Szrj 	    tree out = gimple_call_arg (call, 1);
2131*38fd1498Szrj 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
2132*38fd1498Szrj 	      return true;
2133*38fd1498Szrj 	    if (flag_errno_math)
2134*38fd1498Szrj 	      break;
2135*38fd1498Szrj 	    return false;
2136*38fd1498Szrj 	  }
2137*38fd1498Szrj 	case BUILT_IN_FREXP:
2138*38fd1498Szrj 	case BUILT_IN_FREXPF:
2139*38fd1498Szrj 	case BUILT_IN_FREXPL:
2140*38fd1498Szrj 	case BUILT_IN_MODF:
2141*38fd1498Szrj 	case BUILT_IN_MODFF:
2142*38fd1498Szrj 	case BUILT_IN_MODFL:
2143*38fd1498Szrj 	  {
2144*38fd1498Szrj 	    tree out = gimple_call_arg (call, 1);
2145*38fd1498Szrj 	    return ptr_deref_may_alias_ref_p_1 (out, ref);
2146*38fd1498Szrj 	  }
2147*38fd1498Szrj 	case BUILT_IN_REMQUO:
2148*38fd1498Szrj 	case BUILT_IN_REMQUOF:
2149*38fd1498Szrj 	case BUILT_IN_REMQUOL:
2150*38fd1498Szrj 	  {
2151*38fd1498Szrj 	    tree out = gimple_call_arg (call, 2);
2152*38fd1498Szrj 	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
2153*38fd1498Szrj 	      return true;
2154*38fd1498Szrj 	    if (flag_errno_math)
2155*38fd1498Szrj 	      break;
2156*38fd1498Szrj 	    return false;
2157*38fd1498Szrj 	  }
2158*38fd1498Szrj 	case BUILT_IN_SINCOS:
2159*38fd1498Szrj 	case BUILT_IN_SINCOSF:
2160*38fd1498Szrj 	case BUILT_IN_SINCOSL:
2161*38fd1498Szrj 	  {
2162*38fd1498Szrj 	    tree sin = gimple_call_arg (call, 1);
2163*38fd1498Szrj 	    tree cos = gimple_call_arg (call, 2);
2164*38fd1498Szrj 	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2165*38fd1498Szrj 		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
2166*38fd1498Szrj 	  }
2167*38fd1498Szrj 	/* __sync_* builtins and some OpenMP builtins act as threading
2168*38fd1498Szrj 	   barriers.  */
2169*38fd1498Szrj #undef DEF_SYNC_BUILTIN
2170*38fd1498Szrj #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2171*38fd1498Szrj #include "sync-builtins.def"
2172*38fd1498Szrj #undef DEF_SYNC_BUILTIN
2173*38fd1498Szrj 	case BUILT_IN_GOMP_ATOMIC_START:
2174*38fd1498Szrj 	case BUILT_IN_GOMP_ATOMIC_END:
2175*38fd1498Szrj 	case BUILT_IN_GOMP_BARRIER:
2176*38fd1498Szrj 	case BUILT_IN_GOMP_BARRIER_CANCEL:
2177*38fd1498Szrj 	case BUILT_IN_GOMP_TASKWAIT:
2178*38fd1498Szrj 	case BUILT_IN_GOMP_TASKGROUP_END:
2179*38fd1498Szrj 	case BUILT_IN_GOMP_CRITICAL_START:
2180*38fd1498Szrj 	case BUILT_IN_GOMP_CRITICAL_END:
2181*38fd1498Szrj 	case BUILT_IN_GOMP_CRITICAL_NAME_START:
2182*38fd1498Szrj 	case BUILT_IN_GOMP_CRITICAL_NAME_END:
2183*38fd1498Szrj 	case BUILT_IN_GOMP_LOOP_END:
2184*38fd1498Szrj 	case BUILT_IN_GOMP_LOOP_END_CANCEL:
2185*38fd1498Szrj 	case BUILT_IN_GOMP_ORDERED_START:
2186*38fd1498Szrj 	case BUILT_IN_GOMP_ORDERED_END:
2187*38fd1498Szrj 	case BUILT_IN_GOMP_SECTIONS_END:
2188*38fd1498Szrj 	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2189*38fd1498Szrj 	case BUILT_IN_GOMP_SINGLE_COPY_START:
2190*38fd1498Szrj 	case BUILT_IN_GOMP_SINGLE_COPY_END:
2191*38fd1498Szrj 	  return true;
2192*38fd1498Szrj 	default:
2193*38fd1498Szrj 	  /* Fallthru to general call handling.  */;
2194*38fd1498Szrj       }
2195*38fd1498Szrj 
2196*38fd1498Szrj   /* Check if base is a global static variable that is not written
2197*38fd1498Szrj      by the function.  */
2198*38fd1498Szrj   if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2199*38fd1498Szrj     {
2200*38fd1498Szrj       struct cgraph_node *node = cgraph_node::get (callee);
2201*38fd1498Szrj       bitmap not_written;
2202*38fd1498Szrj 
2203*38fd1498Szrj       if (node
2204*38fd1498Szrj 	  && (not_written = ipa_reference_get_not_written_global (node))
2205*38fd1498Szrj 	  && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2206*38fd1498Szrj 	return false;
2207*38fd1498Szrj     }
2208*38fd1498Szrj 
2209*38fd1498Szrj   /* Check if the base variable is call-clobbered.  */
2210*38fd1498Szrj   if (DECL_P (base))
2211*38fd1498Szrj     return pt_solution_includes (gimple_call_clobber_set (call), base);
2212*38fd1498Szrj   else if ((TREE_CODE (base) == MEM_REF
2213*38fd1498Szrj 	    || TREE_CODE (base) == TARGET_MEM_REF)
2214*38fd1498Szrj 	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2215*38fd1498Szrj     {
2216*38fd1498Szrj       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2217*38fd1498Szrj       if (!pi)
2218*38fd1498Szrj 	return true;
2219*38fd1498Szrj 
2220*38fd1498Szrj       return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2221*38fd1498Szrj     }
2222*38fd1498Szrj 
2223*38fd1498Szrj   return true;
2224*38fd1498Szrj }
2225*38fd1498Szrj 
2226*38fd1498Szrj /* If the call in statement CALL may clobber the memory reference REF
2227*38fd1498Szrj    return true, otherwise return false.  */
2228*38fd1498Szrj 
2229*38fd1498Szrj bool
2230*38fd1498Szrj call_may_clobber_ref_p (gcall *call, tree ref)
2231*38fd1498Szrj {
2232*38fd1498Szrj   bool res;
2233*38fd1498Szrj   ao_ref r;
2234*38fd1498Szrj   ao_ref_init (&r, ref);
2235*38fd1498Szrj   res = call_may_clobber_ref_p_1 (call, &r);
2236*38fd1498Szrj   if (res)
2237*38fd1498Szrj     ++alias_stats.call_may_clobber_ref_p_may_alias;
2238*38fd1498Szrj   else
2239*38fd1498Szrj     ++alias_stats.call_may_clobber_ref_p_no_alias;
2240*38fd1498Szrj   return res;
2241*38fd1498Szrj }
2242*38fd1498Szrj 
2243*38fd1498Szrj 
2244*38fd1498Szrj /* If the statement STMT may clobber the memory reference REF return true,
2245*38fd1498Szrj    otherwise return false.  */
2246*38fd1498Szrj 
2247*38fd1498Szrj bool
2248*38fd1498Szrj stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
2249*38fd1498Szrj {
2250*38fd1498Szrj   if (is_gimple_call (stmt))
2251*38fd1498Szrj     {
2252*38fd1498Szrj       tree lhs = gimple_call_lhs (stmt);
2253*38fd1498Szrj       if (lhs
2254*38fd1498Szrj 	  && TREE_CODE (lhs) != SSA_NAME)
2255*38fd1498Szrj 	{
2256*38fd1498Szrj 	  ao_ref r;
2257*38fd1498Szrj 	  ao_ref_init (&r, lhs);
2258*38fd1498Szrj 	  if (refs_may_alias_p_1 (ref, &r, true))
2259*38fd1498Szrj 	    return true;
2260*38fd1498Szrj 	}
2261*38fd1498Szrj 
2262*38fd1498Szrj       return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2263*38fd1498Szrj     }
2264*38fd1498Szrj   else if (gimple_assign_single_p (stmt))
2265*38fd1498Szrj     {
2266*38fd1498Szrj       tree lhs = gimple_assign_lhs (stmt);
2267*38fd1498Szrj       if (TREE_CODE (lhs) != SSA_NAME)
2268*38fd1498Szrj 	{
2269*38fd1498Szrj 	  ao_ref r;
2270*38fd1498Szrj 	  ao_ref_init (&r, lhs);
2271*38fd1498Szrj 	  return refs_may_alias_p_1 (ref, &r, true);
2272*38fd1498Szrj 	}
2273*38fd1498Szrj     }
2274*38fd1498Szrj   else if (gimple_code (stmt) == GIMPLE_ASM)
2275*38fd1498Szrj     return true;
2276*38fd1498Szrj 
2277*38fd1498Szrj   return false;
2278*38fd1498Szrj }
2279*38fd1498Szrj 
2280*38fd1498Szrj bool
2281*38fd1498Szrj stmt_may_clobber_ref_p (gimple *stmt, tree ref)
2282*38fd1498Szrj {
2283*38fd1498Szrj   ao_ref r;
2284*38fd1498Szrj   ao_ref_init (&r, ref);
2285*38fd1498Szrj   return stmt_may_clobber_ref_p_1 (stmt, &r);
2286*38fd1498Szrj }
2287*38fd1498Szrj 
2288*38fd1498Szrj /* Return true if store1 and store2 described by corresponding tuples
2289*38fd1498Szrj    <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
2290*38fd1498Szrj    address.  */
2291*38fd1498Szrj 
2292*38fd1498Szrj static bool
2293*38fd1498Szrj same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2294*38fd1498Szrj 			 poly_int64 max_size1,
2295*38fd1498Szrj 			 tree base2, poly_int64 offset2, poly_int64 size2,
2296*38fd1498Szrj 			 poly_int64 max_size2)
2297*38fd1498Szrj {
2298*38fd1498Szrj   /* Offsets need to be 0.  */
2299*38fd1498Szrj   if (maybe_ne (offset1, 0)
2300*38fd1498Szrj       || maybe_ne (offset2, 0))
2301*38fd1498Szrj     return false;
2302*38fd1498Szrj 
2303*38fd1498Szrj   bool base1_obj_p = SSA_VAR_P (base1);
2304*38fd1498Szrj   bool base2_obj_p = SSA_VAR_P (base2);
2305*38fd1498Szrj 
2306*38fd1498Szrj   /* We need one object.  */
2307*38fd1498Szrj   if (base1_obj_p == base2_obj_p)
2308*38fd1498Szrj     return false;
2309*38fd1498Szrj   tree obj = base1_obj_p ? base1 : base2;
2310*38fd1498Szrj 
2311*38fd1498Szrj   /* And we need one MEM_REF.  */
2312*38fd1498Szrj   bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2313*38fd1498Szrj   bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2314*38fd1498Szrj   if (base1_memref_p == base2_memref_p)
2315*38fd1498Szrj     return false;
2316*38fd1498Szrj   tree memref = base1_memref_p ? base1 : base2;
2317*38fd1498Szrj 
2318*38fd1498Szrj   /* Sizes need to be valid.  */
2319*38fd1498Szrj   if (!known_size_p (max_size1)
2320*38fd1498Szrj       || !known_size_p (max_size2)
2321*38fd1498Szrj       || !known_size_p (size1)
2322*38fd1498Szrj       || !known_size_p (size2))
2323*38fd1498Szrj     return false;
2324*38fd1498Szrj 
2325*38fd1498Szrj   /* Max_size needs to match size.  */
2326*38fd1498Szrj   if (maybe_ne (max_size1, size1)
2327*38fd1498Szrj       || maybe_ne (max_size2, size2))
2328*38fd1498Szrj     return false;
2329*38fd1498Szrj 
2330*38fd1498Szrj   /* Sizes need to match.  */
2331*38fd1498Szrj   if (maybe_ne (size1, size2))
2332*38fd1498Szrj     return false;
2333*38fd1498Szrj 
2334*38fd1498Szrj 
2335*38fd1498Szrj   /* Check that memref is a store to pointer with singleton points-to info.  */
2336*38fd1498Szrj   if (!integer_zerop (TREE_OPERAND (memref, 1)))
2337*38fd1498Szrj     return false;
2338*38fd1498Szrj   tree ptr = TREE_OPERAND (memref, 0);
2339*38fd1498Szrj   if (TREE_CODE (ptr) != SSA_NAME)
2340*38fd1498Szrj     return false;
2341*38fd1498Szrj   struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2342*38fd1498Szrj   unsigned int pt_uid;
2343*38fd1498Szrj   if (pi == NULL
2344*38fd1498Szrj       || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2345*38fd1498Szrj     return false;
2346*38fd1498Szrj 
2347*38fd1498Szrj   /* Be conservative with non-call exceptions when the address might
2348*38fd1498Szrj      be NULL.  */
2349*38fd1498Szrj   if (flag_non_call_exceptions && pi->pt.null)
2350*38fd1498Szrj     return false;
2351*38fd1498Szrj 
2352*38fd1498Szrj   /* Check that ptr points relative to obj.  */
2353*38fd1498Szrj   unsigned int obj_uid = DECL_PT_UID (obj);
2354*38fd1498Szrj   if (obj_uid != pt_uid)
2355*38fd1498Szrj     return false;
2356*38fd1498Szrj 
2357*38fd1498Szrj   /* Check that the object size is the same as the store size.  That ensures us
2358*38fd1498Szrj      that ptr points to the start of obj.  */
2359*38fd1498Szrj   return (DECL_SIZE (obj)
2360*38fd1498Szrj 	  && poly_int_tree_p (DECL_SIZE (obj))
2361*38fd1498Szrj 	  && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2362*38fd1498Szrj }
2363*38fd1498Szrj 
2364*38fd1498Szrj /* If STMT kills the memory reference REF return true, otherwise
2365*38fd1498Szrj    return false.  */
2366*38fd1498Szrj 
2367*38fd1498Szrj bool
2368*38fd1498Szrj stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2369*38fd1498Szrj {
2370*38fd1498Szrj   if (!ao_ref_base (ref))
2371*38fd1498Szrj     return false;
2372*38fd1498Szrj 
2373*38fd1498Szrj   if (gimple_has_lhs (stmt)
2374*38fd1498Szrj       && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2375*38fd1498Szrj       /* The assignment is not necessarily carried out if it can throw
2376*38fd1498Szrj 	 and we can catch it in the current function where we could inspect
2377*38fd1498Szrj 	 the previous value.
2378*38fd1498Szrj 	 ???  We only need to care about the RHS throwing.  For aggregate
2379*38fd1498Szrj 	 assignments or similar calls and non-call exceptions the LHS
2380*38fd1498Szrj 	 might throw as well.  */
2381*38fd1498Szrj       && !stmt_can_throw_internal (stmt))
2382*38fd1498Szrj     {
2383*38fd1498Szrj       tree lhs = gimple_get_lhs (stmt);
2384*38fd1498Szrj       /* If LHS is literally a base of the access we are done.  */
2385*38fd1498Szrj       if (ref->ref)
2386*38fd1498Szrj 	{
2387*38fd1498Szrj 	  tree base = ref->ref;
2388*38fd1498Szrj 	  tree innermost_dropped_array_ref = NULL_TREE;
2389*38fd1498Szrj 	  if (handled_component_p (base))
2390*38fd1498Szrj 	    {
2391*38fd1498Szrj 	      tree saved_lhs0 = NULL_TREE;
2392*38fd1498Szrj 	      if (handled_component_p (lhs))
2393*38fd1498Szrj 		{
2394*38fd1498Szrj 		  saved_lhs0 = TREE_OPERAND (lhs, 0);
2395*38fd1498Szrj 		  TREE_OPERAND (lhs, 0) = integer_zero_node;
2396*38fd1498Szrj 		}
2397*38fd1498Szrj 	      do
2398*38fd1498Szrj 		{
2399*38fd1498Szrj 		  /* Just compare the outermost handled component, if
2400*38fd1498Szrj 		     they are equal we have found a possible common
2401*38fd1498Szrj 		     base.  */
2402*38fd1498Szrj 		  tree saved_base0 = TREE_OPERAND (base, 0);
2403*38fd1498Szrj 		  TREE_OPERAND (base, 0) = integer_zero_node;
2404*38fd1498Szrj 		  bool res = operand_equal_p (lhs, base, 0);
2405*38fd1498Szrj 		  TREE_OPERAND (base, 0) = saved_base0;
2406*38fd1498Szrj 		  if (res)
2407*38fd1498Szrj 		    break;
2408*38fd1498Szrj 		  /* Remember if we drop an array-ref that we need to
2409*38fd1498Szrj 		     double-check not being at struct end.  */
2410*38fd1498Szrj 		  if (TREE_CODE (base) == ARRAY_REF
2411*38fd1498Szrj 		      || TREE_CODE (base) == ARRAY_RANGE_REF)
2412*38fd1498Szrj 		    innermost_dropped_array_ref = base;
2413*38fd1498Szrj 		  /* Otherwise drop handled components of the access.  */
2414*38fd1498Szrj 		  base = saved_base0;
2415*38fd1498Szrj 		}
2416*38fd1498Szrj 	      while (handled_component_p (base));
2417*38fd1498Szrj 	      if (saved_lhs0)
2418*38fd1498Szrj 		TREE_OPERAND (lhs, 0) = saved_lhs0;
2419*38fd1498Szrj 	    }
2420*38fd1498Szrj 	  /* Finally check if the lhs has the same address and size as the
2421*38fd1498Szrj 	     base candidate of the access.  Watch out if we have dropped
2422*38fd1498Szrj 	     an array-ref that was at struct end, this means ref->ref may
2423*38fd1498Szrj 	     be outside of the TYPE_SIZE of its base.  */
2424*38fd1498Szrj 	  if ((! innermost_dropped_array_ref
2425*38fd1498Szrj 	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
2426*38fd1498Szrj 	      && (lhs == base
2427*38fd1498Szrj 		  || (((TYPE_SIZE (TREE_TYPE (lhs))
2428*38fd1498Szrj 			== TYPE_SIZE (TREE_TYPE (base)))
2429*38fd1498Szrj 		       || (TYPE_SIZE (TREE_TYPE (lhs))
2430*38fd1498Szrj 			   && TYPE_SIZE (TREE_TYPE (base))
2431*38fd1498Szrj 			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2432*38fd1498Szrj 					       TYPE_SIZE (TREE_TYPE (base)),
2433*38fd1498Szrj 					       0)))
2434*38fd1498Szrj 		      && operand_equal_p (lhs, base,
2435*38fd1498Szrj 					  OEP_ADDRESS_OF
2436*38fd1498Szrj 					  | OEP_MATCH_SIDE_EFFECTS))))
2437*38fd1498Szrj 	    return true;
2438*38fd1498Szrj 	}
2439*38fd1498Szrj 
2440*38fd1498Szrj       /* Now look for non-literal equal bases with the restriction of
2441*38fd1498Szrj          handling constant offset and size.  */
2442*38fd1498Szrj       /* For a must-alias check we need to be able to constrain
2443*38fd1498Szrj 	 the access properly.  */
2444*38fd1498Szrj       if (!ref->max_size_known_p ())
2445*38fd1498Szrj 	return false;
2446*38fd1498Szrj       poly_int64 size, offset, max_size, ref_offset = ref->offset;
2447*38fd1498Szrj       bool reverse;
2448*38fd1498Szrj       tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2449*38fd1498Szrj 					   &reverse);
2450*38fd1498Szrj       /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2451*38fd1498Szrj 	 so base == ref->base does not always hold.  */
2452*38fd1498Szrj       if (base != ref->base)
2453*38fd1498Szrj 	{
2454*38fd1498Szrj 	  /* Try using points-to info.  */
2455*38fd1498Szrj 	  if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2456*38fd1498Szrj 				       ref->offset, ref->size, ref->max_size))
2457*38fd1498Szrj 	    return true;
2458*38fd1498Szrj 
2459*38fd1498Szrj 	  /* If both base and ref->base are MEM_REFs, only compare the
2460*38fd1498Szrj 	     first operand, and if the second operand isn't equal constant,
2461*38fd1498Szrj 	     try to add the offsets into offset and ref_offset.  */
2462*38fd1498Szrj 	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2463*38fd1498Szrj 	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2464*38fd1498Szrj 	    {
2465*38fd1498Szrj 	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2466*38fd1498Szrj 				       TREE_OPERAND (ref->base, 1)))
2467*38fd1498Szrj 		{
2468*38fd1498Szrj 		  poly_offset_int off1 = mem_ref_offset (base);
2469*38fd1498Szrj 		  off1 <<= LOG2_BITS_PER_UNIT;
2470*38fd1498Szrj 		  off1 += offset;
2471*38fd1498Szrj 		  poly_offset_int off2 = mem_ref_offset (ref->base);
2472*38fd1498Szrj 		  off2 <<= LOG2_BITS_PER_UNIT;
2473*38fd1498Szrj 		  off2 += ref_offset;
2474*38fd1498Szrj 		  if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2475*38fd1498Szrj 		    size = -1;
2476*38fd1498Szrj 		}
2477*38fd1498Szrj 	    }
2478*38fd1498Szrj 	  else
2479*38fd1498Szrj 	    size = -1;
2480*38fd1498Szrj 	}
2481*38fd1498Szrj       /* For a must-alias check we need to be able to constrain
2482*38fd1498Szrj 	 the access properly.  */
2483*38fd1498Szrj       if (known_eq (size, max_size)
2484*38fd1498Szrj 	  && known_subrange_p (ref_offset, ref->max_size, offset, size))
2485*38fd1498Szrj 	return true;
2486*38fd1498Szrj     }
2487*38fd1498Szrj 
2488*38fd1498Szrj   if (is_gimple_call (stmt))
2489*38fd1498Szrj     {
2490*38fd1498Szrj       tree callee = gimple_call_fndecl (stmt);
2491*38fd1498Szrj       if (callee != NULL_TREE
2492*38fd1498Szrj 	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2493*38fd1498Szrj 	switch (DECL_FUNCTION_CODE (callee))
2494*38fd1498Szrj 	  {
2495*38fd1498Szrj 	  case BUILT_IN_FREE:
2496*38fd1498Szrj 	    {
2497*38fd1498Szrj 	      tree ptr = gimple_call_arg (stmt, 0);
2498*38fd1498Szrj 	      tree base = ao_ref_base (ref);
2499*38fd1498Szrj 	      if (base && TREE_CODE (base) == MEM_REF
2500*38fd1498Szrj 		  && TREE_OPERAND (base, 0) == ptr)
2501*38fd1498Szrj 		return true;
2502*38fd1498Szrj 	      break;
2503*38fd1498Szrj 	    }
2504*38fd1498Szrj 
2505*38fd1498Szrj 	  case BUILT_IN_MEMCPY:
2506*38fd1498Szrj 	  case BUILT_IN_MEMPCPY:
2507*38fd1498Szrj 	  case BUILT_IN_MEMMOVE:
2508*38fd1498Szrj 	  case BUILT_IN_MEMSET:
2509*38fd1498Szrj 	  case BUILT_IN_MEMCPY_CHK:
2510*38fd1498Szrj 	  case BUILT_IN_MEMPCPY_CHK:
2511*38fd1498Szrj 	  case BUILT_IN_MEMMOVE_CHK:
2512*38fd1498Szrj 	  case BUILT_IN_MEMSET_CHK:
2513*38fd1498Szrj 	  case BUILT_IN_STRNCPY:
2514*38fd1498Szrj 	  case BUILT_IN_STPNCPY:
2515*38fd1498Szrj 	    {
2516*38fd1498Szrj 	      /* For a must-alias check we need to be able to constrain
2517*38fd1498Szrj 		 the access properly.  */
2518*38fd1498Szrj 	      if (!ref->max_size_known_p ())
2519*38fd1498Szrj 		return false;
2520*38fd1498Szrj 	      tree dest = gimple_call_arg (stmt, 0);
2521*38fd1498Szrj 	      tree len = gimple_call_arg (stmt, 2);
2522*38fd1498Szrj 	      if (!poly_int_tree_p (len))
2523*38fd1498Szrj 		return false;
2524*38fd1498Szrj 	      tree rbase = ref->base;
2525*38fd1498Szrj 	      poly_offset_int roffset = ref->offset;
2526*38fd1498Szrj 	      ao_ref dref;
2527*38fd1498Szrj 	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
2528*38fd1498Szrj 	      tree base = ao_ref_base (&dref);
2529*38fd1498Szrj 	      poly_offset_int offset = dref.offset;
2530*38fd1498Szrj 	      if (!base || !known_size_p (dref.size))
2531*38fd1498Szrj 		return false;
2532*38fd1498Szrj 	      if (TREE_CODE (base) == MEM_REF)
2533*38fd1498Szrj 		{
2534*38fd1498Szrj 		  if (TREE_CODE (rbase) != MEM_REF)
2535*38fd1498Szrj 		    return false;
2536*38fd1498Szrj 		  // Compare pointers.
2537*38fd1498Szrj 		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2538*38fd1498Szrj 		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2539*38fd1498Szrj 		  base = TREE_OPERAND (base, 0);
2540*38fd1498Szrj 		  rbase = TREE_OPERAND (rbase, 0);
2541*38fd1498Szrj 		}
2542*38fd1498Szrj 	      if (base == rbase
2543*38fd1498Szrj 		  && known_subrange_p (roffset, ref->max_size, offset,
2544*38fd1498Szrj 				       wi::to_poly_offset (len)
2545*38fd1498Szrj 				       << LOG2_BITS_PER_UNIT))
2546*38fd1498Szrj 		return true;
2547*38fd1498Szrj 	      break;
2548*38fd1498Szrj 	    }
2549*38fd1498Szrj 
2550*38fd1498Szrj 	  case BUILT_IN_VA_END:
2551*38fd1498Szrj 	    {
2552*38fd1498Szrj 	      tree ptr = gimple_call_arg (stmt, 0);
2553*38fd1498Szrj 	      if (TREE_CODE (ptr) == ADDR_EXPR)
2554*38fd1498Szrj 		{
2555*38fd1498Szrj 		  tree base = ao_ref_base (ref);
2556*38fd1498Szrj 		  if (TREE_OPERAND (ptr, 0) == base)
2557*38fd1498Szrj 		    return true;
2558*38fd1498Szrj 		}
2559*38fd1498Szrj 	      break;
2560*38fd1498Szrj 	    }
2561*38fd1498Szrj 
2562*38fd1498Szrj 	  default:;
2563*38fd1498Szrj 	  }
2564*38fd1498Szrj     }
2565*38fd1498Szrj   return false;
2566*38fd1498Szrj }
2567*38fd1498Szrj 
2568*38fd1498Szrj bool
2569*38fd1498Szrj stmt_kills_ref_p (gimple *stmt, tree ref)
2570*38fd1498Szrj {
2571*38fd1498Szrj   ao_ref r;
2572*38fd1498Szrj   ao_ref_init (&r, ref);
2573*38fd1498Szrj   return stmt_kills_ref_p (stmt, &r);
2574*38fd1498Szrj }
2575*38fd1498Szrj 
2576*38fd1498Szrj 
2577*38fd1498Szrj /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2578*38fd1498Szrj    TARGET or a statement clobbering the memory reference REF in which
2579*38fd1498Szrj    case false is returned.  The walk starts with VUSE, one argument of PHI.  */
2580*38fd1498Szrj 
2581*38fd1498Szrj static bool
2582*38fd1498Szrj maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
2583*38fd1498Szrj 		  tree vuse, unsigned int *cnt, bitmap *visited,
2584*38fd1498Szrj 		  bool abort_on_visited,
2585*38fd1498Szrj 		  void *(*translate)(ao_ref *, tree, void *, bool *),
2586*38fd1498Szrj 		  void *data)
2587*38fd1498Szrj {
2588*38fd1498Szrj   basic_block bb = gimple_bb (phi);
2589*38fd1498Szrj 
2590*38fd1498Szrj   if (!*visited)
2591*38fd1498Szrj     *visited = BITMAP_ALLOC (NULL);
2592*38fd1498Szrj 
2593*38fd1498Szrj   bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2594*38fd1498Szrj 
2595*38fd1498Szrj   /* Walk until we hit the target.  */
2596*38fd1498Szrj   while (vuse != target)
2597*38fd1498Szrj     {
2598*38fd1498Szrj       gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2599*38fd1498Szrj       /* Recurse for PHI nodes.  */
2600*38fd1498Szrj       if (gimple_code (def_stmt) == GIMPLE_PHI)
2601*38fd1498Szrj 	{
2602*38fd1498Szrj 	  /* An already visited PHI node ends the walk successfully.  */
2603*38fd1498Szrj 	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2604*38fd1498Szrj 	    return !abort_on_visited;
2605*38fd1498Szrj 	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2606*38fd1498Szrj 					   visited, abort_on_visited,
2607*38fd1498Szrj 					   translate, data);
2608*38fd1498Szrj 	  if (!vuse)
2609*38fd1498Szrj 	    return false;
2610*38fd1498Szrj 	  continue;
2611*38fd1498Szrj 	}
2612*38fd1498Szrj       else if (gimple_nop_p (def_stmt))
2613*38fd1498Szrj 	return false;
2614*38fd1498Szrj       else
2615*38fd1498Szrj 	{
2616*38fd1498Szrj 	  /* A clobbering statement or the end of the IL ends it failing.  */
2617*38fd1498Szrj 	  ++*cnt;
2618*38fd1498Szrj 	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2619*38fd1498Szrj 	    {
2620*38fd1498Szrj 	      bool disambiguate_only = true;
2621*38fd1498Szrj 	      if (translate
2622*38fd1498Szrj 		  && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2623*38fd1498Szrj 		;
2624*38fd1498Szrj 	      else
2625*38fd1498Szrj 		return false;
2626*38fd1498Szrj 	    }
2627*38fd1498Szrj 	}
2628*38fd1498Szrj       /* If we reach a new basic-block see if we already skipped it
2629*38fd1498Szrj          in a previous walk that ended successfully.  */
2630*38fd1498Szrj       if (gimple_bb (def_stmt) != bb)
2631*38fd1498Szrj 	{
2632*38fd1498Szrj 	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2633*38fd1498Szrj 	    return !abort_on_visited;
2634*38fd1498Szrj 	  bb = gimple_bb (def_stmt);
2635*38fd1498Szrj 	}
2636*38fd1498Szrj       vuse = gimple_vuse (def_stmt);
2637*38fd1498Szrj     }
2638*38fd1498Szrj   return true;
2639*38fd1498Szrj }
2640*38fd1498Szrj 
2641*38fd1498Szrj 
2642*38fd1498Szrj /* Starting from a PHI node for the virtual operand of the memory reference
2643*38fd1498Szrj    REF find a continuation virtual operand that allows to continue walking
2644*38fd1498Szrj    statements dominating PHI skipping only statements that cannot possibly
2645*38fd1498Szrj    clobber REF.  Increments *CNT for each alias disambiguation done.
2646*38fd1498Szrj    Returns NULL_TREE if no suitable virtual operand can be found.  */
2647*38fd1498Szrj 
tree
get_continuation_for_phi (gimple *phi, ao_ref *ref,
			  unsigned int *cnt, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *, bool *),
			  void *data)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand which definition
     dominates those of all others.  */
  /* First look if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      /* A default definition (function entry) trivially dominates
	 every other definition.  */
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      /* An argument defined in a proper dominator of the PHI's block
	 dominates the definitions of all other arguments.  */
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, look if we can reach such candidate by walking defs
     of a PHI arg without crossing other PHIs.  */
  if (! arg0)
    for (i = 0; i < nargs; ++i)
      {
	arg0 = PHI_ARG_DEF (phi, i);
	gimple *def = SSA_NAME_DEF_STMT (arg0);
	/* Backedges can't work.  */
	if (dominated_by_p (CDI_DOMINATORS,
			    gimple_bb (def), phi_bb))
	  continue;
	/* See below.  */
	if (gimple_code (def) == GIMPLE_PHI)
	  continue;
	/* Follow the virtual use-def chain upwards until we leave for
	   a block that dominates PHI_BB.  */
	while (! dominated_by_p (CDI_DOMINATORS,
				 phi_bb, gimple_bb (def)))
	  {
	    arg0 = gimple_vuse (def);
	    if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	      break;
	    def = SSA_NAME_DEF_STMT (arg0);
	    if (gimple_code (def) == GIMPLE_PHI)
	      {
		/* Do not try to look through arbitrarily complicated
		   CFGs.  For those looking for the first VUSE starting
		   from the end of the immediate dominator of phi_bb
		   is likely faster.  */
		arg0 = NULL_TREE;
		goto next;
	      }
	  }
	break;
next:;
      }
  if (! arg0)
    return NULL_TREE;

  /* Then check against the found candidate.  */
  /* maybe_skip_until verifies each remaining argument can be skipped
     towards the candidate without hitting a clobber of REF (TRANSLATE,
     if given, gets a chance to adjust REF at possible clobbers).  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, ref, arg1, cnt, visited,
				   abort_on_visited, translate, data))
	return NULL_TREE;
    }

  return arg0;
}
2731*38fd1498Szrj 
2732*38fd1498Szrj /* Based on the memory reference REF and its virtual use VUSE call
2733*38fd1498Szrj    WALKER for each virtual use that is equivalent to VUSE, including VUSE
2734*38fd1498Szrj    itself.  That is, for each virtual use for which its defining statement
2735*38fd1498Szrj    does not clobber REF.
2736*38fd1498Szrj 
2737*38fd1498Szrj    WALKER is called with REF, the current virtual use and DATA.  If
2738*38fd1498Szrj    WALKER returns non-NULL the walk stops and its result is returned.
2739*38fd1498Szrj    At the end of a non-successful walk NULL is returned.
2740*38fd1498Szrj 
2741*38fd1498Szrj    TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2742*38fd1498Szrj    use which definition is a statement that may clobber REF and DATA.
2743*38fd1498Szrj    If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2744*38fd1498Szrj    If TRANSLATE returns non-NULL the walk stops and its result is returned.
2745*38fd1498Szrj    If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2746*38fd1498Szrj    to adjust REF and *DATA to make that valid.
2747*38fd1498Szrj 
2748*38fd1498Szrj    VALUEIZE if non-NULL is called with the next VUSE that is considered
2749*38fd1498Szrj    and return value is substituted for that.  This can be used to
2750*38fd1498Szrj    implement optimistic value-numbering for example.  Note that the
2751*38fd1498Szrj    VUSE argument is assumed to be valueized already.
2752*38fd1498Szrj 
2753*38fd1498Szrj    TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
2754*38fd1498Szrj 
void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *, bool *),
			tree (*valueize)(tree),
			void *data)
{
  bitmap visited = NULL;
  void *res;
  /* Number of alias disambiguations done during the walk, passed on
     to WALKER and to get_continuation_for_phi.  */
  unsigned int cnt = 0;
  /* Set once TRANSLATE actually adjusted REF (not merely disambiguated);
     from then on PHI walks abort when re-visiting blocks.  */
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      /* Let the caller substitute an equivalent virtual operand, e.g.
	 for optimistic value-numbering.  */
      if (valueize)
	vuse = valueize (vuse);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Function entry reached - stop the walk.  */
      if (gimple_nop_p (def_stmt))
	break;
      /* At a PHI find a virtual operand that lets us continue the walk
	 in the dominators of the PHI's block.  */
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated, translate, data);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      /* A possibly clobbering statement - without TRANSLATE
		 the walk ends unsuccessfully.  */
	      if (!translate)
		break;
	      bool disambiguate_only = false;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || !disambiguate_only;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
2826*38fd1498Szrj 
2827*38fd1498Szrj 
2828*38fd1498Szrj /* Based on the memory reference REF call WALKER for each vdef which
2829*38fd1498Szrj    defining statement may clobber REF, starting with VDEF.  If REF
2830*38fd1498Szrj    is NULL_TREE, each defining statement is visited.
2831*38fd1498Szrj 
2832*38fd1498Szrj    WALKER is called with REF, the current vdef and DATA.  If WALKER
2833*38fd1498Szrj    returns true the walk is stopped, otherwise it continues.
2834*38fd1498Szrj 
2835*38fd1498Szrj    If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2836*38fd1498Szrj    The pointer may be NULL and then we do not track this information.
2837*38fd1498Szrj 
   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points); the
   statement count is accumulated across all of the forked walks.
2841*38fd1498Szrj 
2842*38fd1498Szrj    The function returns the number of statements walked or -1 if
2843*38fd1498Szrj    LIMIT stmts were walked and the walk was aborted at this point.
2844*38fd1498Szrj    If LIMIT is zero the walk is not aborted.  */
2845*38fd1498Szrj 
2846*38fd1498Szrj static int
2847*38fd1498Szrj walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2848*38fd1498Szrj 		      bool (*walker)(ao_ref *, tree, void *), void *data,
2849*38fd1498Szrj 		      bitmap *visited, unsigned int cnt,
2850*38fd1498Szrj 		      bool *function_entry_reached, unsigned limit)
2851*38fd1498Szrj {
2852*38fd1498Szrj   do
2853*38fd1498Szrj     {
2854*38fd1498Szrj       gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2855*38fd1498Szrj 
2856*38fd1498Szrj       if (*visited
2857*38fd1498Szrj 	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2858*38fd1498Szrj 	return cnt;
2859*38fd1498Szrj 
2860*38fd1498Szrj       if (gimple_nop_p (def_stmt))
2861*38fd1498Szrj 	{
2862*38fd1498Szrj 	  if (function_entry_reached)
2863*38fd1498Szrj 	    *function_entry_reached = true;
2864*38fd1498Szrj 	  return cnt;
2865*38fd1498Szrj 	}
2866*38fd1498Szrj       else if (gimple_code (def_stmt) == GIMPLE_PHI)
2867*38fd1498Szrj 	{
2868*38fd1498Szrj 	  unsigned i;
2869*38fd1498Szrj 	  if (!*visited)
2870*38fd1498Szrj 	    *visited = BITMAP_ALLOC (NULL);
2871*38fd1498Szrj 	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2872*38fd1498Szrj 	    {
2873*38fd1498Szrj 	      int res = walk_aliased_vdefs_1 (ref,
2874*38fd1498Szrj 					      gimple_phi_arg_def (def_stmt, i),
2875*38fd1498Szrj 					      walker, data, visited, cnt,
2876*38fd1498Szrj 					      function_entry_reached, limit);
2877*38fd1498Szrj 	      if (res == -1)
2878*38fd1498Szrj 		return -1;
2879*38fd1498Szrj 	      cnt = res;
2880*38fd1498Szrj 	    }
2881*38fd1498Szrj 	  return cnt;
2882*38fd1498Szrj 	}
2883*38fd1498Szrj 
2884*38fd1498Szrj       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
2885*38fd1498Szrj       cnt++;
2886*38fd1498Szrj       if (cnt == limit)
2887*38fd1498Szrj 	return -1;
2888*38fd1498Szrj       if ((!ref
2889*38fd1498Szrj 	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2890*38fd1498Szrj 	  && (*walker) (ref, vdef, data))
2891*38fd1498Szrj 	return cnt;
2892*38fd1498Szrj 
2893*38fd1498Szrj       vdef = gimple_vuse (def_stmt);
2894*38fd1498Szrj     }
2895*38fd1498Szrj   while (1);
2896*38fd1498Szrj }
2897*38fd1498Szrj 
2898*38fd1498Szrj int
2899*38fd1498Szrj walk_aliased_vdefs (ao_ref *ref, tree vdef,
2900*38fd1498Szrj 		    bool (*walker)(ao_ref *, tree, void *), void *data,
2901*38fd1498Szrj 		    bitmap *visited,
2902*38fd1498Szrj 		    bool *function_entry_reached, unsigned int limit)
2903*38fd1498Szrj {
2904*38fd1498Szrj   bitmap local_visited = NULL;
2905*38fd1498Szrj   int ret;
2906*38fd1498Szrj 
2907*38fd1498Szrj   timevar_push (TV_ALIAS_STMT_WALK);
2908*38fd1498Szrj 
2909*38fd1498Szrj   if (function_entry_reached)
2910*38fd1498Szrj     *function_entry_reached = false;
2911*38fd1498Szrj 
2912*38fd1498Szrj   ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2913*38fd1498Szrj 			      visited ? visited : &local_visited, 0,
2914*38fd1498Szrj 			      function_entry_reached, limit);
2915*38fd1498Szrj   if (local_visited)
2916*38fd1498Szrj     BITMAP_FREE (local_visited);
2917*38fd1498Szrj 
2918*38fd1498Szrj   timevar_pop (TV_ALIAS_STMT_WALK);
2919*38fd1498Szrj 
2920*38fd1498Szrj   return ret;
2921*38fd1498Szrj }
2922*38fd1498Szrj 
2923